Example 1
	def InitMapper( cls, metadata, ObjectType ):
		cls.__table__ = Table( cls.__tablename__, metadata,
				Column('id',	    Integer,     index = True, primary_key = True),
				Column('type_id',	Integer,     ForeignKey( ObjectType.id ), nullable = False),
				Column('parent_id', Integer,     ForeignKey( "%s.id" % cls.__tablename__ ), nullable = True),
				Column('name',      Text,        nullable = False),
				Column('size',      Integer(64), nullable = False, default = 0),
				Column('pos_x',     Integer(64), nullable = False, default = 0),
				Column('pos_y',     Integer(64), nullable = False, default = 0),
				Column('pos_z',     Integer(64), nullable = False, default = 0),
				Column('mtime',	    DateTime,    nullable = False,
					onupdate = func.current_timestamp(), default = func.current_timestamp()))

		cols = cls.__table__.c

		Index('ix_%s_position' % cls.__tablename__, cols.pos_x, cols.pos_y, cols.pos_z)

		mapper( cls, cls.__table__, polymorphic_on = cols.type_id, properties = {
			'type': relation( ObjectType,
				uselist = False,
				backref = backref( 'objects' )),
			# Tree-like hierarchy for objects, i.e. Universe => Solar systems => Planets => etc.
			'children': relation( cls,
				backref = backref( 'parent', remote_side = [ cols.id ] )),
			# Object position in 3D space
			'position': composite( Vector3D, cols.pos_x, cols.pos_y, cols.pos_z ),
			})
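
A minimal declarative sketch of the two patterns this classical mapping sets up: the self-referential parent/children adjacency list (backref with remote_side) and the composite() value wrapping the three position columns. The names here (SpaceObject, Vector3D) are illustrative stand-ins, not the project's actual classes, and assume SQLAlchemy 1.x-style declarative use.

from sqlalchemy import Column, Integer, Text, ForeignKey
from sqlalchemy.orm import relationship, backref, composite
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Vector3D(object):
    # value object used by composite(): needs __composite_values__ and __eq__
    def __init__(self, x, y, z):
        self.x, self.y, self.z = x, y, z

    def __composite_values__(self):
        return self.x, self.y, self.z

    def __eq__(self, other):
        return isinstance(other, Vector3D) and \
            (self.x, self.y, self.z) == (other.x, other.y, other.z)

class SpaceObject(Base):
    __tablename__ = 'object'

    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey('object.id'), nullable=True)
    name = Column(Text, nullable=False)
    pos_x = Column(Integer, nullable=False, default=0)
    pos_y = Column(Integer, nullable=False, default=0)
    pos_z = Column(Integer, nullable=False, default=0)

    # adjacency list: each object gets a 'children' collection and a 'parent'
    # backref; remote_side marks the parent side of the self-join
    children = relationship('SpaceObject',
                            backref=backref('parent', remote_side=[id]))

    # expose the three integer columns as a single Vector3D value
    position = composite(Vector3D, pos_x, pos_y, pos_z)
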
Example 2
def setup_orm():
    tables = meta.metadata.tables
    columns = tables['group_mailing_list_messages'].c
    orm.mapper(GroupMailingListMessage,
               tables['group_mailing_list_messages'],
               inherits=ContentItem,
               polymorphic_identity='mailing_list_message',
               polymorphic_on=tables['content_items'].c.content_type,
               properties = {
                             'reply_to': relation(GroupMailingListMessage,
                                                  backref=backref('replies'),
                                                  foreign_keys=(columns.reply_to_message_machine_id),
                                                  primaryjoin=columns.id == columns.reply_to_message_machine_id,
                                                  remote_side=(columns.id)),
                             'thread': relation(GroupMailingListMessage,
                                                post_update=True,
                                                order_by=[asc(columns.sent)],
                                                backref=backref('posts'),
                                                foreign_keys=(columns.thread_message_machine_id),
                                                primaryjoin=columns.id == columns.thread_message_machine_id,
                                                remote_side=(columns.id)),
                             'author': relation(User,
                                                backref=backref('messages')),
                             'group': relation(Group,
                                               primaryjoin=(columns.group_id == tables['groups'].c.id)),
                             'attachments': synonym("files")
                             })
Example 3
    def setup_mappers(cls):
        Right, Middle, middle, right, left, Left = (
            cls.classes.Right,
            cls.classes.Middle,
            cls.tables.middle,
            cls.tables.right,
            cls.tables.left,
            cls.classes.Left,
        )

        # set up bi-directional eager loads
        mapper(Left, left)
        mapper(Right, right)
        mapper(
            Middle,
            middle,
            properties=dict(
                left=relationship(
                    Left,
                    lazy="joined",
                    backref=backref("middle", lazy="joined"),
                ),
                right=relationship(
                    Right,
                    lazy="joined",
                    backref=backref("middle", lazy="joined"),
                ),
            ),
        )
Example 4
    def setup_mappers(cls):
        Account, Transaction, transactions, accounts, entries, Entry = (
            cls.classes.Account,
            cls.classes.Transaction,
            cls.tables.transactions,
            cls.tables.accounts,
            cls.tables.entries,
            cls.classes.Entry,
        )

        mapper(Account, accounts)

        mapper(Transaction, transactions)

        mapper(
            Entry,
            entries,
            properties=dict(
                account=relationship(
                    Account,
                    uselist=False,
                    backref=backref(
                        "entries", lazy="select", order_by=entries.c.entry_id
                    ),
                ),
                transaction=relationship(
                    Transaction,
                    uselist=False,
                    backref=backref(
                        "entries", lazy="joined", order_by=entries.c.entry_id
                    ),
                ),
            ),
        )
Example 5
def initialize_mapper():
    orm.mapper(Content, content,
               polymorphic_on=content.c.object_type,
               polymorphic_identity='content',
               properties = {
                   'children': orm.relation(
                       Content,
                       backref=orm.backref(
                           'parent',
                           remote_side=[content.c.content_id])),
                   'relations': orm.relation(
                       Relation,
                       cascade="all, delete-orphan",
                       primaryjoin=content.c.content_id==relations.c.source_id,
                       backref=orm.backref("source"))})

    orm.mapper(Relation, relations,
               properties = {
               'target': orm.relation(
                   Content, uselist=False,
                   primaryjoin=content.c.content_id==relations.c.target_id)})

    orm.mapper(File, files,
               polymorphic_on=files.c.type,
               polymorphic_identity='db-file')
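
Both mapper() calls here configure polymorphic loading by hand via polymorphic_on (and, in other examples above, inherits plus polymorphic_identity). For reference, a hedged declarative sketch of the equivalent joined-table inheritance setup, using toy Content/Folder names rather than this project's schema:

from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Content(Base):
    __tablename__ = 'content'

    id = Column(Integer, primary_key=True)
    object_type = Column(String(50), nullable=False)

    # discriminator column plus the identity stored for plain Content rows
    __mapper_args__ = {
        'polymorphic_on': object_type,
        'polymorphic_identity': 'content',
    }

class Folder(Content):
    __tablename__ = 'folder'

    # joined-table inheritance: the subclass table shares the base primary key
    id = Column(Integer, ForeignKey('content.id'), primary_key=True)

    __mapper_args__ = {'polymorphic_identity': 'folder'}
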
Example 6
def setup_orm():
    tables = meta.metadata.tables
    orm.mapper(Language, tables['languages'])
    orm.mapper(I18nText, tables['i18n_texts'],
               properties={
                   'versions': relation(I18nTextVersion,
                                        order_by=tables['i18n_texts_versions'].c.language_id.asc())
               })

    orm.mapper(I18nTextVersion, tables['i18n_texts_versions'],
               properties={ 'language': relation(Language) })


    # LanguageText is deprecated until it uses I18nText
    orm.mapper(LanguageText,
               tables['language_texts'],
               properties={
                   'language': relation(Language,
                                        backref=backref('texts',
                                                        order_by=tables['language_texts'].c.id.asc(),
                                                        cascade='all, delete-orphan'))})

    orm.mapper(Country,
               tables['countries'],
               properties={
                   'language': relation(Language,
                                        backref=backref('countries',
                                                        cascade='all, delete-orphan',
                                                        order_by=tables['countries'].c.id.asc()))})
Example 7
 def parent(cls):
     if cls.__parentname__.lower() == cls.__tablename__.lower():
         return relationship(
             cls.__parentname__,
             backref=backref(cls.__tablename__.lower()),
             remote_side=[cls.client_id, cls.object_id],
         )
     else:
         return relationship(cls.__parentname__, backref=backref(cls.__tablename__.lower()))
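
Helpers like this parent() (and the acctfrom()/payee() variants further down) are typically @declared_attr callables on a declarative mixin, so each concrete class gets its own relationship built from its own table name. A minimal sketch of that wiring under that assumption, with made-up ParentedMixin/Node names:

from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base, declared_attr

Base = declarative_base()

class ParentedMixin(object):
    @declared_attr
    def parent_id(cls):
        # FK back to the class's own table
        return Column(Integer, ForeignKey('%s.id' % cls.__tablename__))

    @declared_attr
    def parent(cls):
        # self-referential: remote_side (given here as a string, evaluated by
        # declarative) marks the parent side; the backref adds 'children'
        return relationship(cls.__name__,
                            remote_side='%s.id' % cls.__name__,
                            backref=backref('children'))

class Node(ParentedMixin, Base):
    __tablename__ = 'node'
    id = Column(Integer, primary_key=True)
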
Example 8
 def setup_mappers(cls):
     # set up bi-directional eager loads
     mapper(Left, left)
     mapper(Right, right)
     mapper(Middle, middle, properties=dict(
         left=relation(Left,
                       lazy=False,
                       backref=backref('middle',lazy=False)),
         right=relation(Right,
                        lazy=False,
                        backref=backref('middle', lazy=False))))
Example 9
def init():
	"""define table class and mapping"""

	# Database definition
	from sqlalchemy import types, orm
	from sqlalchemy.schema import Column, Table, Sequence, ForeignKey
	from sqlalchemy.orm import relationship, backref, relation, mapper
	# Dependencies
	from Planning import Planning
	from Campus import Campus
	from Period import Period

	t_class = Table('class', db.metadata,
		Column('id',					types.Integer,
			Sequence('class_seq_id', optional = True),
			nullable	= False,
			primary_key	= True),

		Column('name',					types.VARCHAR(255),
			nullable	= False),

		Column('id_planning',				types.Integer,
			ForeignKey('planning.id'),
			nullable	= False),

		Column('id_campus',				types.Integer,
			ForeignKey('campus.id'),
			nullable	= False),
	)

	t_class_period = Table('class_period', db.metadata,
		Column('id_class',				types.Integer,
			ForeignKey('class.id'),
			nullable	= False),

		Column('id_period',				types.Integer,
			ForeignKey('period.id'),
			nullable	= False),
	)

	mapper(Class, t_class, properties = {
		'planning'	: relationship(Planning,
			backref		= backref('type_class', uselist = False)),

		'campus'	: relationship(Campus,
			backref		= backref('classes',
				cascade		= "all, delete-orphan",
				order_by	= t_class.c.name.desc())),

		'periods'	: relationship(Period,
			secondary	= t_class_period,
			backref		= 'classes'),
	})
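
The 'periods' property is the classical form of a plain many-to-many: a secondary association table plus a string backref that adds the reverse collection. A rough declarative sketch of the same shape, with assumed toy Course/Period names instead of the tables defined above:

from sqlalchemy import Column, Integer, String, ForeignKey, Table
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# association table: no mapped class of its own, just the two foreign keys
course_period = Table(
    'course_period', Base.metadata,
    Column('id_course', Integer, ForeignKey('course.id'), primary_key=True),
    Column('id_period', Integer, ForeignKey('period.id'), primary_key=True),
)

class Period(Base):
    __tablename__ = 'period'
    id = Column(Integer, primary_key=True)
    name = Column(String(255))

class Course(Base):
    __tablename__ = 'course'
    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    # many-to-many through the association table; the plain string backref
    # adds a matching 'courses' collection on Period
    periods = relationship(Period, secondary=course_period, backref='courses')
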
Example 10
 def setup_relationships(self):
     r"""Setup custom relationships for the custom tables"""
     self.RequestToken.consumer_key = sa.Column(sa.ForeignKey(
         self.Consumer.key))
     self.AccessToken.consumer_key = sa.Column(sa.ForeignKey(
         self.Consumer.key))
     # In particular, do not ask for cascade on delete
     self.Consumer.request_tokens = orm.relation(self.RequestToken,
         backref=orm.backref('consumer'),
         cascade='')
     self.Consumer.access_tokens = orm.relation(self.AccessToken,
         backref=orm.backref('consumer'),
         cascade='')
Example 11
    def setup_mappers(cls):
        mapper(Account, accounts)

        mapper(Transaction, transactions)

        mapper(Entry, entries, properties=dict(
            account=relation(Account,
                             uselist=False,
                             backref=backref('entries', lazy=True,
                                             order_by=entries.c.entry_id)),
            transaction=relation(Transaction,
                                 uselist=False,
                                 backref=backref('entries', lazy=False,
                                                 order_by=entries.c.entry_id))))
Example 12
    def test_bidirectional(self):
        place_input, transition, Transition, Place, place, place_output = (
            self.tables.place_input,
            self.tables.transition,
            self.classes.Transition,
            self.classes.Place,
            self.tables.place,
            self.tables.place_output)

        mapper(Place, place)
        mapper(Transition, transition, properties=dict(
            inputs=relationship(
                Place, place_output,
                backref=backref('inputs', order_by=transition.c.transition_id),
                order_by=Place.place_id),
            outputs=relationship(
                Place, place_input,
                backref=backref('outputs',
                                order_by=transition.c.transition_id),
                order_by=Place.place_id),
        ))

        t1 = Transition('transition1')
        t2 = Transition('transition2')
        t3 = Transition('transition3')
        p1 = Place('place1')
        p2 = Place('place2')
        p3 = Place('place3')

        sess = Session()
        sess.add_all([p3, p1, t1, t2, p2, t3])

        t1.inputs.append(p1)
        t1.inputs.append(p2)
        t1.outputs.append(p3)
        t2.inputs.append(p1)
        p2.inputs.append(t2)
        p3.inputs.append(t2)
        p1.outputs.append(t1)
        sess.commit()

        self.assert_result([t1],
                           Transition, {'outputs':
                                        (Place, [{'name': 'place3'},
                                                 {'name': 'place1'}])})
        self.assert_result([p2],
                           Place, {'inputs':
                                   (Transition, [{'name': 'transition1'},
                                                 {'name': 'transition2'}])})
Example 13
def reload_mapper(metadata, now):
    """<comment-ja>
    Machine(Model)のマッパーをリロードします。
    @param metadata: リロードしたいMetaData
    @type metadata: sqlalchemy.schema.MetaData
    @param now: now
    @type now: Datatime
    </comment-ja>
    <comment-en>
    TODO: English Comment
    </comment-en>
    """
    t_machine = get_machine_table(metadata, now)
    t_machine_tag = metadata.tables['machine2tag']
    t_user = metadata.tables['user']
    
    mapper(Machine, t_machine, properties={
        'children' : relation(Machine,
                              backref=backref('parent',
                                              remote_side=[t_machine.c.id])),
        'notebook' : relation(karesansui.db.model.notebook.Notebook),
        'created_user' : relation(karesansui.db.model.user.User,
                                  primaryjoin=t_machine.c.created_user_id==t_user.c.id),
        'modified_user' : relation(karesansui.db.model.user.User,
                                  primaryjoin=t_machine.c.modified_user_id==t_user.c.id),
        'tags' : relation(karesansui.db.model.tag.Tag,
                         secondary=t_machine_tag,
                         backref="machine"),
        })
Example 14
def upgrade(migrate_engine):
    # Upgrade operations go here. Don't create your own engine; bind
    # migrate_engine to your metadata
    Session = sessionmaker(bind=migrate_engine)
    Base.metadata.bind = migrate_engine

    class User(Base):
        __tablename__ = 'account_user'
        __table_args__ = {'autoload': True}

    Measurement.user = relationship(
        'User',
        backref=backref('measurements',
                        order_by=Measurement.m_date,
                        lazy='dynamic'))

    Base.metadata.create_all()

    # Add initial types
    session = Session()
    session.add_all([MeasurementType('Weight'),
                     MeasurementType('Height'),
                     MeasurementType('Waist')])
    session.commit()
    session.close()
Example 15
        def make_relationship(definition):
            name = definition.attrib['relationshipname']
            relationship_type = definition.attrib['type']

            try:
                remote_class = classes[
                    definition.attrib['classname'].split('.')[-1]]

                column = getattr(table.c, definition.attrib['columnname'])
            except KeyError:
                return

            relationship_args = {'foreign_keys': column}

            try:
                other_side_name = definition.attrib['othersidename']
            except KeyError:
                pass
            else:
                backref_args = {'uselist': relationship_type != 'one-to-one'}
                if remote_class is cls:
                    backref_args['remote_side'] = table.c[tabledef.find('id').attrib['column']]

                relationship_args['backref'] = orm.backref(other_side_name, **backref_args)

            return name, orm.relationship(remote_class, **relationship_args)
Example 16
 def parent(cls):
     return relationship(
         "Product",
         backref=backref("variation", uselist=False),
         foreign_keys="ProductVariation.parent_id",
         info={"label": _(u"parent product")},
     )
Example 17
    def create_properties( self ):
        if self.property or self.backref:
            return

        kwargs = self.get_prop_kwargs()
        if 'order_by' in kwargs:
            kwargs['order_by'] = \
                self.target._descriptor.translate_order_by( kwargs['order_by'] )
            
        # viewonly relationships need to create "standalone" relations (ie
        # shouldn't be a backref of another relation).
        if self.inverse and not kwargs.get( 'viewonly', False ):
            # check if the inverse was already processed (and thus has already
            # defined a backref we can use)
            if self.inverse.backref:
                # let the user override the backref argument
                if 'backref' not in kwargs:
                    kwargs['backref'] = self.inverse.backref
            else:
                # SQLAlchemy doesn't like when 'secondary' is both defined on
                # the relation and the backref
                kwargs.pop('secondary', None)

                # define backref for use by the inverse
                self.backref = backref( self.name, **kwargs )
                return
        
        self.property = relationship( self.target, **kwargs )
        setattr( self.entity, self.name, self.property )
Example 18
 def setup_mappers(cls):
     mapper(T1, t1, properties=dict(t2=relationship(T2,
            cascade='all, delete-orphan', single_parent=True)))
     mapper(T2, t2, properties=dict(t3=relationship(T3,
            cascade='all, delete-orphan', single_parent=True,
            backref=backref('t2', uselist=False))))
     mapper(T3, t3)
Example 19
    def test_one(self):
        p_m = mapper(Part, parts)

        mapper(InheritedPart, inherited_part, properties=dict(
            part=relation(Part, lazy=False)))

        d_m = mapper(Design, design, properties=dict(
            inheritedParts=relation(InheritedPart,
                                    cascade="all, delete-orphan",
                                    backref="design")))

        mapper(DesignType, design_types)

        d_m.add_property(
            "type", relation(DesignType, lazy=False, backref="designs"))

        p_m.add_property(
            "design", relation(
                Design, lazy=False,
                backref=backref("parts", cascade="all, delete-orphan")))


        d = Design()
        sess = create_session()
        sess.add(d)
        sess.flush()
        sess.expunge_all()
        x = sess.query(Design).get(1)
        x.inheritedParts
Example 20
 def parent(cls):
     return relationship("%s" % cls.__name__,
             backref=backref("children", enable_typechecks=False),
             order_by="%s.lft" % cls.__name__,
             remote_side="%s.id" % cls.__name__,
             primaryjoin=("%s.id==%s.parent_id" % (cls.__name__, cls.__name__)),
             enable_typechecks=False)
Example 21
    def test_table_binds(self):

        # ensure tables are unbound
        m2 = sa.MetaData()
        users_unbound = users.tometadata(m2)
        addresses_unbound = addresses.tometadata(m2)

        mapper(Address, addresses_unbound)
        mapper(User, users_unbound, properties={
            'addresses':relationship(Address,
                                 backref=backref("user", cascade="all"),
                                 cascade="all")})

        Session = sessionmaker(binds={users_unbound: self.metadata.bind,
                                      addresses_unbound: self.metadata.bind})
        sess = Session()

        u1 = User(id=1, name='ed')
        sess.add(u1)
        eq_(sess.query(User).filter(User.id==1).all(),
            [User(id=1, name='ed')])

        sess.execute(users_unbound.insert(), params=dict(id=2, name='jack'))

        eq_(sess.execute(users_unbound.select(users_unbound.c.id
            == 2)).fetchall(), [(2, 'jack')])

        eq_(sess.execute(users_unbound.select(User.id == 2)).fetchall(),
            [(2, 'jack')])

        sess.execute(users_unbound.delete())
        eq_(sess.execute(users_unbound.select()).fetchall(), [])

        sess.close()
Example 22
 def ref_table(cls):
     if one_to_one:
         if backref_name:
             cls._readable_name = backref_name
         if not isinstance(ref_model, str):
             if ref_name:
                 ref_model._readable_name = ref_name
             cls._one_to_models.append(ref_model)
             ref_model._one_to_models.append(cls)
     else:
         if backref_name:
             cls._readable_names = backref_name
         if not isinstance(ref_model, str):
             if ref_name:
                 ref_model._readable_name = ref_name
             cls._many_to_models.append(ref_model)
             ref_model._one_to_models.append(cls)
     model_name = cls.__name__
     table_name = cls._readable_name
     setattr(cls, foreign_key, Column(Integer, ForeignKey("{0}.id".format(ref_table_name), ondelete="CASCADE")))
     my_backref_name = backref_name or (table_name if one_to_one else "{0}s".format(table_name))
     backref_options = dict(uselist=False) if one_to_one else dict(lazy="dynamic")
     backref_options["cascade"] = "all"
     setattr(
         cls,
         ref_name,
         relationship(
             ref_model_name,
             primaryjoin="{0}.{1} == {2}.id".format(model_name, foreign_key, ref_model_name),
             backref=backref(my_backref_name, **backref_options),
             remote_side="{0}.id".format(ref_model_name),
         ),
     )
     return cls
Example 23
def includeme(config):
    tables = config.registry['metadata'].tables

    config.include('amnesia.modules.content.mapper')
    config.include('amnesia.modules.content_type.mapper')

    orm.mapper(Folder, tables['folder'], inherits=Content,
        polymorphic_identity=get_type_id(config, 'folder'),
        inherit_condition=tables['folder'].c.content_id ==
               tables['content'].c.id,
        properties={
            'alternate_index': orm.relationship(
                Content,
                primaryjoin=tables['folder'].c.index_content_id ==
                tables['content'].c.id,

                innerjoin=True,
                uselist=False,
                post_update=True,
                backref=orm.backref('indexes')
            ),

            'polymorphic_children': orm.relationship(
                ContentType,
                secondary=tables['folder_polymorphic_loading']
            )
        }
    )
Example 24
        def ref_table(cls):
            if backref_name:
                cls._readable_names = backref_name
            if not isinstance(ref_model, str):
                ref_model._readable_names = ref_name
                cls._many_to_models.append(ref_model)
                ref_model._many_to_models.append(cls)
            table_name = cls._readable_name

            my_middle_table_name = middle_table_name or '{0}_{1}'.format(table_name, ref_table_name)
            if table_name == ref_table_name:
                left_column_name = 'left_id'
                right_column_name = 'right_id'
            else:
                left_column_name = '{0}_id'.format(table_name)
                right_column_name = '{0}_id'.format(ref_table_name)
            middle_table = Table(my_middle_table_name, Database.Base.metadata,
                Column(left_column_name, Integer, ForeignKey('{0}.{1}'.format(table_name, cls.__id__), ondelete = "CASCADE"), primary_key = True),
                Column(right_column_name, Integer, ForeignKey('{0}.{1}'.format(ref_table_name, ref_model.__id__), ondelete = "CASCADE"), primary_key = True))

            my_backref_name = backref_name or '{0}s'.format(table_name)
            parameters = dict(secondary = middle_table, lazy = 'dynamic', backref = backref(my_backref_name, lazy = 'dynamic'))
            if table_name == ref_table_name:
                parameters['primaryjoin'] = getattr(cls, cls.__id__) == middle_table.c.left_id
                parameters['secondaryjoin'] = getattr(cls, cls.__id__) == middle_table.c.right_id

            setattr(cls, ref_name, relationship(ref_model_name, **parameters))

            return cls
Example 25
 def payee(cls):
     return relationship(
         'PAYEE', backref=backref('invbanktrans', 
                                  cascade='all, delete-orphan',
                                  passive_deletes=True,
                                 )
     )
Example 26
 def acctfrom(cls):
     return relationship(
     'INVACCTFROM', backref=backref('invtrans', 
                                    cascade='all, delete-orphan',
                                    passive_deletes=True,
                                   )
     )
Example 27
 def acctfrom(cls):
     return relationship(
         'ACCTFROM', backref=backref('%ss' % cls.__name__.lower(),
                                     cascade='all, delete-orphan',
                                     passive_deletes=True,
                                    )
     )
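
These payee()/acctfrom() helpers (and the acctto() one below) all hang the same options off the backref: cascade='all, delete-orphan' makes the generated collection own its children, and passive_deletes=True tells SQLAlchemy to leave child-row deletion to the database's ON DELETE CASCADE instead of loading and deleting each row itself. A hedged sketch with toy Account/Transaction names, assuming the foreign key is declared with ondelete='CASCADE':

from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Account(Base):
    __tablename__ = 'account'
    id = Column(Integer, primary_key=True)

class Transaction(Base):
    __tablename__ = 'transaction'
    id = Column(Integer, primary_key=True)
    # ON DELETE CASCADE on the FK pairs with passive_deletes=True below
    account_id = Column(Integer,
                        ForeignKey('account.id', ondelete='CASCADE'),
                        nullable=False)
    # the backref puts a 'transactions' collection on Account; deleting an
    # Account deletes its transactions, with the actual child DELETEs left
    # to the database rather than emitted per row by the ORM
    account = relationship(
        Account,
        backref=backref('transactions',
                        cascade='all, delete-orphan',
                        passive_deletes=True))
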
Example 28
    def test_useget_cancels_eager_propagated_present(self):
        """test that a one to many lazyload cancels the unnecessary
        eager many-to-one join on the other side, even when a propagated
        option is present."""

        User = self.classes.User
        Address = self.classes.Address

        mapper(User, self.tables.users)
        mapper(Address, self.tables.addresses, properties={
            'user': relationship(
                User, lazy='joined',
                backref=backref('addresses', lazy='baked_select')
            )
        })

        from sqlalchemy.orm.interfaces import MapperOption

        class MyBogusOption(MapperOption):
            propagate_to_loaders = True

        sess = Session()
        u1 = sess.query(User).options(MyBogusOption()).filter(User.id == 8).one()

        def go():
            eq_(u1.addresses[0].user, u1)
        self.assert_sql_execution(
            testing.db, go,
            CompiledSQL(
                "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                "addresses_user_id, addresses.email_address AS "
                "addresses_email_address FROM addresses WHERE :param_1 = "
                "addresses.user_id",
                {'param_1': 8})
            )
Example 29
    def test_expunge_cascade(self):
        Address, addresses, users, User = (self.classes.Address,
                                self.tables.addresses,
                                self.tables.users,
                                self.classes.User)

        mapper(Address, addresses)
        mapper(User, users, properties={
            'addresses': relationship(Address,
                                 backref=backref("user", cascade="all"),
                                 cascade="all")})

        session = create_session()
        u = session.query(User).filter_by(id=7).one()

        # get everything to load in both directions
        print([a.user for a in u.addresses])

        # then see if expunge fails
        session.expunge(u)

        assert sa.orm.object_session(u) is None
        assert sa.orm.attributes.instance_state(u).session_id is None
        for a in u.addresses:
            assert sa.orm.object_session(a) is None
            assert sa.orm.attributes.instance_state(a).session_id is None
Example 30
 def acctto(cls):
     return relationship(
         'ACCTTO', backref=backref('invbanktrans', 
                                   cascade='all, delete-orphan',
                                   passive_deletes=True,
                                  )
     )
Example 31
class Tag(Base):
    """Represents an association between a File and an Entity.

    Parameters
    ----------
    file : BIDSFile
        The associated BIDSFile.
    entity : Entity
        The associated Entity.
    value : json-serializable type
        The value to store for this file/entity pair. Must be of type
        str, int, float, bool, or any json-serializable structure.
    dtype : str
        Optional type for the value field. If None, inferred from
        value. If passed, must be one of str, int, float, bool, or json.
        Any other value will be treated as json (and will fail if the
        value can't be serialized to json).
    """
    __tablename__ = 'tags'

    file_path = Column(String, ForeignKey('files.path'), primary_key=True)
    entity_name = Column(String, ForeignKey('entities.name'), primary_key=True)
    _value = Column(String, nullable=False)
    _dtype = Column(String, default='str')

    file = relationship(
        'BIDSFile',
        backref=backref(
            "tags",
            collection_class=attribute_mapped_collection("entity_name")))
    entity = relationship(
        'Entity',
        backref=backref(
            "tags", collection_class=attribute_mapped_collection("file_path")))

    def __init__(self, file, entity, value, dtype=None):

        if dtype is None:
            dtype = type(value)

        self.value = value

        if not isinstance(dtype, str):
            dtype = dtype.__name__
        if dtype not in ('str', 'float', 'int', 'bool'):
            # Try serializing to JSON first
            try:
                value = json.dumps(value)
                dtype = 'json'
            except (TypeError, ValueError):
                raise ValueError(
                    "Passed value has an invalid dtype ({}). Must be one of "
                    "str, int, float, bool, or json.".format(dtype))
        value = str(value)
        self.file_path = file.path
        self.entity_name = entity.name

        self._value = value
        self._dtype = dtype

        self._init_on_load()

    def __repr__(self):
        msg = "<Tag file:{!r} entity:{!r} value:{!r}>"
        return msg.format(self.file_path, self.entity_name, self.value)

    @reconstructor
    def _init_on_load(self):
        if self._dtype not in ('str', 'float', 'int', 'bool', 'json'):
            raise ValueError("Invalid dtype '{}'. Must be one of 'int', "
                             "'float', 'bool', 'str', or 'json'.".format(
                                 self._dtype))
        if self._dtype == 'json':
            self.value = json.loads(self._value)
            self.dtype = 'json'
        else:
            self.dtype = eval(self._dtype)
            self.value = self.dtype(self._value)
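
The two backrefs above use attribute_mapped_collection, so BIDSFile.tags behaves as a dict keyed by entity name and Entity.tags as a dict keyed by file path. A self-contained, hedged sketch of that collection_class pattern with toy Item/Note models (not the pybids schema):

from sqlalchemy import Column, Integer, String, ForeignKey, create_engine
from sqlalchemy.orm import relationship, backref, sessionmaker
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Item(Base):
    __tablename__ = 'item'
    id = Column(Integer, primary_key=True)

class Note(Base):
    __tablename__ = 'note'
    id = Column(Integer, primary_key=True)
    item_id = Column(Integer, ForeignKey('item.id'))
    keyword = Column(String(50))
    text = Column(String(200))

    # the 'notes' backref on Item is a dict keyed by each Note's 'keyword'
    item = relationship(
        Item,
        backref=backref('notes',
                        collection_class=attribute_mapped_collection('keyword')))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

item = Item()
session.add_all([item, Note(item=item, keyword='color', text='blue')])
session.commit()
print(item.notes['color'].text)   # -> blue
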
Example 32
class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
    UNKNOWN_STATE = "unknown"
    OK_STATE = "ok"
    TRIGGERED_STATE = "triggered"

    id = Column(db.Integer, primary_key=True)
    name = Column(db.String(255))
    query_id = Column(db.Integer, db.ForeignKey("queries.id"))
    query_rel = db.relationship(Query,
                                backref=backref("alerts", cascade="all"))
    user_id = Column(db.Integer, db.ForeignKey("users.id"))
    user = db.relationship(User, backref="alerts")
    options = Column(MutableDict.as_mutable(PseudoJSON))
    state = Column(db.String(255), default=UNKNOWN_STATE)
    subscriptions = db.relationship("AlertSubscription",
                                    cascade="all, delete-orphan")
    last_triggered_at = Column(db.DateTime(True), nullable=True)
    rearm = Column(db.Integer, nullable=True)

    __tablename__ = "alerts"

    @classmethod
    def all(cls, group_ids):
        return (cls.query.options(joinedload(
            Alert.user), joinedload(Alert.query_rel)).join(Query).join(
                DataSourceGroup,
                DataSourceGroup.data_source_id == Query.data_source_id).filter(
                    DataSourceGroup.group_id.in_(group_ids)))

    @classmethod
    def get_by_id_and_org(cls, object_id, org):
        return super(Alert, cls).get_by_id_and_org(object_id, org, Query)

    def evaluate(self):
        data = self.query_rel.latest_query_data.data

        if data["rows"] and self.options["column"] in data["rows"][0]:
            op = OPERATORS.get(self.options["op"], lambda v, t: False)

            value = data["rows"][0][self.options["column"]]
            threshold = self.options["value"]

            new_state = next_state(op, value, threshold)
        else:
            new_state = self.UNKNOWN_STATE

        return new_state

    def subscribers(self):
        return User.query.join(AlertSubscription).filter(
            AlertSubscription.alert == self)

    def render_template(self, template):
        if template is None:
            return ""

        data = self.query_rel.latest_query_data.data
        host = base_url(self.query_rel.org)

        col_name = self.options["column"]
        if data["rows"] and col_name in data["rows"][0]:
            result_value = data["rows"][0][col_name]
        else:
            result_value = None

        context = {
            "ALERT_NAME": self.name,
            "ALERT_URL": "{host}/alerts/{alert_id}".format(host=host, alert_id=self.id),
            "ALERT_STATUS": self.state.upper(),
            "ALERT_CONDITION": self.options["op"],
            "ALERT_THRESHOLD": self.options["value"],
            "QUERY_NAME": self.query_rel.name,
            "QUERY_URL": "{host}/queries/{query_id}".format(host=host, query_id=self.query_rel.id),
            "QUERY_RESULT_VALUE": result_value,
            "QUERY_RESULT_ROWS": data["rows"],
            "QUERY_RESULT_COLS": data["columns"],
        }
        return mustache_render(template, context)

    @property
    def custom_body(self):
        template = self.options.get("custom_body",
                                    self.options.get("template"))
        return self.render_template(template)

    @property
    def custom_subject(self):
        template = self.options.get("custom_subject")
        return self.render_template(template)

    @property
    def groups(self):
        return self.query_rel.groups

    @property
    def muted(self):
        return self.options.get("muted", False)
Example 33
class Invoice(Base):
    __tablename__ = 'invoice'

    id = Column(Integer, autoincrement=True, primary_key=True)
    date = Column(
        Date,
        nullable=False,
    )
    active_until = Column(
        Date,
        nullable=False,
    )
    resource_id = Column(
        Integer,
        ForeignKey(
            'resource.id',
            name="fk_resource_id_invoice",
            ondelete='restrict',
            onupdate='cascade',
        ),
        nullable=False,
    )
    order_id = Column(
        Integer,
        ForeignKey(
            'order.id',
            name="fk_order_id_invoice",
            ondelete='restrict',
            onupdate='cascade',
        ),
        nullable=False,
    )
    account_id = Column(
        Integer,
        ForeignKey(
            'account.id',
            name="fk_account_id_invoice",
            ondelete='restrict',
            onupdate='cascade',
        ),
        nullable=False,
    )
    descr = Column(String(length=255))
    resource = relationship(
        'Resource',
        backref=backref('invoice', uselist=False, cascade="all,delete"),
        foreign_keys=[resource_id],
        cascade="all,delete",
        uselist=False,
    )
    order = relationship(
        'Order',
        backref=backref(
            'invoice',
            uselist=False,
        ),
        uselist=False,
    )
    account = relationship(
        'Account',
        backref=backref('invoices', uselist=True, lazy="dynamic"),
        uselist=False,
    )

    @classmethod
    def get(cls, id):
        if id is None:
            return None
        return DBSession.query(cls).get(id)

    @classmethod
    def by_resource_id(cls, resource_id):
        if resource_id is None:
            return None
        return (DBSession.query(cls).filter(
            cls.resource_id == resource_id).first())

    @property
    def final_price(self):
        return sum([item.final_price for item in self.invoices_items])

    @property
    def discount(self):
        return sum([item.discount for item in self.invoices_items])

    @property
    def vat(self):
        return sum([item.vat for item in self.invoices_items])
Example 34
class Post(Model):
    # Table fields
    __tablename__ = 'wp_posts'
    ID = Column(Integer, primary_key=True, nullable=False)
    post_author = Column(Integer,
                         ForeignKey('wp_users.ID'),
                         nullable=False,
                         default=0)
    post_date = Column(DateTime(timezone=False),
                       nullable=False,
                       default=datetime.utcnow)
    post_date_gmt = Column(DateTime(timezone=False),
                           nullable=False,
                           default=datetime.utcnow)
    post_content = Column(Text(length=None), nullable=False)
    post_title = Column(Text(length=None), nullable=False)
    post_excerpt = Column(Text(length=None), nullable=False, default='')
    post_status = Column(String(length=10), nullable=False, default='publish')
    comment_status = Column(String(length=15), nullable=False, default='open')
    ping_status = Column(String(length=6), nullable=False, default='open')
    post_password = Column(String(length=20), nullable=False, default='')
    post_name = Column(String(length=200), nullable=False)
    to_ping = Column(Text(length=None), nullable=False, default='')
    pinged = Column(Text(length=None), nullable=False, default='')
    post_modified = Column(DateTime(timezone=False),
                           nullable=False,
                           default=datetime.utcnow)
    post_modified_gmt = Column(DateTime(timezone=False),
                               nullable=False,
                               default=datetime.utcnow)
    post_content_filtered = Column(Text(length=None),
                                   nullable=False,
                                   default='')
    post_parent = Column(Integer,
                         ForeignKey('wp_posts.ID'),
                         nullable=False,
                         default=0)
    guid = Column(String(length=255), nullable=False, default='')
    menu_order = Column(Integer, nullable=False, default=0)
    post_type = Column(String(length=20), nullable=False, default='post')
    post_mime_type = Column(String(length=100), nullable=False, default='')
    comment_count = Column(Integer, nullable=False, default=0)

    # ORM layer relationships
    author = relationship('User', back_populates='posts')
    children = relationship('Post',
                            backref=backref('parent', remote_side=[ID]))
    comments = relationship('Comment', back_populates="post")
    meta = relationship(
        'PostMeta',
        collection_class=attribute_mapped_collection('meta_key'),
        back_populates="post")

    # terms = relationship(
    #     "Term",
    #     secondary=TERM_RELATIONSHIP_TABLE,
    #     back_populates='posts')

    terms = relationship(
        'Term',
        secondary=TermRelationship.__table__,
        primaryjoin='and_(Post.ID == TermRelationship.object_id)',
        secondaryjoin='and_(TermRelationship.term_taxonomy_id == Term.id)')

    def __repr__(self):
        return f"<WpPost(ID={self.ID} post_type='{self.post_type}' post_title='{self.post_title}')>\n"

    def addMeta(self, key, value):
        if self.meta.get(key) is None:
            m = PostMeta(key, value)
            m.post = self
        else:
            self.meta[key].meta_value = value
Example 35
class Malware(Base):
    __tablename__ = 'malware'

    id = Column(Integer(), primary_key=True)
    name = Column(String(255), nullable=True)
    size = Column(Integer(), nullable=False)
    type = Column(Text(), nullable=True)
    mime = Column(String(255), nullable=True)
    md5 = Column(String(32), nullable=False, index=True)
    crc32 = Column(String(8), nullable=False)
    sha1 = Column(String(40), nullable=False)
    sha256 = Column(String(64), nullable=False, index=True)
    sha512 = Column(String(128), nullable=False)
    ssdeep = Column(String(255), nullable=True)
    created_at = Column(DateTime(timezone=False),
                        # pass the callable so the timestamp is set per insert
                        default=datetime.now,
                        nullable=False)
    parent_id = Column(Integer(), ForeignKey('malware.id'))
    parent = relationship('Malware', lazy='subquery', remote_side=[id])
    tag = relationship('Tag',
                       secondary=association_table,
                       backref=backref('malware'))
    note = relationship('Note',
                        cascade='all, delete',
                        secondary=association_table,
                        backref=backref('malware'))
    analysis = relationship('Analysis',
                            cascade='all, delete',
                            secondary=association_table,
                            backref=backref('malware'))
    __table_args__ = (Index('hash_index',
                            'md5',
                            'crc32',
                            'sha1',
                            'sha256',
                            'sha512',
                            unique=True), )

    def to_dict(self):
        row_dict = {}
        for column in self.__table__.columns:
            value = getattr(self, column.name)
            row_dict[column.name] = value

        return row_dict

    def __repr__(self):
        return "<Malware ('{0}','{1}')>".format(self.id, self.md5)

    def __init__(self,
                 md5,
                 crc32,
                 sha1,
                 sha256,
                 sha512,
                 size,
                 type=None,
                 mime=None,
                 ssdeep=None,
                 name=None,
                 parent=None):
        self.md5 = md5
        self.sha1 = sha1
        self.crc32 = crc32
        self.sha256 = sha256
        self.sha512 = sha512
        self.size = size
        self.type = type
        self.mime = mime
        self.ssdeep = ssdeep
        self.name = name
        self.parent = parent
Example 36
class Calendar(MailSyncBase, HasPublicID, HasRevisions, UpdatedAtMixin, DeletedAtMixin):
    API_OBJECT_NAME = "calendar"
    namespace_id = Column(ForeignKey(Namespace.id, ondelete="CASCADE"), nullable=False)

    namespace = relationship(
        Namespace, load_on_pending=True, backref=backref("calendars")
    )

    name = Column(String(MAX_INDEXABLE_LENGTH), nullable=True)
    provider_name = Column(String(128), nullable=True, default="DEPRECATED")
    description = Column(Text, nullable=True)

    # A server-provided unique ID.
    uid = Column(String(767, collation="ascii_general_ci"), nullable=False)

    read_only = Column(Boolean, nullable=False, default=False)

    last_synced = Column(DateTime, nullable=True)

    gpush_last_ping = Column(DateTime)
    gpush_expiration = Column(DateTime)

    __table_args__ = (
        UniqueConstraint("namespace_id", "provider_name", "name", "uid", name="uuid"),
    )

    @property
    def should_suppress_transaction_creation(self):
        if self in object_session(self).new or self in object_session(self).deleted:
            return False
        obj_state = inspect(self)
        return not (
            obj_state.attrs.name.history.has_changes()
            or obj_state.attrs.description.history.has_changes()
            or obj_state.attrs.read_only.history.has_changes()
        )

    def update(self, calendar):
        self.uid = calendar.uid
        self.name = calendar.name[:MAX_INDEXABLE_LENGTH]
        self.read_only = calendar.read_only
        self.description = calendar.description

    def new_event_watch(self, expiration):
        """
        Google gives us expiration as a timestamp in milliseconds
        """
        expiration = datetime.fromtimestamp(int(expiration) / 1000.0)
        self.gpush_expiration = expiration
        self.gpush_last_ping = datetime.utcnow()

    def handle_gpush_notification(self):
        self.gpush_last_ping = datetime.utcnow()

    def can_sync(self):
        if self.name == "Emailed events" and self.uid == "inbox":
            # This is our own internal calendar
            return False

        # Common to the Birthdays and Holidays calendars.
        # If you try to watch Holidays, you get a 404.
        # If you try to watch Birthdays, you get a cryptic 'Missing Title'
        # error. Thanks, Google.
        if "group.v.calendar.google.com" in self.uid:
            return False

        # If you try to watch "Phases of the Moon" or holiday calendars, you
        # get 400 ("Push notifications are not supported by this resource.")
        if self.uid == "*****@*****.**":
            return False

        if "holiday.calendar.google.com" in self.uid:
            return False

        return True

    def needs_new_watch(self):
        if not self.can_sync():
            return False

        return (
            self.gpush_expiration is None or self.gpush_expiration < datetime.utcnow()
        )

    def should_update_events(self, max_time_between_syncs, poll_frequency):
        """
        max_time_between_syncs: a timedelta object. The maximum amount of
        time we should wait until we sync, even if we haven't received
        any push notifications

        poll_frequency: a timedelta object. Amount of time we should wait until
        we sync if we don't have working push notifications.
        """
        # TODO: what do we do about calendars we cannot watch?
        if not self.can_sync():
            return False

        now = datetime.utcnow()

        return (
            # Never synced
            self.last_synced is None
            or
            # Push notifications channel is stale (and we didn't just sync it)
            (self.needs_new_watch() and now > self.last_synced + poll_frequency)
            or
            # Too much time has passed not to sync
            now > self.last_synced + max_time_between_syncs
            or
            # Events are stale, according to the push notifications
            (
                self.gpush_last_ping is not None
                and self.gpush_last_ping > self.last_synced
            )
        )
Example 37
class SubGraphIdeaAssociation(DiscussionBoundBase):
    """Association table saying that an Idea is part of a ExplicitSubGraphView"""
    __tablename__ = 'sub_graph_idea_association'
    __table_args__ = (UniqueConstraint("idea_id", "sub_graph_id"), )

    id = Column(Integer, primary_key=True)
    sub_graph_id = Column(Integer,
                          ForeignKey('explicit_sub_graph_view.id',
                                     ondelete="CASCADE",
                                     onupdate="CASCADE"),
                          index=True,
                          nullable=False)
    sub_graph = relationship("ExplicitSubGraphView",
                             backref=backref("idea_assocs",
                                             cascade="all, delete-orphan"))
    idea_id = Column(Integer,
                     ForeignKey('idea.id',
                                ondelete="CASCADE",
                                onupdate="CASCADE"),
                     nullable=False,
                     index=True)
    # reference to the "Idea" object for proxying
    idea = relationship("Idea")

    @classmethod
    def special_quad_patterns(cls, alias_maker, discussion_id):
        idea_assoc = alias_maker.alias_from_class(cls)
        idea_alias = alias_maker.alias_from_relns(cls.idea)
        # Assume tombstone status of target is similar to source, for now.
        conditions = [(idea_assoc.idea_id == idea_alias.id),
                      (idea_alias.tombstone_date == None)]
        if discussion_id:
            conditions.append((idea_alias.discussion_id == discussion_id))
        return [
            QuadMapPatternS(Idea.iri_class().apply(idea_assoc.idea_id),
                            IDEA.inMap,
                            IdeaGraphView.iri_class().apply(
                                idea_assoc.sub_graph_id),
                            conditions=conditions,
                            name=QUADNAMES.sub_graph_idea_assoc_reln)
        ]

    def get_discussion_id(self):
        sub_graph = self.sub_graph or IdeaGraphView.get(self.sub_graph_id)
        return sub_graph.get_discussion_id()

    @classmethod
    def get_discussion_conditions(cls, discussion_id, alias_maker=None):
        return ((cls.sub_graph_id == ExplicitSubGraphView.id),
                (ExplicitSubGraphView.discussion_id == discussion_id))

    discussion = relationship(
        Discussion,
        viewonly=True,
        uselist=False,
        secondary=Idea.__table__,
        info={'rdf': QuadMapPatternS(None, ASSEMBL.in_conversation)})

    def unique_query(self):
        # documented in lib/sqla
        idea_id = self.idea_id or self.idea.id
        subgraph_id = self.sub_graph_id or self.sub_graph.id
        return self.db.query(self.__class__).filter_by(
            idea_id=idea_id, sub_graph_id=subgraph_id), True

    # @classmethod
    # def special_quad_patterns(cls, alias_maker, discussion_id):
    #     return [QuadMapPatternS(
    #         Idea.iri_class().apply(cls.source_id),
    #         IDEA.includes,
    #         Idea.iri_class().apply(cls.target_id),
    #         name=QUADNAMES.idea_inclusion_reln)]

    crud_permissions = CrudPermissions(P_ADMIN_DISC)
Example 38
class SubGraphIdeaLinkAssociation(DiscussionBoundBase):
    """Association table saying that an IdeaLink is part of a ExplicitSubGraphView"""
    __tablename__ = 'sub_graph_idea_link_association'
    id = Column(Integer, primary_key=True)
    __table_args__ = (UniqueConstraint("idea_link_id", "sub_graph_id"), )

    sub_graph_id = Column(Integer,
                          ForeignKey('explicit_sub_graph_view.id',
                                     ondelete="CASCADE",
                                     onupdate="CASCADE"),
                          index=True,
                          nullable=False)
    sub_graph = relationship("ExplicitSubGraphView",
                             backref=backref("idealink_assocs",
                                             cascade="all, delete-orphan"))

    idea_link_id = Column(Integer,
                          ForeignKey('idea_idea_link.id',
                                     ondelete="CASCADE",
                                     onupdate="CASCADE"),
                          index=True,
                          nullable=False)

    # reference to the "IdeaLink" object for proxying
    idea_link = relationship("IdeaLink")

    @classmethod
    def special_quad_patterns(cls, alias_maker, discussion_id):
        idea_link_assoc = alias_maker.alias_from_class(cls)
        idea_link_alias = alias_maker.alias_from_relns(cls.idea_link)
        # Assume tombstone status of target is similar to source, for now.
        conditions = [(idea_link_assoc.idea_link_id == idea_link_alias.id),
                      (idea_link_alias.tombstone_date == None)]
        if discussion_id:
            conditions.extend(
                cls.get_discussion_conditions(discussion_id, alias_maker))

        return [
            QuadMapPatternS(
                IdeaLink.iri_class().apply(idea_link_assoc.idea_link_id),
                IDEA.inMap,
                IdeaGraphView.iri_class().apply(idea_link_assoc.sub_graph_id),
                conditions=conditions,
                name=QUADNAMES.sub_graph_idea_link_assoc_reln)
        ]

    def get_discussion_id(self):
        sub_graph = self.sub_graph or IdeaGraphView.get(self.sub_graph_id)
        return sub_graph.get_discussion_id()

    def unique_query(self):
        # documented in lib/sqla
        idea_link_id = self.idea_link_id or self.idea_link.id
        subgraph_id = self.sub_graph_id or self.sub_graph.id
        return self.db.query(self.__class__).filter_by(
            idea_link_id=idea_link_id, sub_graph_id=subgraph_id), True

    @classmethod
    def get_discussion_conditions(cls, discussion_id, alias_maker=None):
        if alias_maker:
            subgraph_alias = alias_maker.alias_from_relns(cls.sub_graph)
            return ((subgraph_alias.discussion_id == discussion_id))
        else:
            return ((cls.sub_graph_id == ExplicitSubGraphView.id),
                    (ExplicitSubGraphView.discussion_id == discussion_id))

    crud_permissions = CrudPermissions(P_ADMIN_DISC)
Example 39
class Comment(Model):
    # Table fields
    __tablename__ = 'wp_comments'
    comment_ID = Column(Integer, primary_key=True, nullable=False)
    comment_post_ID = Column(Integer,
                             ForeignKey('wp_posts.ID'),
                             nullable=False)
    comment_author = Column(Text(length=None), nullable=False, default='')
    comment_author_email = Column(String(length=100),
                                  nullable=False,
                                  default='')
    comment_author_url = Column(String(length=200), nullable=False, default='')
    comment_author_IP = Column(String(length=100), nullable=False, default='')
    comment_date = Column(DateTime(timezone=False),
                          nullable=False,
                          default=datetime.utcnow)
    comment_date_gmt = Column(DateTime(timezone=False),
                              nullable=False,
                              default=datetime.utcnow)

    comment_content = Column(Text(length=None), nullable=False)
    comment_karma = Column(Integer, nullable=False, default=0)
    comment_approved = Column(String(length=4), nullable=False)
    comment_agent = Column(String(length=255), nullable=False, default='')
    comment_type = Column(String(length=20), nullable=False, default='')
    comment_parent = Column(Integer,
                            ForeignKey('wp_comments.comment_ID'),
                            nullable=False,
                            default=0)
    user_id = Column(Integer, ForeignKey('wp_users.ID'), nullable=False)

    # ORM layer relationships
    post = relationship('Post', back_populates="comments")
    children = relationship('Comment',
                            backref=backref('parent',
                                            remote_side=[comment_ID]))
    user = relationship('User', back_populates="comments")
    meta = relationship(
        'CommentMeta',
        collection_class=attribute_mapped_collection('meta_key'),
        back_populates='comment')

    likes = relationship(
        'ULikeComments',
        collection_class=attribute_mapped_collection('user_id'),
        back_populates='comment')

    def __repr__(self):
        return f"<WpComment(meta_id={self.comment_ID} comment_content='{self.comment_content}' comment_type='{self.comment_type}')"

    def addMeta(self, key, value):
        if self.meta.get(key) is None:
            m = CommentMeta(key, value)
            m.comment = self
        else:
            self.meta[key].meta_value = value

    def addLike(self, user_id, status, date_time):
        key = f'{user_id}'
        if self.likes.get(key) is None:
            l = ULikeComments(user_id, status, date_time)
            l.comment = self
        else:
            existing = self.likes.get(key)
            existing.status = status
            existing.date_time = date_time
Example 40
class Tag(Base):
    """Simple keywords (not bound to a concept), used to tag posts."""
    __tablename__ = 'tag'
    id = Column(Integer, primary_key=True)
    locale_id = Column(Integer, ForeignKey(Locale.id))
    label_id = Column(Integer, ForeignKey(LangString.id))
    base_tag_id = Column(Integer, ForeignKey("tag.id"))

    @hybrid_property
    def group_id(self):
        return self.base_tag_id or self.id

    @group_id.expression
    def group_id(cls):
        return coalesce(cls.base_tag_id, cls.id)

    # violates DRY, but allows for faster queries.
    locale = relationship(Locale)  # corresponds to original lang of label.
    label = relationship(LangString,
                         backref=backref("label_of_tag", lazy="dynamic"))
    equivalent_tags = relationship(
        "Tag",
        foreign_keys=[base_tag_id],
        backref=backref('reference_tag', remote_side=[id]),
    )
    label_string_entry = relationship(
        LangStringEntry,
        viewonly=True,
        lazy='select',
        primaryjoin=(LangStringEntry.locale_id == locale_id) &
        (LangStringEntry.langstring_id == label_id),
        foreign_keys=[label_id, locale_id],
        remote_side=[LangStringEntry.langstring_id, LangStringEntry.locale_id])

    @classmethod
    def getOrCreateTag(cls, label, locale, db=None):
        db = db or cls.default_db
        tag = db.query(cls).filter(cls.locale == locale).join(
            cls.label_string_entry).filter(
                LangStringEntry.value == label).first()
        if not tag:
            ls = LangString.create(label, locale.code)
            tag = cls(locale=locale, label=ls)
            db.add(tag)
            db.flush()
        return tag

    @property
    def original_label(self):
        return self.label_string_entry.value

    @property
    def reference_label(self):
        # Note: by simplistic_unify, the reference_label will be identical
        # to the translated label. Maybe use the latter.
        tag = self.reference_tag if self.base_tag_id else self
        return tag.label_string_entry.value

    def simplistic_unify(self, translator):
        if self.locale.code == 'en':
            if not self.id:
                self.db.flush()
            taglocale = aliased(Locale)
            lselocale = aliased(Locale)
            sq = self.db.query(Tag.id).join(taglocale).filter(
                taglocale.code != 'en').outerjoin(
                    LangStringEntry,
                    LangStringEntry.langstring_id == Tag.label_id).filter(
                        LangStringEntry.value ==
                        self.original_label).outerjoin(lselocale).filter(
                            lselocale.code.like('en-x-mtfrom-%')).filter(
                                lselocale.id == None  # noqa: E711
                            ).subquery()
            self.db.query(Tag).filter(Tag.id.in_(sq)).update(
                {"base_tag_id": self.id}, synchronize_session=False)
        else:
            self.label.ensure_translations(['en'], translator)
            en_translations = [
                lse for lse in self.label.entries
                if lse.locale.root_locale == 'en'
            ]
            if en_translations:
                label = en_translations[0].value
                self.reference_tag = self.db.query(Tag).join(Locale).filter(
                    Locale.code == 'en').join(Tag.label_string_entry).filter(
                        LangStringEntry.value == label).first()
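
# Usage sketch (illustrative, not part of the original source).
# getOrCreateTag() looks a tag up by its label in a given locale and creates
# the LangString/Tag pair when nothing matches. Assumes `db` is an open
# session and that a Locale row with code 'fr' exists (both assumptions).
fr = db.query(Locale).filter_by(code='fr').first()
tag = Tag.getOrCreateTag("climat", fr, db=db)
# original_label reads the label entry in the tag's own locale, and group_id
# collapses equivalent tags onto their reference tag:
same_group = db.query(Tag).filter(Tag.group_id == tag.group_id).all()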
Ejemplo n.º 41
0
class Flag(DatabaseObject):
    """
    Flags that can be captured by players and what not. This object comes in
    these flavors:
        -static
        -regex
        -datetime
        -file
        -choice

    Depending on the cls._type value. For more information see the wiki.
    """

    uuid = Column(String(36),
                  unique=True,
                  nullable=False,
                  default=lambda: str(uuid4()))
    box_id = Column(Integer, ForeignKey("box.id"), nullable=False)
    lock_id = Column(Integer,
                     ForeignKey("flag.id", ondelete="SET NULL"),
                     nullable=True)

    _name = Column(Unicode(64), nullable=True)
    _token = Column(Unicode(256), nullable=False)
    _description = Column(Unicode(1024), nullable=False)
    _capture_message = Column(Unicode(512))
    _case_sensitive = Column(Integer, nullable=True)
    _value = Column(Integer, nullable=False)
    _original_value = Column(Integer, nullable=True)
    _order = Column(Integer, nullable=True, index=True)
    _type = Column(Unicode(16), default=False)

    flag_attachments = relationship("FlagAttachment",
                                    backref=backref("flag", lazy="select"))

    flag_choice = relationship(
        "FlagChoice",
        backref=backref("flag", lazy="select"),
        cascade="all,delete,delete-orphan",
    )

    penalties = relationship(
        "Penalty",
        backref=backref("flag", lazy="select"),
        cascade="all,delete,delete-orphan",
    )

    hints = relationship(
        "Hint",
        backref=backref("flag", lazy="select"),
        cascade="all,delete,delete-orphan",
    )

    FLAG_TYPES = [
        FLAG_FILE, FLAG_REGEX, FLAG_STATIC, FLAG_DATETIME, FLAG_CHOICE
    ]

    @classmethod
    def all(cls):
        """ Returns a list of all objects in the database """
        return dbsession.query(cls).all()

    @classmethod
    def by_id(cls, _id):
        """ Returns a the object with id of _id """
        return dbsession.query(cls).filter_by(id=_id).first()

    @classmethod
    def by_name(cls, name):
        """ Returns a the object with name of _name """
        return dbsession.query(cls).filter_by(_name=str(name)).first()

    @classmethod
    def by_uuid(cls, _uuid):
        """ Return and object based on a uuid """
        return dbsession.query(cls).filter_by(uuid=str(_uuid)).first()

    @classmethod
    def by_token(cls, token):
        """ Return and object based on a token """
        return dbsession.query(cls).filter_by(_token=str(token)).first()

    @classmethod
    def by_token_and_box_id(cls, token, box_id):
        """ Return and object based on a token """
        return dbsession.query(cls).filter_by(_token=str(token),
                                              box_id=box_id).first()

    @classmethod
    def by_type(cls, _type):
        """ Return and object based on a token """
        return dbsession.query(cls).filter_by(_type=str(_type)).all()

    @classmethod
    def captures(cls, _id):
        return dbsession.query(team_to_flag).filter_by(flag_id=_id).all()

    @classmethod
    def create_flag(cls, _type, box, name, raw_token, description, value):
        """ Check parameters applicable to all flag types """
        creators = {
            FLAG_STATIC: cls._create_flag_static,
            FLAG_REGEX: cls._create_flag_regex,
            FLAG_FILE: cls._create_flag_file,
            FLAG_DATETIME: cls._create_flag_datetime,
            FLAG_CHOICE: cls._create_flag_choice,
        }
        # TODO Don't understand why this is here - name is not a unique value
        # and you could simply name questions per box, like "Question 1" - ElJefe 6/1/2018
        # if cls.by_name(name) is not None:
        # raise ValidationError('Flag name already exists in database')
        assert box is not None and isinstance(box, Box)
        new_flag = creators[_type](box, name, raw_token, description, value)
        new_flag._type = _type
        return new_flag

    @classmethod
    def _create_flag_file(cls, box, name, raw_token, description, value):
        """ Check flag file specific parameters """
        token = cls.digest(raw_token)
        return cls(
            box_id=box.id,
            name=name,
            token=token,
            description=description,
            value=value,
            original_value=value,
        )

    @classmethod
    def _create_flag_regex(cls, box, name, raw_token, description, value):
        """ Check flag regex specific parameters """
        try:
            re.compile(raw_token)
        except re.error:
            raise ValidationError("Flag token is not a valid regex")
        return cls(
            box_id=box.id,
            name=name,
            token=raw_token,
            description=description,
            value=value,
            original_value=value,
        )

    @classmethod
    def _create_flag_static(cls, box, name, raw_token, description, value):
        """ Check flag static specific parameters """
        return cls(
            box_id=box.id,
            name=name,
            token=raw_token,
            description=description,
            value=value,
            original_value=value,
        )

    @classmethod
    def _create_flag_datetime(cls, box, name, raw_token, description, value):
        """ Check flag datetime specific parameters """
        try:
            parse(raw_token)
        except (ValueError, OverflowError):
            raise ValidationError("Flag token is not a valid datetime")
        return cls(
            box_id=box.id,
            name=name,
            token=raw_token,
            description=description,
            value=value,
            original_value=value,
        )

    @classmethod
    def _create_flag_choice(cls, box, name, raw_token, description, value):
        """ Check flag choice specific parameters """
        return cls(
            box_id=box.id,
            name=name,
            token=raw_token,
            description=description,
            value=value,
            original_value=value,
        )

    @classmethod
    def digest(cls, data):
        """ Token is SHA1 of data """
        return hashlib.sha1(data).hexdigest()

    @property
    def game_level(self):
        return self.box.game_level

    @property
    def name(self):
        if self._name and len(self._name) > 0:
            return self._name
        else:
            return "Question %d" % self.order

    @name.setter
    def name(self, value):
        if not len(value) <= 16:
            raise ValidationError("Flag name must be 16 characters or fewer")
        self._name = str(value)

    @property
    def order(self):
        if not self._order:
            self._order = self.box.flags.index(self) + 1
        return self._order

    @order.setter
    def order(self, value):
        self._order = int(value)

    @property
    def description(self):
        return self._description

    @description.setter
    def description(self, value):
        self._description = str(value)[:1024]

    @property
    def capture_message(self):
        return self._capture_message if self._capture_message else ""

    @capture_message.setter
    def capture_message(self, value):
        self._capture_message = str(value)

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, value):
        if value not in self.FLAG_TYPES:
            raise ValueError("Invalid flag type")
        self._type = str(value)

    @property
    def token(self):
        return self._token

    @token.setter
    def token(self, value):
        self._token = str(value)

    @property
    def case_sensitive(self):
        return self._case_sensitive

    @case_sensitive.setter
    def case_sensitive(self, value):
        if value is None:
            self._case_sensitive = 0
        else:
            self._case_sensitive = value

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        try:
            self._value = abs(int(value))
        except ValueError:
            raise ValidationError("Reward value must be an integer")

    @property
    def original_value(self):
        if self._original_value:
            return self._original_value
        else:
            return self._value

    @original_value.setter
    def original_value(self, value):
        try:
            self._original_value = abs(int(value))
        except ValueError:
            raise ValidationError("Reward value must be an integer")

    @property
    def get_lock_id(self):
        return self.lock_id

    @get_lock_id.setter
    def set_lock_id(self, value):
        try:
            if value is None:
                self.lock_id = value
            else:
                self.lock_id = abs(int(value))
        except ValueError:
            self.lock_id = None

    @property
    def is_text(self):
        return self._type == FLAG_REGEX or self._type == FLAG_STATIC

    @property
    def is_static(self):
        return self._type == FLAG_STATIC

    @property
    def is_file(self):
        return self._type == FLAG_FILE

    @property
    def box(self):
        return Box.by_id(self.box_id)

    def choices(self):
        # includes the choice uuid - needed for editing the choice
        choices = []
        if self._type == FLAG_CHOICE:
            choicelist = FlagChoice.by_flag_id(self.id)
            if choicelist is not None and len(choicelist) > 0:
                for flagchoice in choicelist:
                    choices.append(flagchoice.to_dict())
        return json.dumps(choices)

    def choicelist(self):
        # excludes the choice uuid
        choices = []
        if self._type == FLAG_CHOICE:
            choicelist = FlagChoice.by_flag_id(self.id)
            if choicelist is not None and len(choicelist) > 0:
                for flagchoice in choicelist:
                    choices.append(flagchoice.choice)
        return json.dumps(choices)

    def capture(self, submission):
        if self._type == FLAG_STATIC:
            if self._case_sensitive == 0:
                return (str(self.token).lower().strip() == str(
                    submission).lower().strip())
            else:
                return str(self.token).strip() == str(submission).strip()
        elif self._type == FLAG_REGEX:
            if not self.token.startswith("^(") and not self.token.endswith(
                    ")$"):
                self.token = "^(" + self.token + ")$"
            if self._case_sensitive == 0:
                pattern = re.compile(self.token, re.IGNORECASE)
            else:
                pattern = re.compile(self.token)
            return pattern.match(submission) is not None
        elif self._type == FLAG_FILE:
            return self.token == self.digest(submission)
        elif self._type == FLAG_CHOICE:
            return self.token == submission
        elif self._type == FLAG_DATETIME:
            try:
                return parse(self.token) == parse(submission)
            except Exception:
                return False
        else:
            raise ValueError("Invalid flag type, cannot capture")

    def to_xml(self, parent):
        """ Write attributes to XML doc """
        flag_elem = ET.SubElement(parent, "flag")
        flag_elem.set("type", self._type)
        ET.SubElement(flag_elem, "name").text = self._name
        ET.SubElement(flag_elem, "token").text = self.token
        ET.SubElement(flag_elem, "description").text = self.description
        ET.SubElement(flag_elem, "capture_message").text = self.capture_message
        ET.SubElement(flag_elem, "value").text = str(self.value)
        ET.SubElement(flag_elem,
                      "original_value").text = str(self.original_value)
        if self.lock_id:
            ET.SubElement(flag_elem,
                          "depends_on").text = Flag.by_id(self.lock_id).name
        ET.SubElement(flag_elem,
                      "case_sensitive").text = str(self.case_sensitive)
        attachements_elem = ET.SubElement(flag_elem, "flag_attachments")
        attachements_elem.set("count", str(len(self.flag_attachments)))
        for attachement in self.flag_attachments:
            attachement.to_xml(attachements_elem)
        choice_elem = ET.SubElement(flag_elem, "flag_choices")
        choice_elem.set("count", str(len(self.flag_choice)))
        for choice in self.flag_choice:
            ET.SubElement(choice_elem, "choice").text = choice.choice
        from models.Hint import Hint

        xml_hints = Hint.by_flag_id(self.id)
        hints_elem = ET.SubElement(flag_elem, "hints")
        hints_elem.set("count", str(len(xml_hints)))
        for hint in xml_hints:
            if not hint.flag_id is None:
                hint.to_xml(hints_elem)

    def to_dict(self):
        """ Returns public data as a dict """
        box = Box.by_id(self.box_id)
        if self.lock_id:
            lock_uuid = Flag.by_id(self.lock_id).uuid
        else:
            lock_uuid = ""
        case_sensitive = self.case_sensitive
        if case_sensitive != 0:
            case_sensitive = 1
        return {
            "name": self.name,
            "uuid": self.uuid,
            "description": self.description,
            "capture_message": self.capture_message,
            "value": self.value,
            "original_value": self.original_value,
            "box": box.uuid,
            "token": self.token,
            "lock_uuid": lock_uuid,
            "case-sensitive": case_sensitive,
            "flagtype": self.type,
            "choices": self.choices(),
            "order": self.order,
        }

    def __repr__(self):
        return "<Flag - name:%s, type:%s >" % (self.name, str(self._type))
Ejemplo n.º 42
0
class Track(Base, BaseModel):
    __tablename__ = 'tracks'
    id = Column(Integer, primary_key=True)
    created_time = Column(DateTime)
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship('User', backref=backref('tracks', lazy='dynamic'))
    keyname = Column(String(255))
    original_filename = Column(String(255))
    artist = Column(String(255))
    title = Column(String(255))
    year = Column(Integer)
    bpm = Column(Integer, default=0)
    artwork_url = Column(String(255))
    notes = Column(Text)
    
    def set_id3_by_file(self, sourcefile):
        tags = EasyID3(sourcefile)
        if tags:
            try:
                self.artist = tags.get('artist')[0] if tags.get('artist') else ''
                self.title = tags.get('title')[0] if tags.get('title') else ''
                self.year = int(tags.get('date')[0]) if tags.get('date') else 0
                self.bpm = int(tags.get('bpm')[0]) if tags.get('bpm') else 0
            except ValueError:
                pass
        
        
    def generate_keyname(self):
        strip_mp3 = re.sub(r'(\.mp3)|(\.MP3)', '', self.original_filename)
        convert_spaces = re.sub(r'\s+', '_', strip_mp3)
        alphanumericspace = re.sub(r'\W+', '', convert_spaces)
        final = 'trackcircle-dev/tracks/%s/%s-%s.mp3' % \
                (self.user_id, int(time()), alphanumericspace,)
        return final
        
    def __init__(self, user_id, original_filename, artist = '', title = '', notes = ''):
        self.created_time = datetime.utcnow()
        self.user_id = user_id
        self.original_filename = original_filename
        self.artist = artist
        self.title = title
        self.notes = notes
        self.keyname = self.generate_keyname()

    
    def __repr__(self):
        if self.artist == '' and self.title == '':
            return '<Track %s (%r)>' % (self.keyname, self.id,)
        else:
            return '<Track %s - %s (%r)>' % (self.artist, self.title, self.id,)
        
    def __str__(self):
        if self.artist == '' or self.title == '':
            return self.original_filename
        return "%s - %s" % (self.artist, self.title)
    
    def serialized(self):
        dictionary = {}
        dictionary['id'] = self.id
        dictionary['classname'] = 'Track'
        #dictionary['created_time'] = self.created_time;
        dictionary['original_filename'] = self.original_filename
        dictionary['artist'] = self.artist
        dictionary['title'] = self.title
        dictionary['notes'] = self.notes
        dictionary['artwork_url'] = self.artwork_url
        
        fbids = [1224063, 1208729, 1225500]
        fbid = random.choice(fbids)
        dictionary['facebook_id'] = fbid # self.user.facebook_id
        
        names = ['Drew', 'Jesse', 'Kane']
        name = random.choice(names)
        
        dictionary['user_name'] = name # self.user.first_name
        dictionary['keyname'] = self.keyname
        dictionary['url'] = self.url
        dictionary['prettytime'] = self.prettytime
        return dictionary
        
    @property 
    def url(self):
        if not self.keyname:
            self.keyname = self.generate_keyname()
        return 'http://%s/%s' % (AMAZON_S3_BUCKET, self.keyname,)
    
    @property
    def prettytime(self):
        # Monday, January 1 2012
        format = "%A, %B %d %Y"
        return self.created_time.strftime(format)
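
# Usage sketch (illustrative, not part of the original source):
# generate_keyname() strips the .mp3 extension, converts whitespace to
# underscores, drops the remaining non-word characters and prefixes a
# per-user, timestamped S3 key. The user id 7 below is an assumption.
track = Track(7, 'My First Mix.mp3', artist='DJ Example', title='First Mix')
print(track.keyname)   # e.g. trackcircle-dev/tracks/7/1700000000-My_First_Mix.mp3
print(track.url)       # http://<AMAZON_S3_BUCKET>/<keyname>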
        
            
Ejemplo n.º 43
0
class Item(Base):
    metadata = MetaData()

    __tablename__ = "item_reg"

    __table_args__ = {"useexisting": True}

    id = Column('id', BIGINT, primary_key=True)
    item_name = Column("item_name", Text, nullable=True)
    description = Column("description", Text)
    avatar = Column("avatar", Text)
    code = Column("code", BIGINT, nullable=False)
    supplier = Column("supplier", BIGINT, nullable=False)
    barcode = Column("barcode", Text, nullable=False)
    amount = Column("amount", NUMERIC(20, 2), nullable=True, default=0.00)
    category = Column("category", BIGINT, nullable=True)
    item_type = Column("item_type", BIGINT, nullable=True)
    #initial_cost=Column("initial_cost", NUMERIC(20, 2), nullable=True)  # The total amount(Sum of subtotal+Taxes)
    unit = Column("unit", BIGINT, nullable=True)
    status = Column('status',
                    BIGINT,
                    ForeignKey(Status.code),
                    nullable=True,
                    default=12)
    price = Column("price", NUMERIC(20, 2),
                   nullable=True)  # The total amount(Sum of subtotal+Taxes)
    #price1 = Column("price1", NUMERIC(20, 2), nullable=True)  # The total amount(Sum of subtotal+Taxes)
    #price2 = Column("price2", NUMERIC(20, 2), nullable=True)  # The total amount(Sum of subtotal+Taxes)
    #price3 = Column("price3", NUMERIC(20, 2), nullable=True)  # The total amount(Sum of subtotal+Taxes)
    #price4 = Column("price4", NUMERIC(20, 2), nullable=True)  # The total amount(Sum of subtotal+Taxes)

    subtotal = Column("subtotal", NUMERIC(20, 2),
                      nullable=True)  # The subtotal(price, without taxes)
    tax = Column("tax", NUMERIC(20, 2), nullable=True)  # The taxes

    Item_tbl = Table(__tablename__, metadata, id, item_name, description, code,
                     item_type, status, amount, subtotal, price, tax, category,
                     avatar, unit, supplier, barcode
                     #,price1,price2,price3,price4,initial_cost
                     )

    # Relationship
    status_rel_cmp = relationship(Status, backref=backref("Item"))

    def __repr__(self):
        return "<Item (id='%s', code='%s', item_name='%s', " \
               "description='%s', item_type='%s', status='%s', amount='%s'," \
               "subtotal='%s', price='%s', tax='%s', category='%s',avatar='%s'," \
               "unit='%s'supplier='%s',barcode='%s')>" % \
               (self.id, self.code, self.item_name, self.description, self.item_type,
                self.status, self.amount, self.subtotal, self.price, self.tax, self.category,
                self.avatar, self.unit, self.supplier,
                self.barcode)

    def __Publish__(self):
        data = {}
        for column in self.__table__.columns.keys():
            name = self.__table__.columns[column].name
            value = self.__dict__[name]
            # Column.type is a type object, so compare its string form.
            column_type = str(self.__table__.columns[column].type).upper()
            if column_type.startswith(("BIGINT", "INTEGER")):
                data[name] = int(value)
            elif column_type.startswith(("NUMERIC", "DECIMAL")):
                data[name] = float(value)
            elif column_type.startswith(("TIME", "DATETIME")):
                data[name] = str(value.strftime('%H:%M:%S'))
            else:
                data[name] = str(value)
        return data
Ejemplo n.º 44
0
    def define_tables(cls, metadata):
        global Table1, Table1B, Table2, Table3, Data
        table1 = Table(
            "table1",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("related_id",
                   Integer,
                   ForeignKey("table1.id"),
                   nullable=True),
            Column("type", String(30)),
            Column("name", String(30)),
        )

        table2 = Table(
            "table2",
            metadata,
            Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
        )

        table3 = Table(
            "table3",
            metadata,
            Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
        )

        data = Table(
            "data",
            metadata,
            Column("id",
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True),
            Column("node_id", Integer, ForeignKey("table1.id")),
            Column("data", String(30)),
        )

        # join = polymorphic_union(
        #   {
        #   'table3' : table1.join(table3),
        #   'table2' : table1.join(table2),
        #   'table1' : table1.select(table1.c.type.in_(['table1', 'table1b'])),
        #   }, None, 'pjoin')

        join = table1.outerjoin(table2).outerjoin(table3).alias("pjoin")

        # join = None

        class Table1(object):
            def __init__(self, name, data=None):
                self.name = name
                if data is not None:
                    self.data = data

            def __repr__(self):
                return "%s(%s, %s, %s)" % (
                    self.__class__.__name__,
                    self.id,
                    repr(str(self.name)),
                    repr(self.data),
                )

        class Table1B(Table1):
            pass

        class Table2(Table1):
            pass

        class Table3(Table1):
            pass

        class Data(object):
            def __init__(self, data):
                self.data = data

            def __repr__(self):
                return "%s(%s, %s)" % (
                    self.__class__.__name__,
                    self.id,
                    repr(str(self.data)),
                )

        try:
            # this is how the mapping used to work.  ensure that this raises an
            # error now
            table1_mapper = mapper(
                Table1,
                table1,
                select_table=join,
                polymorphic_on=table1.c.type,
                polymorphic_identity="table1",
                properties={
                    "nxt":
                    relationship(
                        Table1,
                        backref=backref("prev",
                                        foreignkey=join.c.id,
                                        uselist=False),
                        uselist=False,
                        primaryjoin=join.c.id == join.c.related_id,
                    ),
                    "data":
                    relationship(mapper(Data, data)),
                },
            )
            configure_mappers()
            assert False
        except Exception:
            assert True
            clear_mappers()

        # currently, the "eager" relationships degrade to lazy relationships
        # due to the polymorphic load.
        # the "nxt" relationship used to have a "lazy='joined'" on it, but the
        # EagerLoader raises the "self-referential"
        # exception now.  since eager loading would never work for that
        # relationship anyway, it's better that the user
        # gets an exception instead of it silently not eager loading.
        # NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into
        # __next__() for some reason.
        table1_mapper = mapper(
            Table1,
            table1,
            # select_table=join,
            polymorphic_on=table1.c.type,
            polymorphic_identity="table1",
            properties={
                "nxt":
                relationship(
                    Table1,
                    backref=backref("prev",
                                    remote_side=table1.c.id,
                                    uselist=False),
                    uselist=False,
                    primaryjoin=table1.c.id == table1.c.related_id,
                ),
                "data":
                relationship(mapper(Data, data),
                             lazy="joined",
                             order_by=data.c.id),
            },
        )

        table1b_mapper = mapper(Table1B,
                                inherits=table1_mapper,
                                polymorphic_identity="table1b")

        table2_mapper = mapper(
            Table2,
            table2,
            inherits=table1_mapper,
            polymorphic_identity="table2",
        )

        table3_mapper = mapper(
            Table3,
            table3,
            inherits=table1_mapper,
            polymorphic_identity="table3",
        )

        configure_mappers()
        assert table1_mapper.primary_key == (
            table1.c.id, ), table1_mapper.primary_key
Ejemplo n.º 45
0

## Mappers

meta.mapper(
    Resource,
    resource_table,
    properties={
        'package':
        orm.relation(
            Package,
            # all resources including deleted
            # formally package_resources_all
            backref=orm.backref(
                'resources_all',
                collection_class=ordering_list('position'),
                cascade='all, delete',
                order_by=resource_table.c.position,
            ),
        )
    },
    extension=[extension.PluginMapperExtension()],
)
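
# Usage sketch (illustrative, not part of the original source): because the
# 'resources_all' backref uses ordering_list('position'), list operations keep
# the `position` column in sync. Assumes `pkg` is a persistent Package and
# that Resource accepts a `url` keyword, as in CKAN's model (assumptions).
pkg.resources_all.append(Resource(url='http://example.com/a.csv'))
pkg.resources_all.insert(0, Resource(url='http://example.com/b.csv'))
# positions are renumbered automatically: b.csv -> 0, a.csv -> 1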


def resource_identifier(obj):
    return obj.id


class DictProxy(object):
    def __init__(self, target_key, target_dict, data_type=text_type):
        self.target_key = target_key
Ejemplo n.º 46
0
class Node(Base):
    __tablename__ = 'nodes'
    id = Column(Integer, primary_key=True)
    uuid = Column(String(36),
                  nullable=False,
                  default=lambda: str(uuid.uuid4()),
                  unique=True)
    cluster_id = Column(Integer, ForeignKey('clusters.id'))
    group_id = Column(Integer, ForeignKey('nodegroups.id'), nullable=True)
    name = Column(Unicode(100))
    status = Column(Enum(*consts.NODE_STATUSES, name='node_status'),
                    nullable=False,
                    default=consts.NODE_STATUSES.discover)
    meta = Column(JSON, default={})
    mac = Column(LowercaseString(17), nullable=False, unique=True)
    ip = Column(String(15))
    fqdn = Column(String(255))
    manufacturer = Column(Unicode(50))
    platform_name = Column(String(150))
    kernel_params = Column(Text)
    progress = Column(Integer, default=0)
    os_platform = Column(String(150))
    pending_addition = Column(Boolean, default=False)
    pending_deletion = Column(Boolean, default=False)
    changes = relationship("ClusterChanges", backref="node")
    error_type = Column(Enum(*consts.NODE_ERRORS, name='node_error_type'))
    error_msg = Column(String(255))
    timestamp = Column(DateTime, nullable=False)
    online = Column(Boolean, default=True)
    role_list = relationship("Role",
                             secondary=NodeRoles.__table__,
                             backref=backref("nodes", cascade="all,delete"))
    role_associations = relationship("NodeRoles", viewonly=True)
    pending_role_associations = relationship("PendingNodeRoles", viewonly=True)
    pending_role_list = relationship("Role",
                                     secondary=PendingNodeRoles.__table__,
                                     backref=backref("pending_nodes",
                                                     cascade="all,delete"))
    attributes = relationship("NodeAttributes",
                              backref=backref("node"),
                              uselist=False,
                              cascade="all,delete")
    nic_interfaces = relationship("NodeNICInterface",
                                  backref="node",
                                  cascade="delete",
                                  order_by="NodeNICInterface.name")
    bond_interfaces = relationship("NodeBondInterface",
                                   backref="node",
                                   cascade="delete",
                                   order_by="NodeBondInterface.name")
    # hash of the raw node agent request data - used for caching purposes
    agent_checksum = Column(String(40), nullable=True)

    ip_addrs = relationship("IPAddr", viewonly=True)
    replaced_deployment_info = Column(JSON, default=[])
    replaced_provisioning_info = Column(JSON, default={})

    @property
    def interfaces(self):
        return self.nic_interfaces + self.bond_interfaces

    @property
    def uid(self):
        return str(self.id)

    @property
    def offline(self):
        return not self.online

    @property
    def network_data(self):
        # TODO(enchantner): move to object
        from nailgun.network.manager import NetworkManager
        return NetworkManager.get_node_networks(self)

    @property
    def volume_manager(self):
        return VolumeManager(self)

    @property
    def needs_reprovision(self):
        return self.status == 'error' and self.error_type == 'provision' and \
            not self.pending_deletion

    @property
    def needs_redeploy(self):
        return (self.status in ['error', 'provisioned']
                or len(self.pending_roles)) and not self.pending_deletion

    @property
    def needs_redeletion(self):
        return self.status == 'error' and self.error_type == 'deletion'

    @property
    def human_readable_name(self):
        return self.name or self.mac

    @property
    def full_name(self):
        return u'%s (id=%s, mac=%s)' % (self.name, self.id, self.mac)

    @property
    def roles(self):
        return [role.name for role in self.role_list]

    @roles.setter
    def roles(self, new_roles):
        if not self.cluster:
            logger.warning(u"Attempting to assign roles to node "
                           u"'{0}' which isn't added to cluster".format(
                               self.name or self.id))
            return
        if new_roles:
            self.role_list = db().query(Role).filter_by(
                release_id=self.cluster.release_id, ).filter(
                    Role.name.in_(new_roles)).all()
        else:
            self.role_list = []

    @property
    def pending_roles(self):
        return [role.name for role in self.pending_role_list]

    @property
    def all_roles(self):
        """Returns all roles, self.roles and self.pending_roles."""
        return set(self.pending_roles + self.roles)

    @pending_roles.setter
    def pending_roles(self, new_roles):
        if not self.cluster:
            logger.warning(u"Attempting to assign pending_roles to node "
                           u"'{0}' which isn't added to cluster".format(
                               self.name or self.id))
            return
        self.pending_role_list = db().query(Role).filter_by(
            release_id=self.cluster.release_id, ).filter(
                Role.name.in_(new_roles)).all()

    @property
    def admin_interface(self):
        """Iterate over interfaces, if admin subnet include
        ip address of current interface then return this interface.

        :raises: errors.CanNotFindInterface
        """
        # TODO(enchantner): move to object
        from nailgun.network.manager import NetworkManager
        return NetworkManager.get_admin_interface(self)

    def _check_interface_has_required_params(self, iface):
        return bool(iface.get('name') and iface.get('mac'))

    def _clean_iface(self, iface):
        # normalise the speed fields - set invalid values to None
        for param in ["max_speed", "current_speed"]:
            val = iface.get(param)
            if not (isinstance(val, int) and val >= 0):
                val = None
            iface[param] = val
        return iface

    def update_meta(self, data):
        # helper for basic validation of meta before an update
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning("Invalid interface data: {0}. "
                                   "Interfaces are not updated.".format(iface))
                    data["interfaces"] = self.meta.get("interfaces")
                    self.meta = data
                    return
                result.append(self._clean_iface(iface))

        data["interfaces"] = result
        self.meta = data

    def create_meta(self, data):
        # helper for basic validation of meta before creation
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning("Invalid interface data: {0}. "
                                   "Skipping interface.".format(iface))
                    continue
                result.append(self._clean_iface(iface))

        data["interfaces"] = result
        self.meta = data

    def reset_name_to_default(self):
        """Reset name to default
        TODO(el): move to node REST object which
        will be introduced in 5.0 release
        """
        self.name = u'Untitled ({0})'.format(self.mac[-5:])
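
# Usage sketch (illustrative, not part of the original source): create_meta()
# skips interfaces missing a name or mac, while update_meta() keeps the
# previous interface list in that case; both pass surviving entries through
# _clean_iface(), which nulls out invalid speed values. The constructor
# arguments below are assumptions.
node = Node(mac='aa:bb:cc:dd:ee:f0', timestamp=datetime.now())
node.create_meta({'interfaces': [
    {'name': 'eth0', 'mac': 'aa:bb:cc:dd:ee:f0', 'max_speed': -1},  # -1 becomes None
    {'name': 'eth1'},                                               # skipped: no mac
]})
assert len(node.meta['interfaces']) == 1
assert node.meta['interfaces'][0]['max_speed'] is None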
Ejemplo n.º 47
0
class SalesOrderLine(db.Model):
    __tablename__ = 'sales_order_line'
    id = Column(Integer, primary_key=True)
    unit_price = Column(Numeric(precision=8, scale=2, decimal_return_scale=2), nullable=False)
    quantity = Column(Numeric(precision=8, scale=2, decimal_return_scale=2), nullable=False)

    sales_order_id = Column(Integer, ForeignKey('sales_order.id'), nullable=False)
    sales_order = relationship('SalesOrder', backref=backref('lines', cascade='all, delete-orphan'))

    external_id = Column(String(), nullable=True)

    product_id = Column(Integer, ForeignKey('product.id'), nullable=False)
    product = relationship('Product', backref=backref('sales_order_lines'))
    remark = Column(Text)

    @hybrid_property
    def discount_amount(self):
        return format_decimal(self.original_amount - self.actual_amount)

    @discount_amount.setter
    def discount_amount(self, adjust_amount):
        pass

    @hybrid_property
    def actual_amount(self):
        return format_decimal(self.unit_price * self.quantity)

    @actual_amount.expression
    def actual_amount(self):
        return select([self.quantity * self.unit_price]).label('line_actual_amount')

    @actual_amount.setter
    def actual_amount(self, actual_amount):
        pass

    @hybrid_property
    def original_amount(self):
        return format_decimal(self.product.retail_price * self.quantity)

    @original_amount.expression
    def original_amount(self):
        from psi.app.models.product import Product
        return (select([SalesOrderLine.quantity * Product.retail_price])
                .where(self.product_id == Product.id).label('line_original_amount'))

    @original_amount.setter
    def original_amount(self, original_amount):
        pass

    @hybrid_property
    def price_discount(self):
        return format_decimal(self.product.retail_price - self.unit_price)

    @price_discount.setter
    def price_discount(self, price_adjust):
        pass

    @hybrid_property
    def retail_price(self):
        return self.product.retail_price

    @retail_price.setter
    def retail_price(self, retail_price):
        pass

    @hybrid_property
    def transient_external_id(self):
        """
        This design displays a read-only field containing the current
        external id in the UI without allowing the user to change it.
        :return: Current external id as a transient property
        """
        return self.external_id

    @transient_external_id.setter
    def transient_external_id(self, val):
        pass

    def __unicode__(self):
        return str(self.id) + ' - ' + self.product.name
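
# Usage sketch (illustrative, not part of the original source): the hybrid
# properties work on instances, while their .expression counterparts are
# substituted in query criteria. `product` and `order` below are assumed to be
# existing Product / SalesOrder instances with retail_price set (assumptions).
line = SalesOrderLine(unit_price=8, quantity=3, product=product, sales_order=order)
print(line.actual_amount)     # 24: unit_price * quantity, via format_decimal
print(line.price_discount)    # product.retail_price - unit_price
print(line.discount_amount)   # original_amount - actual_amount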
Ejemplo n.º 48
0
class OrgUnit(Entity):
    __tablename__ = "orgunit"
    __indexable__ = False
    query_class = OrgUnitQuery

    type = Column(Enum(*TYPE_ENUM, name="orgunit_type"), nullable=False)

    dn = Column(String, unique=True, index=True)
    nom = Column(Unicode, nullable=False, unique=True)
    sigle = Column(Unicode, default="", nullable=False)

    parent_id = Column(Integer, ForeignKey("orgunit.id"))
    parent = relationship(
        "OrgUnit",
        primaryjoin=remote(Entity.id) == foreign(parent_id),
        backref=backref("children",
                        lazy="joined",
                        cascade="all, delete-orphan"),
    )

    wf_settings = Column(JSONDict(), default=dict)

    permettre_reponse_directe = Column(Boolean)
    permettre_soummission_directe = Column(Boolean)

    def __init__(self, **kw):
        super().__init__(**kw)
        self.wf_settings = {}

    def __unicode__(self):
        return f"<OrgUnit type='{self.type}' nom='{self.nom}' id={self.id}>"

    __str__ = __unicode__

    # def __repr__(self):
    #     return unicode(self).encode('utf8')

    @property
    def sigle_ou_nom(self) -> str:
        return self.sigle or self.nom

    @property
    def depth(self) -> int:
        if self.type == EQUIPE:
            return 4
        if self.type == DEPARTEMENT:
            return 3
        if self.type == LABORATOIRE:
            return 2
        if self.type == UFR:
            return 1
        if self.type == POLE_DE_RECHERCHE:
            return 0
        # Should not happen
        return 0

    @property
    def path(self) -> list[str]:
        t = [""] * 5
        for p in self.parents + [self]:
            t[p.depth] = p.nom
        return t

    @property
    def parents(self) -> list[OrgUnit]:
        p = self
        result = []
        while True:
            p = p.parent
            if not p:
                break
            result.append(p)
        result.reverse()
        return result

    def descendants(self) -> list[OrgUnit]:
        if self.type == EQUIPE:
            return []
        if self.type == DEPARTEMENT:
            return list(self.children)

        children = self.children
        result = list(children)
        for c in children:
            result += c.descendants()
        return result

    def get_contacts_dgrtt(self):
        from .mapping_dgrtt import MappingDgrtt

        return MappingDgrtt.query.get_for_ou(self)

    def get_members_with_role(self,
                              role_type: RoleType | None) -> list[Profile]:
        roles = roles_service.get_roles(role_type=role_type, target=self)
        result = [r.profile for r in roles if r.profile]
        return result

    def get_directeurs(self) -> list[Profile]:
        from .roles import RoleType

        result = self.get_members_with_role(RoleType.DIRECTION)
        assert all(p.has_role("directeur") for p in result)

        # On met le "vrai" directeur en premier
        result = [p for p in result if p.is_directeur
                  ] + [p for p in result if not p.is_directeur]

        return result

    def get_gestionnaires(self) -> list[Profile]:
        from .roles import RoleType

        result = self.get_members_with_role(RoleType.GDL)
        assert all(p.has_role("gestionnaire") for p in result)
        return result

    def get_administrateurs(self) -> list[Profile]:
        from .roles import RoleType

        result = self.get_members_with_role(RoleType.ALL)
        assert all(p.has_role("all") for p in result)
        return result

    @property
    def direction(self) -> list[Profile]:
        return self.get_directeurs()

    @property
    def gestionnaires(self) -> list[Profile]:
        return self.get_gestionnaires()

    @property
    def administrateurs(self):
        return self.get_administrateurs()

    def set_roles(self, users: list[Profile], role_type: RoleType) -> None:
        roles = roles_service.get_roles(role_type=role_type, target=self)
        for role in roles:
            db.session.delete(role)
        db.session.flush()
        for user in users:
            roles_service.grant_role(user, role_type, self)

    @property
    def directeur(self) -> Profile | None:

        direction = self.get_directeurs()
        direction = [d for d in direction if d.is_directeur]
        return toolz.get(0, direction, None)

    @property
    def adresse(self):
        if self.directeur and self.directeur.adresse:
            return self.directeur.adresse
        return ""

    def validate(self) -> None:
        if self.type == POLE_DE_RECHERCHE:
            assert self.parent is None

        elif self.type == UFR:
            assert self.parent.type == POLE_DE_RECHERCHE

        elif self.type == LABORATOIRE:
            assert self.parent.type in (POLE_DE_RECHERCHE, UFR)

        elif self.type == DEPARTEMENT:
            assert self.parent.type == LABORATOIRE

        elif self.type == EQUIPE:
            assert self.parent.type in (LABORATOIRE, DEPARTEMENT)

        elif self.type == BUREAU_DGRTT:
            assert self.parent is None

        else:
            raise AssertionError()

    def get_labo(self) -> OrgUnit | None:
        if self.type == LABORATOIRE:
            return self
        if not self.parent:
            return None
        if self.parent.type == LABORATOIRE:
            return self.parent
        if self.parent.parent.type == LABORATOIRE:
            return self.parent.parent
        raise AssertionError("Should not happen")

    @property
    def laboratoire(self) -> OrgUnit | None:
        try:
            return self.get_labo()
        except Exception:
            return None

    @property
    def equipe(self) -> OrgUnit | None:
        if self.type == EQUIPE:
            return self
        return None

    @property
    def departement(self) -> OrgUnit | None:
        if self.type == DEPARTEMENT:
            return self
        if self.type == EQUIPE and self.parent.type == DEPARTEMENT:
            return self.parent
        return None

    @property
    def ufr(self) -> OrgUnit | None:
        if self.type == POLE_DE_RECHERCHE:
            return None
        if self.type == UFR:
            return self

        assert self.laboratoire
        labo: OrgUnit = self.laboratoire
        parent = labo.parent
        if parent.type == UFR:
            return parent
        return None

    @property
    def pole(self) -> OrgUnit | None:
        if self.type == POLE_DE_RECHERCHE:
            return self
        if self.type == UFR:
            return self.parent

        assert self.laboratoire
        labo: OrgUnit = self.laboratoire
        parent: OrgUnit = labo.parent
        if parent.type == POLE_DE_RECHERCHE:
            return parent
        return parent.parent

    def get_membres(self) -> list[Profile]:
        if self.type not in [LABORATOIRE, EQUIPE, DEPARTEMENT]:
            return []

        labo = self.get_labo()
        if labo:
            membres_du_labo = {m for m in labo.membres if m.active}
        else:
            membres_du_labo = set()

        if self.type == LABORATOIRE:
            membres = list(membres_du_labo)

        elif self.type == EQUIPE:
            membres = [m for m in membres_du_labo if m.sous_structure == self]

        else:
            # self.type == DEPARTEMENT
            membres_set = {
                m
                for m in membres_du_labo if m.sous_structure == self
            }
            for equipe in self.children:
                membres_set.update(equipe.get_membres())
            membres = list(membres_set)

        def sorter(profile: Profile) -> tuple[str, str]:
            return profile.nom, profile.prenom

        return sorted(membres, key=sorter)

    def wf_must_validate(self, type: str) -> bool:
        assert type in [LABORATOIRE, DEPARTEMENT, EQUIPE]

        if self.type == LABORATOIRE:
            return True

        wf_settings = self.wf_settings
        if type == LABORATOIRE:
            return wf_settings.get("validation_labo", True)
        elif type == DEPARTEMENT:
            return wf_settings.get("validation_dept", True)
        else:
            # type == EQUIPE
            return wf_settings.get("validation_equipe", True)
Ejemplo n.º 49
0
           index=True),
    Column('created', types.DateTime, default=datetime.now, nullable=False),
    Column('visibility', types.Unicode, default=u'visible'),
    Column('abuse_status',
           types.Integer,
           default=AbuseStatus.unmoderated.value),
)

meta.mapper(
    Issue,
    issue_table,
    properties={
        'user':
        relation(model.User,
                 backref=backref('issues',
                                 cascade='all, delete-orphan',
                                 single_parent=True),
                 primaryjoin=foreign(issue_table.c.user_id) == remote(User.id),
                 uselist=False),
        'assignee':
        relation(model.User,
                 backref=backref('resolved_issues', cascade='all'),
                 primaryjoin=foreign(issue_table.c.assignee_id) == remote(
                     User.id)),
        'dataset':
        relation(model.Package,
                 backref=backref('issues',
                                 cascade='all, delete-orphan',
                                 single_parent=True),
                 primaryjoin=foreign(issue_table.c.dataset_id) == remote(
                     Package.id),
Ejemplo n.º 50
0
class Session(db.Model, TypenameMixin, StatusPredicatesMixin, HasSubjectsMixin,
              UserDetailsMixin, TimespanMixin):

    id = db.Column(db.Integer, primary_key=True)
    logical_id = db.Column(db.String(256), unique=True, index=True)
    updated_at = db.Column(db.DateTime(),
                           onupdate=datetime.now,
                           index=True,
                           nullable=True)

    parent_logical_id = db.Column(db.String(256),
                                  db.ForeignKey('session.logical_id',
                                                ondelete='CASCADE'),
                                  default=None,
                                  index=True)
    children = db.relationship('Session',
                               backref=backref('parent',
                                               remote_side=[logical_id]))
    is_parent_session = db.Column(db.Boolean, server_default='FALSE')
    child_id = db.Column(db.String(20), default=None)

    start_time = db.Column(db.Float, default=get_current_time)
    end_time = db.Column(db.Float, default=None, index=True)
    hostname = db.Column(db.String(100))

    in_pdb = db.Column(db.Boolean, server_default='FALSE')

    infrastructure = db.Column(db.String(50), default=None)

    tests = db.relationship('Test',
                            backref=backref('session', lazy='joined'),
                            cascade='all, delete, delete-orphan')
    errors = db.relationship('Error',
                             backref=backref('session', lazy='joined'))
    comments = db.relationship('Comment',
                               primaryjoin='Comment.session_id==Session.id')
    metadata_items = db.relationship('SessionMetadata',
                                     lazy='dynamic',
                                     cascade='all, delete, delete-orphan')

    subject_instances = db.relationship('SubjectInstance',
                                        secondary=session_subject,
                                        backref=backref('sessions',
                                                        lazy='dynamic'),
                                        lazy='joined',
                                        order_by=session_subject.c.ordinal)

    labels = db.relationship('Label',
                             secondary='session_label',
                             lazy='joined',
                             order_by="Label.name")

    # test counts
    total_num_tests = db.Column(db.Integer, default=None)
    num_failed_tests = db.Column(db.Integer, default=0)
    num_error_tests = db.Column(db.Integer, default=0)
    num_skipped_tests = db.Column(db.Integer, default=0)
    num_finished_tests = db.Column(db.Integer, default=0)
    num_interruptions = db.Column(db.Integer, default=0)
    num_interrupted_tests = db.Column(db.Integer, server_default="0")
    num_warnings = db.Column(db.Integer, nullable=False, server_default="0")
    num_test_warnings = db.Column(db.Integer,
                                  nullable=False,
                                  server_default="0")

    user_id = db.Column(db.Integer,
                        db.ForeignKey('user.id', ondelete='CASCADE'),
                        index=True,
                        nullable=False)
    user = db.relationship('User', lazy='joined', foreign_keys=user_id)
    real_user_id = db.Column(db.Integer,
                             db.ForeignKey('user.id', ondelete='CASCADE'),
                             nullable=True)
    real_user = db.relationship('User',
                                lazy='joined',
                                foreign_keys=real_user_id)

    # status
    num_errors = db.Column(db.Integer, default=0)
    num_failures = db.Column(db.Integer, default=0)
    status = db.Column(db.String(20), nullable=False, default=statuses.STARTED)

    # keepalive
    keepalive_interval = db.Column(db.Integer, nullable=True, default=None)
    next_keepalive = db.Column(db.Float,
                               nullable=True,
                               default=None,
                               index=True)
    reporting_stopped = db.Column(db.Boolean, default=False)

    # activity
    num_comments = db.Column(db.Integer, default=0)

    has_fatal_errors = db.Column(db.Boolean, default=False)

    delete_at = db.Column(db.Float, nullable=True)
    ttl_seconds = db.Column(db.Integer, nullable=True)

    __table_args__ = (
        Index('ix_session_start_time', start_time.desc()),
        Index('ix_session_status_lower', func.lower(status)),
        Index('ix_session_start_time_status_lower', start_time.desc(),
              func.lower(status)),
        Index('ix_session_timespan', 'timespan', postgresql_using='gist'),
        Index('ix_session_delete_at',
              delete_at,
              postgresql_where=(delete_at != None)),
        Index('ix_session_updated_at',
              updated_at.asc(),
              postgresql_where=(updated_at != None)),
    )

    last_comment_obj = db.relationship(
        lambda: Comment,
        primaryjoin=lambda: and_(
            Session.id == Comment.session_id,  # pylint: disable=undefined-variable
            Comment.timestamp == select([func.max(Comment.timestamp)]).where(
                Comment.session_id == Session.id).correlate(Session.__table__)
        ),
        uselist=False,
        lazy='joined')

    @rendered_field
    def last_comment(self):
        comment = self.last_comment_obj
        if comment is None:
            return None

        return {'comment': comment.comment, 'user_email': comment.user.email}

    @rendered_field
    def is_abandoned(self):
        if self.next_keepalive is None:
            return False
        if self.next_keepalive > get_current_time():
            return False
        return self.end_time is None

    # rendered extras
    related_entities = db.relationship('Entity', secondary='session_entity')

    @rendered_field
    def real_email(self):
        user = self.real_user
        if user is None:
            return None
        return user.email

    @rendered_field(name='labels')
    def label_names(self):
        return [l.name for l in self.labels]

    def update_keepalive(self):
        if self.keepalive_interval is not None:
            next_keepalive = flux.current_timeline.time(
            ) + self.keepalive_interval
            self.next_keepalive = next_keepalive
            self.extend_timespan_to(next_keepalive)
            if self.ttl_seconds is not None:
                self.delete_at = self.next_keepalive + self.ttl_seconds

    def notify_subject_activity(self):
        for subject_instance in self.subject_instances:
            subject_instance.subject.last_activity = max(
                subject_instance.subject.last_activity or 0,
                flux.current_timeline.time())
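
# Usage sketch (illustrative, not part of the original source):
# update_keepalive() pushes next_keepalive forward by keepalive_interval and,
# when ttl_seconds is set, schedules delete_at relative to the new deadline.
# The constructor arguments below are assumptions.
s = Session(logical_id='sess-1', user_id=1, hostname='worker-1')
s.keepalive_interval = 60
s.ttl_seconds = 3600
s.update_keepalive()
# now s.next_keepalive is ~60s in the future and
# s.delete_at == s.next_keepalive + 3600; is_abandoned() becomes true once
# next_keepalive passes without an end_time being recorded.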
Ejemplo n.º 51
0
        cls.state_info,
        'set',
        lambda t, v, o, i: utils.cut(v, 65500),
        retval=True)

# Many-to-one for 'ActionExecution' and 'TaskExecution'.

ActionExecution.task_execution_id = sa.Column(sa.String(36),
                                              sa.ForeignKey(
                                                  TaskExecution.id,
                                                  ondelete='CASCADE'),
                                              nullable=True)

TaskExecution.action_executions = relationship(
    ActionExecution,
    backref=backref('task_execution', remote_side=[TaskExecution.id]),
    cascade='all, delete-orphan',
    foreign_keys=ActionExecution.task_execution_id,
    lazy='select',
    passive_deletes=True)

sa.Index('%s_task_execution_id' % ActionExecution.__tablename__,
         'task_execution_id')

# Many-to-one for 'WorkflowExecution' and 'TaskExecution'.

WorkflowExecution.task_execution_id = sa.Column(sa.String(36),
                                                sa.ForeignKey(
                                                    TaskExecution.id,
                                                    ondelete='CASCADE'),
                                                nullable=True)
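
With the relationships above in place, child ActionExecution rows can be attached through the collection and navigated back through the `task_execution` backref. A minimal usage sketch, assuming a configured SQLAlchemy `session` and that the models accept column keyword arguments:

# Illustrative only; `session` and the UUID values are assumptions.
task_ex = TaskExecution(id='11111111-1111-1111-1111-111111111111')
action_ex = ActionExecution(id='22222222-2222-2222-2222-222222222222')

task_ex.action_executions.append(action_ex)  # populates task_execution_id on flush
session.add(task_ex)                         # cascade also adds action_ex
session.flush()

assert action_ex.task_execution is task_ex   # set immediately via the backref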
Example No. 52
class Library(Base, BaseMixin):
    __tablename__ = 'library'

    name = sa.Column(sa.Unicode(1024), nullable=False, server_default="")
    asset = relationship(Asset,
                         primaryjoin='''and_(
                                            Library.id==Asset.library_id, \
                                            Asset.is_deleted==false(), \
                                            Asset.storage!=null() \
                                        )''',
                         cascade="all,delete",
                         backref=backref("library", lazy="joined"))
    is_deleted = sa.Column(sa.Boolean, nullable=False, server_default=false())
    description = sa.Column(sa.Unicode(4096),
                            nullable=False,
                            server_default="")
    license = sa.Column(sa.SmallInteger, nullable=False, server_default='0')
    cover_storage = sa.Column(FileHandleStore, nullable=True)
    is_public = sa.Column(sa.Boolean, nullable=False, server_default=false())
    is_default = sa.Column(sa.Boolean,
                           nullable=False,
                           index=True,
                           server_default=false())
    priority = sa.Column(sa.Integer, nullable=False, server_default='0')
    price = sa.Column(sa.Float,
                      nullable=False,
                      index=True,
                      server_default='-1')
    launched_at = sa.Column(sa.DateTime, nullable=True)
    users = relationship("User",
                         secondary=user_library,
                         lazy='joined',
                         cascade='',
                         backref=backref("libraries", lazy="select"))
    purchased_users = relationship("User",
                                   secondary=user_purchased_library,
                                   primaryjoin='''and_(
            user_purchased_library.c.library_id==Library.id,
            Library.is_deleted==false()
        )''',
                                   lazy='select',
                                   cascade='',
                                   backref=backref("libraries_purchased",
                                                   lazy="select"))

    __table_args__ = (Index('ispublic_launchedat_idx', 'is_public',
                            'launched_at'), )

    @property
    def __acl__(self):
        acl = super(Library, self).__acl__()
        for user in self.users:
            acl = acl + [(Allow, user.email, 'get'),
                         (Allow, user.email, 'set')]
        return acl

    @classmethod
    def create(cls, session, *args, **kwargs):
        # create normal project
        # add default lib and itself to the lib projects list
        self = cls(*args, **kwargs)
        return self

    @property
    def cover_storage_url(self):
        if self.cover_storage:
            return get_upload_base_url() + self.cover_storage.url
        return self.users[0].avatar_url()

    def import_handle(self, handle):
        self.cover_storage = handle

    def serialize_min(self):
        return {
            'id': self.id,
            'name': self.name,
            'cover': self.cover_storage_url,
            'coverStorage': self.cover_storage_url,  # Deprecated
            'price': self.price,
            'license': self.license,
            'isPublic': self.is_public,
            'isNew': self.is_new,
            'isRecentUpdated': self.is_recent_updated,
            'assetCount': self.asset_count,
        }

    def serialize_og(self):
        return {
            'id': self.id,
            'name': self.name,
            'description': self.description,
            'cover': self.cover_storage_url,
        }

    def serialize(self, user=None):
        serialized_library = self.serialize_min()
        serialized_library['description'] = self.description
        serialized_library['isCollaborator'] = user in self.users
        serialized_library['author'] = (
            self.users[0].serialize_min() if self.users else None
        )  # Assume user[0] is the author
        serialized_library['updatedAt'] = self.updated_at.isoformat()
        serialized_library['launchedAt'] = (
            self.launched_at.isoformat() if self.launched_at else None
        )
        serialized_library['isSelected'] = self.has_user_selected(user)
        return serialized_library

    def serialize_profile(self):
        return {
            'id': self.id,
            'name': self.name,
            'cover': self.cover_storage_url,
            'priority': self.priority,
        }

    def serialize_store(self, user=None):
        serialized_store = self.serialize_min()
        serialized_store['isCollaborator'] = user in self.users
        serialized_store['isPurchased'] = self.has_user_purchased(user)
        return serialized_store

    def serialize_store_detail(self, user=None):
        serialized_store = self.serialize_store(user)
        serialized_store['description'] = self.description
        serialized_store['createdAt'] = self.created_at.isoformat()
        serialized_store['updatedAt'] = self.updated_at.isoformat()
        serialized_store['launchedAt'] = (
            self.launched_at.isoformat() if self.launched_at else None
        )
        serialized_store['author'] = (
            self.users[0].serialize_min() if self.users else None
        )  # Assume user[0] is the author
        serialized_store['credits'] = [
            u.serialize_credit() for u in self.get_asset_credits()
        ]
        serialized_store['isSelected'] = self.has_user_selected(user)
        return serialized_store

    def get_asset_credits(self):
        asset_users = [a.users for a in self.asset if not a.is_deleted]
        merged = set(itertools.chain.from_iterable(asset_users))
        credits = sorted(merged, key=lambda user: user.display_name)
        return credits

    def has_user_purchased(self, user):
        return user in self.purchased_users

    def has_user_selected(self, user):
        return user in self.selected_users

    @property
    def is_new(self):
        return bool(self.created_at + timedelta(days=7) > datetime.utcnow() \
            or (self.launched_at and self.launched_at + timedelta(days=7) > datetime.utcnow()))

    @property
    def is_recent_updated(self):
        return bool(self.updated_at + timedelta(days=7) > datetime.utcnow())

    @property
    def is_launched(self):
        return bool(self.launched_at)

    @property
    def asset_count(self):
        return AssetQuery(DBSession).count_by_library(self)
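
The serializers above differ only in how much detail they expose. A minimal sketch of how they might be used, assuming an existing `Library` row bound to `library` and a `User` instance bound to `user`:

# Illustrative only; `library` and `user` are assumed to exist.
summary = library.serialize_min()               # id, name, cover, price, flags
store_card = library.serialize_store(user)      # + isCollaborator / isPurchased
detail = library.serialize_store_detail(user)   # + description, dates, credits

if not library.has_user_purchased(user) and library.price >= 0:
    pass  # hypothetical purchase flow; not part of this snippet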
Example No. 53
class DagRun(Base, LoggingMixin):
    """
    DagRun describes an instance of a Dag. It can be created
    by the scheduler (for regular runs) or by an external trigger
    """

    __tablename__ = "dag_run"

    id = Column(Integer, primary_key=True)
    dag_id = Column(String(ID_LEN))
    execution_date = Column(UtcDateTime, default=timezone.utcnow)
    start_date = Column(UtcDateTime, default=timezone.utcnow)
    end_date = Column(UtcDateTime)
    _state = Column('state', String(50), default=State.RUNNING)
    run_id = Column(String(ID_LEN))
    creating_job_id = Column(Integer)
    external_trigger = Column(Boolean, default=True)
    run_type = Column(String(50), nullable=False)
    conf = Column(PickleType)
    # When a scheduler last attempted to schedule TIs for this DagRun
    last_scheduling_decision = Column(UtcDateTime)
    dag_hash = Column(String(32))

    dag = None

    __table_args__ = (
        Index('dag_id_state', dag_id, _state),
        UniqueConstraint('dag_id', 'execution_date'),
        UniqueConstraint('dag_id', 'run_id'),
        Index('idx_last_scheduling_decision', last_scheduling_decision),
    )

    task_instances = relationship(
        TI,
        primaryjoin=and_(TI.dag_id == dag_id,
                         TI.execution_date == execution_date),  # type: ignore
        foreign_keys=(dag_id, execution_date),
        backref=backref('dag_run', uselist=False),
    )

    DEFAULT_DAGRUNS_TO_EXAMINE = airflow_conf.getint(
        'scheduler',
        'max_dagruns_per_loop_to_schedule',
        fallback=20,
    )

    def __init__(
        self,
        dag_id: Optional[str] = None,
        run_id: Optional[str] = None,
        execution_date: Optional[datetime] = None,
        start_date: Optional[datetime] = None,
        external_trigger: Optional[bool] = None,
        conf: Optional[Any] = None,
        state: Optional[str] = None,
        run_type: Optional[str] = None,
        dag_hash: Optional[str] = None,
        creating_job_id: Optional[int] = None,
    ):
        self.dag_id = dag_id
        self.run_id = run_id
        self.execution_date = execution_date
        self.start_date = start_date
        self.external_trigger = external_trigger
        self.conf = conf or {}
        self.state = state
        self.run_type = run_type
        self.dag_hash = dag_hash
        self.creating_job_id = creating_job_id
        super().__init__()

    def __repr__(self):
        return ('<DagRun {dag_id} @ {execution_date}: {run_id}, '
                'externally triggered: {external_trigger}>').format(
                    dag_id=self.dag_id,
                    execution_date=self.execution_date,
                    run_id=self.run_id,
                    external_trigger=self.external_trigger)

    def get_state(self):
        return self._state

    def set_state(self, state):
        if self._state != state:
            self._state = state
            self.end_date = (
                timezone.utcnow() if self._state in State.finished else None
            )

    @declared_attr
    def state(self):
        return synonym('_state',
                       descriptor=property(self.get_state, self.set_state))

    @provide_session
    def refresh_from_db(self, session: Session = None):
        """
        Reloads the current dagrun from the database

        :param session: database session
        :type session: Session
        """
        DR = DagRun

        exec_date = func.cast(self.execution_date, DateTime)

        dr = session.query(DR).filter(
            DR.dag_id == self.dag_id,
            func.cast(DR.execution_date, DateTime) == exec_date,
            DR.run_id == self.run_id).one()

        self.id = dr.id
        self.state = dr.state

    @classmethod
    def next_dagruns_to_examine(
        cls,
        session: Session,
        max_number: Optional[int] = None,
    ):
        """
        Return the next DagRuns that the scheduler should attempt to schedule.

        This will return zero or more DagRun rows that are row-level locked with a "SELECT ... FOR UPDATE"
        query. You should ensure that any scheduling decisions are made in a single transaction -- as soon as
        the transaction is committed, the rows are unlocked.

        :rtype: list[airflow.models.DagRun]
        """
        from airflow.models.dag import DagModel

        if max_number is None:
            max_number = cls.DEFAULT_DAGRUNS_TO_EXAMINE

        # TODO: Bake this query, it is run _A lot_
        query = session.query(cls).filter(
            cls.state == State.RUNNING,
            cls.run_type != DagRunType.BACKFILL_JOB).join(
                DagModel,
                DagModel.dag_id == cls.dag_id,
            ).filter(
                DagModel.is_paused.is_(False),
                DagModel.is_active.is_(True),
            ).order_by(
                nulls_first(cls.last_scheduling_decision, session=session),
                cls.execution_date,
            )

        if not settings.ALLOW_FUTURE_EXEC_DATES:
            query = query.filter(DagRun.execution_date <= func.now())

        return with_row_locks(query.limit(max_number),
                              of=cls,
                              **skip_locked(session=session))

    @staticmethod
    @provide_session
    def find(dag_id: Optional[Union[str, List[str]]] = None,
             run_id: Optional[str] = None,
             execution_date: Optional[datetime] = None,
             state: Optional[str] = None,
             external_trigger: Optional[bool] = None,
             no_backfills: bool = False,
             run_type: Optional[DagRunType] = None,
             session: Session = None,
             execution_start_date: Optional[datetime] = None,
             execution_end_date: Optional[datetime] = None) -> List["DagRun"]:
        """
        Returns a set of dag runs for the given search criteria.

        :param dag_id: the dag_id or list of dag_id to find dag runs for
        :type dag_id: str or list[str]
        :param run_id: defines the run id for this dag run
        :type run_id: str
        :param run_type: type of DagRun
        :type run_type: airflow.utils.types.DagRunType
        :param execution_date: the execution date
        :type execution_date: datetime.datetime or list[datetime.datetime]
        :param state: the state of the dag run
        :type state: str
        :param external_trigger: whether this dag run is externally triggered
        :type external_trigger: bool
        :param no_backfills: return no backfills (True), return all (False).
            Defaults to False
        :type no_backfills: bool
        :param session: database session
        :type session: sqlalchemy.orm.session.Session
        :param execution_start_date: dag run that was executed from this date
        :type execution_start_date: datetime.datetime
        :param execution_end_date: dag run that was executed until this date
        :type execution_end_date: datetime.datetime
        """
        DR = DagRun

        qry = session.query(DR)
        dag_ids = [dag_id] if isinstance(dag_id, str) else dag_id
        if dag_ids:
            qry = qry.filter(DR.dag_id.in_(dag_ids))
        if run_id:
            qry = qry.filter(DR.run_id == run_id)
        if execution_date:
            if isinstance(execution_date, list):
                qry = qry.filter(DR.execution_date.in_(execution_date))
            else:
                qry = qry.filter(DR.execution_date == execution_date)
        if execution_start_date and execution_end_date:
            qry = qry.filter(
                DR.execution_date.between(execution_start_date,
                                          execution_end_date))
        elif execution_start_date:
            qry = qry.filter(DR.execution_date >= execution_start_date)
        elif execution_end_date:
            qry = qry.filter(DR.execution_date <= execution_end_date)
        if state:
            qry = qry.filter(DR.state == state)
        if external_trigger is not None:
            qry = qry.filter(DR.external_trigger == external_trigger)
        if run_type:
            qry = qry.filter(DR.run_type == run_type)
        if no_backfills:
            qry = qry.filter(DR.run_type != DagRunType.BACKFILL_JOB)

        dr = qry.order_by(DR.execution_date).all()

        return dr

    @staticmethod
    def generate_run_id(run_type: DagRunType, execution_date: datetime) -> str:
        """Generate Run ID based on Run Type and Execution Date"""
        return f"{run_type}__{execution_date.isoformat()}"

    @provide_session
    def get_task_instances(self, state=None, session=None):
        """Returns the task instances for this dag run"""
        tis = session.query(TI).filter(
            TI.dag_id == self.dag_id,
            TI.execution_date == self.execution_date,
        )

        if state:
            if isinstance(state, str):
                tis = tis.filter(TI.state == state)
            else:
                # this is required to deal with NULL values
                if None in state:
                    if all(x is None for x in state):
                        tis = tis.filter(TI.state.is_(None))
                    else:
                        not_none_state = [s for s in state if s]
                        tis = tis.filter(
                            or_(TI.state.in_(not_none_state),
                                TI.state.is_(None)))
                else:
                    tis = tis.filter(TI.state.in_(state))

        if self.dag and self.dag.partial:
            tis = tis.filter(TI.task_id.in_(self.dag.task_ids))
        return tis.all()

    @provide_session
    def get_task_instance(self, task_id: str, session: Session = None):
        """
        Returns the task instance specified by task_id for this dag run

        :param task_id: the task id
        :type task_id: str
        :param session: Sqlalchemy ORM Session
        :type session: Session
        """
        ti = session.query(TI).filter(TI.dag_id == self.dag_id,
                                      TI.execution_date == self.execution_date,
                                      TI.task_id == task_id).first()

        return ti

    def get_dag(self):
        """
        Returns the Dag associated with this DagRun.

        :return: DAG
        """
        if not self.dag:
            raise AirflowException(
                "The DAG (.dag) for {} needs to be set".format(self))

        return self.dag

    @provide_session
    def get_previous_dagrun(self,
                            state: Optional[str] = None,
                            session: Session = None) -> Optional['DagRun']:
        """The previous DagRun, if there is one"""
        filters = [
            DagRun.dag_id == self.dag_id,
            DagRun.execution_date < self.execution_date,
        ]
        if state is not None:
            filters.append(DagRun.state == state)
        return session.query(DagRun).filter(*filters).order_by(
            DagRun.execution_date.desc()).first()

    @provide_session
    def get_previous_scheduled_dagrun(self, session: Session = None):
        """The previous, SCHEDULED DagRun, if there is one"""
        dag = self.get_dag()

        return session.query(DagRun).filter(
            DagRun.dag_id == self.dag_id,
            DagRun.execution_date == dag.previous_schedule(
                self.execution_date)).first()

    @provide_session
    def update_state(
        self,
        session: Session = None,
        execute_callbacks: bool = True
    ) -> Tuple[List[TI], Optional[callback_requests.DagCallbackRequest]]:
        """
        Determines the overall state of the DagRun based on the state
        of its TaskInstances.

        :param session: Sqlalchemy ORM Session
        :type session: Session
        :param execute_callbacks: Should dag callbacks (success/failure, SLA etc) be invoked
            directly (default: true) or recorded as a pending request in the ``callback`` property
        :type execute_callbacks: bool
        :return: Tuple containing tis that can be scheduled in the current loop & `callback` that
            needs to be executed
        """
        # Callback to execute in case of Task Failures
        callback: Optional[callback_requests.DagCallbackRequest] = None

        start_dttm = timezone.utcnow()
        self.last_scheduling_decision = start_dttm

        dag = self.get_dag()
        ready_tis: List[TI] = []
        tis = list(
            self.get_task_instances(session=session,
                                    state=State.task_states +
                                    (State.SHUTDOWN, )))
        self.log.debug("number of tis tasks for %s: %s task(s)", self,
                       len(tis))
        for ti in tis:
            ti.task = dag.get_task(ti.task_id)

        unfinished_tasks = [t for t in tis if t.state in State.unfinished]
        finished_tasks = [
            t for t in tis
            if t.state in State.finished | {State.UPSTREAM_FAILED}
        ]
        none_depends_on_past = all(not t.task.depends_on_past
                                   for t in unfinished_tasks)
        none_task_concurrency = all(t.task.task_concurrency is None
                                    for t in unfinished_tasks)
        if unfinished_tasks:
            scheduleable_tasks = [
                ut for ut in unfinished_tasks
                if ut.state in SCHEDULEABLE_STATES
            ]
            self.log.debug("number of scheduleable tasks for %s: %s task(s)",
                           self, len(scheduleable_tasks))
            ready_tis, changed_tis = self._get_ready_tis(
                scheduleable_tasks, finished_tasks, session)
            self.log.debug("ready tis length for %s: %s task(s)", self,
                           len(ready_tis))
            if none_depends_on_past and none_task_concurrency:
                # small speed up
                are_runnable_tasks = ready_tis or self._are_premature_tis(
                    unfinished_tasks, finished_tasks, session) or changed_tis

        duration = (timezone.utcnow() - start_dttm)
        Stats.timing("dagrun.dependency-check.{}".format(self.dag_id),
                     duration)

        leaf_task_ids = {t.task_id for t in dag.leaves}
        leaf_tis = [ti for ti in tis if ti.task_id in leaf_task_ids]

        # if all roots finished and at least one failed, the run failed
        if not unfinished_tasks and any(
                leaf_ti.state in {State.FAILED, State.UPSTREAM_FAILED}
                for leaf_ti in leaf_tis):
            self.log.error('Marking run %s failed', self)
            self.set_state(State.FAILED)
            if execute_callbacks:
                dag.handle_callback(self,
                                    success=False,
                                    reason='task_failure',
                                    session=session)
            else:
                callback = callback_requests.DagCallbackRequest(
                    full_filepath=dag.fileloc,
                    dag_id=self.dag_id,
                    execution_date=self.execution_date,
                    is_failure_callback=True,
                    msg='task_failure')

        # if all leafs succeeded and no unfinished tasks, the run succeeded
        elif not unfinished_tasks and all(
                leaf_ti.state in {State.SUCCESS, State.SKIPPED}
                for leaf_ti in leaf_tis):
            self.log.info('Marking run %s successful', self)
            self.set_state(State.SUCCESS)
            if execute_callbacks:
                dag.handle_callback(self,
                                    success=True,
                                    reason='success',
                                    session=session)
            else:
                callback = callback_requests.DagCallbackRequest(
                    full_filepath=dag.fileloc,
                    dag_id=self.dag_id,
                    execution_date=self.execution_date,
                    is_failure_callback=False,
                    msg='success')

        # if *all tasks* are deadlocked, the run failed
        elif (unfinished_tasks and none_depends_on_past
              and none_task_concurrency and not are_runnable_tasks):
            self.log.error('Deadlock; marking run %s failed', self)
            self.set_state(State.FAILED)
            if execute_callbacks:
                dag.handle_callback(self,
                                    success=False,
                                    reason='all_tasks_deadlocked',
                                    session=session)
            else:
                callback = callback_requests.DagCallbackRequest(
                    full_filepath=dag.fileloc,
                    dag_id=self.dag_id,
                    execution_date=self.execution_date,
                    is_failure_callback=True,
                    msg='all_tasks_deadlocked')

        # finally, if the roots aren't done, the dag is still running
        else:
            self.set_state(State.RUNNING)

        self._emit_duration_stats_for_finished_state()

        session.merge(self)

        return ready_tis, callback

    def _get_ready_tis(
        self,
        scheduleable_tasks: List[TI],
        finished_tasks: List[TI],
        session: Session,
    ) -> Tuple[List[TI], bool]:
        old_states = {}
        ready_tis: List[TI] = []
        changed_tis = False

        if not scheduleable_tasks:
            return ready_tis, changed_tis

        # Check dependencies
        for st in scheduleable_tasks:
            old_state = st.state
            if st.are_dependencies_met(dep_context=DepContext(
                    flag_upstream_failed=True, finished_tasks=finished_tasks),
                                       session=session):
                ready_tis.append(st)
            else:
                old_states[st.key] = old_state

        # Check if any ti changed state
        tis_filter = TI.filter_for_tis(old_states.keys())
        if tis_filter is not None:
            fresh_tis = session.query(TI).filter(tis_filter).all()
            changed_tis = any(ti.state != old_states[ti.key]
                              for ti in fresh_tis)

        return ready_tis, changed_tis

    def _are_premature_tis(
        self,
        unfinished_tasks: List[TI],
        finished_tasks: List[TI],
        session: Session,
    ) -> bool:
        # there might be runnable tasks that are up for retry and for some reason(retry delay, etc) are
        # not ready yet so we set the flags to count them in
        for ut in unfinished_tasks:
            if ut.are_dependencies_met(dep_context=DepContext(
                    flag_upstream_failed=True,
                    ignore_in_retry_period=True,
                    ignore_in_reschedule_period=True,
                    finished_tasks=finished_tasks),
                                       session=session):
                return True
        return False

    def _emit_duration_stats_for_finished_state(self):
        if self.state == State.RUNNING:
            return

        duration = (self.end_date - self.start_date)
        if self.state == State.SUCCESS:
            Stats.timing('dagrun.duration.success.{}'.format(self.dag_id),
                         duration)
        elif self.state == State.FAILED:
            Stats.timing('dagrun.duration.failed.{}'.format(self.dag_id),
                         duration)

    @provide_session
    def verify_integrity(self, session: Session = None):
        """
        Verifies the DagRun by checking for removed tasks or tasks that are not in the
        database yet. It will set state to removed or add the task if required.

        :param session: Sqlalchemy ORM Session
        :type session: Session
        """
        dag = self.get_dag()
        tis = self.get_task_instances(session=session)

        # check for removed or restored tasks
        task_ids = set()
        for ti in tis:
            task_instance_mutation_hook(ti)
            task_ids.add(ti.task_id)
            task = None
            try:
                task = dag.get_task(ti.task_id)
            except AirflowException:
                if ti.state == State.REMOVED:
                    pass  # ti has already been removed, just ignore it
                elif self.state != State.RUNNING and not dag.partial:
                    self.log.warning(
                        "Failed to get task '%s' for dag '%s'. "
                        "Marking it as removed.", ti, dag)
                    Stats.incr("task_removed_from_dag.{}".format(dag.dag_id),
                               1, 1)
                    ti.state = State.REMOVED

            should_restore_task = (task
                                   is not None) and ti.state == State.REMOVED
            if should_restore_task:
                self.log.info(
                    "Restoring task '%s' which was previously "
                    "removed from DAG '%s'", ti, dag)
                Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1)
                ti.state = State.NONE
            session.merge(ti)

        # check for missing tasks
        for task in dag.task_dict.values():
            if task.start_date > self.execution_date and not self.is_backfill:
                continue

            if task.task_id not in task_ids:
                Stats.incr("task_instance_created-{}".format(task.task_type),
                           1, 1)
                ti = TI(task, self.execution_date)
                task_instance_mutation_hook(ti)
                session.add(ti)

        try:
            session.flush()
        except IntegrityError as err:
            self.log.info(str(err))
            self.log.info('Hit IntegrityError while creating the TIs for '
                          f'{dag.dag_id} - {self.execution_date}.')
            self.log.info('Doing session rollback.')
            # TODO[HA]: We probably need to savepoint this so we can keep the transaction alive.
            session.rollback()

    @staticmethod
    def get_run(session: Session, dag_id: str, execution_date: datetime):
        """
        Get a single DAG Run

        :param session: Sqlalchemy ORM Session
        :type session: Session
        :param dag_id: DAG ID
        :type dag_id: unicode
        :param execution_date: execution date
        :type execution_date: datetime
        :return: DagRun corresponding to the given dag_id and execution date
            if one exists. None otherwise.
        :rtype: airflow.models.DagRun
        """
        qry = session.query(DagRun).filter(
            DagRun.dag_id == dag_id,
            DagRun.external_trigger == False,  # noqa pylint: disable=singleton-comparison
            DagRun.execution_date == execution_date,
        )
        return qry.first()

    @property
    def is_backfill(self):
        return self.run_type == DagRunType.BACKFILL_JOB

    @classmethod
    @provide_session
    def get_latest_runs(cls, session=None):
        """Returns the latest DagRun for each DAG"""
        subquery = (session.query(
            cls.dag_id,
            func.max(cls.execution_date).label('execution_date')).group_by(
                cls.dag_id).subquery())
        dagruns = (session.query(cls).join(
            subquery,
            and_(cls.dag_id == subquery.c.dag_id,
                 cls.execution_date == subquery.c.execution_date)).all())
        return dagruns
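
A minimal usage sketch for the query helpers above, assuming a SQLAlchemy `session`, a hypothetical `dagbag` for DAG lookup, and an example dag_id:

# Illustrative only; `session`, `dagbag` and the dag_id are assumptions.
running = DagRun.find(dag_id='example_dag', state=State.RUNNING, session=session)

# Scheduler-style loop: rows come back locked with SELECT ... FOR UPDATE, so all
# scheduling decisions should happen before the surrounding transaction commits.
for dag_run in DagRun.next_dagruns_to_examine(session, max_number=5):
    dag_run.dag = dagbag.get_dag(dag_run.dag_id)  # update_state() needs .dag set
    ready_tis, callback = dag_run.update_state(session=session,
                                               execute_callbacks=False)
session.commit()  # releases the row locks taken above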
Example No. 54
# `self.Base` indicates this class is defined inside a method (e.g. a test
# setup routine) in the original source.
class Article(self.Base):
    __tablename__ = 'article'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    author_id = Column(Integer, ForeignKey('person.id'))
    author = relationship(Person, backref=backref('articles'))
Example No. 55
class SalesOrder(db.Model, DataSecurityMixin):
    __tablename__ = 'sales_order'
    id = Column(Integer, primary_key=True)
    logistic_amount = Column(Numeric(precision=8, scale=2, decimal_return_scale=2))
    order_date = Column(DateTime, nullable=False)
    external_id = Column(String(), nullable=True, unique=False)

    customer_id = Column(Integer, ForeignKey('customer.id'), nullable=True)
    customer = relationship('Customer', foreign_keys=[customer_id], backref=backref('sales_orders', uselist=True))

    organization_id = db.Column(Integer, ForeignKey('organization.id'))
    organization = relationship('Organization', foreign_keys=[organization_id])

    type_id = Column(Integer, ForeignKey('enum_values.id'), nullable=False)
    type = relationship('EnumValues', foreign_keys=[type_id])

    status_id = Column(Integer, ForeignKey('enum_values.id'), nullable=False)
    status = relationship('EnumValues', foreign_keys=[status_id])

    remark = Column(Text)

    @hybrid_property
    def actual_amount(self):
        return format_decimal(Decimal(sum(line.actual_amount for line in self.lines)))

    @actual_amount.expression
    def actual_amount(self):
        return (select([func.sum(SalesOrderLine.unit_price * SalesOrderLine.quantity)])
                .where(self.id == SalesOrderLine.sales_order_id).label('actual_amount'))

    @actual_amount.setter
    def actual_amount(self, value):
        pass

    @hybrid_property
    def original_amount(self):
        return format_decimal(Decimal(sum(line.original_amount for line in self.lines)))

    @original_amount.expression
    def original_amount(self):
        return (select([func.sum(SalesOrderLine.original_amount)])
                .where(self.id == SalesOrderLine.sales_order_id)
                .label('original_amount'))

    @original_amount.setter
    def original_amount(self, value):
        pass

    @hybrid_property
    def discount_amount(self):
        return self.original_amount - self.actual_amount

    @discount_amount.setter
    def discount_amount(self, value):
        pass

    def __unicode__(self):
        return str(self.id) + ' - ' + str(self.actual_amount)

    def can_delete(self):
        can = super(SalesOrder, self).can_delete()
        return (can and self.status.code == const.SO_CREATED_STATUS_KEY
                and self.type.code != const.FRANCHISE_SO_TYPE_KEY)

    def can_edit(self, user=current_user):
        can = super(SalesOrder, self).can_edit()
        return can and self.status.code != const.SO_DELIVERED_STATUS_KEY

    @staticmethod
    def status_option_filter():
        from psi.app.models.enum_values import EnumValues
        return EnumValues.type_filter(const.SO_STATUS_KEY)
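
Because `actual_amount` and `original_amount` are hybrid properties, the same attribute works on instances (summing the related `lines`) and inside queries (via the `@...expression` correlated subquery). A minimal sketch, assuming a Flask-SQLAlchemy `db.session`:

# Illustrative only; the primary key and threshold are hypothetical.
order = db.session.query(SalesOrder).get(1)
print(order.actual_amount)                            # computed in Python from order.lines

big_orders = (db.session.query(SalesOrder)
              .filter(SalesOrder.actual_amount > 100)  # correlated subquery in SQL
              .all())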
Example No. 56
class Section(DiscussionBoundBase):

    """Assembl configurable sections."""

    __tablename__ = "section"
    type = Column(String(60), nullable=False)

    id = Column(Integer, primary_key=True)

    discussion_id = Column(
        Integer,
        ForeignKey(
            'discussion.id',
            ondelete='CASCADE',
            onupdate='CASCADE',
        ),
        nullable=False, index=True)

    discussion = relationship(
        "Discussion",
        backref=backref(
            'sections',
            cascade="all, delete-orphan"),
    )

    title_id = Column(
        Integer(), ForeignKey(LangString.id))
    title = relationship(
        LangString,
        lazy="joined", single_parent=True,
        primaryjoin=title_id == LangString.id,
        backref=backref("section_from_title", lazy="dynamic"),
        cascade="all, delete-orphan")

    url = Column(URLString)

    section_type = Column(
        Enum(*section_types, name='section_types'),
        nullable=False,
        default=SectionTypesEnum.CUSTOM.value,
        server_default=SectionTypesEnum.CUSTOM.value
    )

    order = Column(
        Float, nullable=False, default=0.0)

    def get_discussion_id(self):

        return self.discussion_id or self.discussion.id

    @classmethod
    def get_discussion_conditions(cls, discussion_id, alias_maker=None):
        return (cls.discussion_id == discussion_id,)

    __mapper_args__ = {
        'polymorphic_identity': 'section',
        'polymorphic_on': type,
        'with_polymorphic': '*'
    }

    crud_permissions = CrudPermissions(
        P_ADMIN_DISC, P_READ, P_ADMIN_DISC, P_ADMIN_DISC)
Example No. 57
class OperationGroup(Base, Exportable):
    """
    We use this group entity to map, in the DB, a group of operations that
    were started at the same time by the user.
    """
    __tablename__ = "OPERATION_GROUPS"

    id = Column(Integer, primary_key=True)
    name = Column(String)
    range1 = Column(String)
    range2 = Column(String)
    range3 = Column(String)
    gid = Column(String)
    fk_launched_in = Column(Integer,
                            ForeignKey('PROJECTS.id', ondelete="CASCADE"))
    project = relationship(Project,
                           backref=backref('OPERATION_GROUPS',
                                           order_by=id,
                                           cascade="all,delete"))

    def __init__(self, project_id, name='incomplete', ranges=None):
        # Avoid a mutable default argument; fall back to an empty list.
        ranges = ranges or []
        self.name = name
        if len(ranges) > 0:
            self.range1 = ranges[0]
        if len(ranges) > 1:
            self.range2 = ranges[1]
        if len(ranges) > 2:
            self.range3 = ranges[2]
        self.gid = generate_guid()
        self.fk_launched_in = project_id

    def __repr__(self):
        return "<OperationGroup(%s,%s)>" % (self.name, self.gid)

    @property
    def range_references(self):
        """Memorized range starter"""
        ranges = [self.range1]
        if self.range2 and self.range2 != 'null':
            ranges.append(self.range2)
        if self.range3 and self.range3 != 'null':
            ranges.append(self.range3)
        return ranges

    def fill_operationgroup_name(self, entities_in_group):
        """
        Display name for UI.
        """
        new_name = "of " + entities_in_group + " varying "
        if self.range1 is not None:
            new_name += json.loads(self.range1)[0]
        if self.range2 is not None:
            new_name += " x " + json.loads(self.range2)[0]
        if self.range3 is not None:
            new_name += " x " + json.loads(self.range3)[0]

        new_name += " - " + date2string(datetime.datetime.now(),
                                        date_format=LESS_COMPLEX_TIME_FORMAT)
        self.name = new_name

    @staticmethod
    def load_range_numbers(range_value):
        """
        Parse the range values for a given json-like string.

        :return (Boolean_are_all_numbers, range_field_name, array_range_values)
        """
        if range_value is None:
            return None, RANGE_MISSING_STRING, [RANGE_MISSING_STRING]

        loaded_json = json.loads(range_value)
        range_name = loaded_json[0]
        range_values = loaded_json[1]
        can_interpolate_range = True  # Assume this is a numeric range that we can interpolate
        for idx, entry in enumerate(range_values):
            try:
                range_values[idx] = float(entry)
            except ValueError:
                # It's a DataType range
                can_interpolate_range = False
        return can_interpolate_range, range_name, range_values

    @property
    def has_only_numeric_ranges(self):
        """
        :returns True when all range fields are either None or could be parsed into a numeric array.
        """
        is_numeric = [
            self.load_range_numbers(self.range1)[0],
            self.load_range_numbers(self.range2)[0],
            self.load_range_numbers(self.range3)[0]
        ]

        for num in is_numeric:
            if num is False:
                return False
        return True
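
`load_range_numbers` expects the JSON stored in `range1`..`range3` to be a two-element list of `[field_name, values]`. A minimal sketch of the three possible outcomes (the field names are hypothetical):

# Numeric range: every value parses as a float.
OperationGroup.load_range_numbers('["conduction_speed", [1.0, 2.0, 3.0]]')
# -> (True, 'conduction_speed', [1.0, 2.0, 3.0])

# DataType range: values cannot be parsed as numbers, so interpolation is off.
OperationGroup.load_range_numbers('["connectivity", ["gid-1", "gid-2"]]')
# -> (False, 'connectivity', ['gid-1', 'gid-2'])

# Missing range.
OperationGroup.load_range_numbers(None)
# -> (None, RANGE_MISSING_STRING, [RANGE_MISSING_STRING])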
Example No. 58
class Indicator(Base):
    __tablename__ = "indicators"

    id = Column(Integer, primary_key=True)
    indicator = Column(UnicodeText, index=True)
    group = Column(String)
    itype = Column(String, index=True)
    tlp = Column(String)
    provider = Column(String, index=True)
    portlist = Column(String)
    asn_desc = Column(UnicodeText, index=True)
    asn = Column(Float)
    cc = Column(String, index=True)
    protocol = Column(Integer)
    reporttime = Column(DateTime, index=True)
    firsttime = Column(DateTime)
    lasttime = Column(DateTime, index=True)
    confidence = Column(Float, index=True)
    timezone = Column(String)
    city = Column(String)
    longitude = Column(String)
    latitude = Column(String)
    peers = Column(UnicodeText)
    description = Column(UnicodeText)
    additional_data = Column(UnicodeText)
    rdata = Column(UnicodeText, index=True)
    count = Column(Integer)
    region = Column(String, index=True)

    tags = relationship('Tag',
                        primaryjoin='and_(Indicator.id==Tag.indicator_id)',
                        backref=backref('tags', uselist=True),
                        lazy='subquery',
                        cascade="all,delete")

    messages = relationship(
        'Message',
        primaryjoin='and_(Indicator.id==Message.indicator_id)',
        backref=backref('messages', uselist=True),
        lazy='subquery',
        cascade="all,delete")

    def __init__(self,
                 indicator=None,
                 itype=None,
                 tlp=None,
                 provider=None,
                 portlist=None,
                 asn=None,
                 asn_desc=None,
                 cc=None,
                 protocol=None,
                 firsttime=None,
                 lasttime=None,
                 reporttime=None,
                 group="everyone",
                 confidence=None,
                 reference=None,
                 reference_tlp=None,
                 application=None,
                 timezone=None,
                 city=None,
                 longitude=None,
                 latitude=None,
                 peers=None,
                 description=None,
                 additional_data=None,
                 rdata=None,
                 msg=None,
                 count=1,
                 region=None,
                 version=None,
                 **kwargs):

        self.indicator = indicator
        self.group = group
        self.itype = itype
        self.tlp = tlp
        self.provider = provider
        self.portlist = str(portlist)
        self.asn = asn
        self.asn_desc = asn_desc
        self.cc = cc
        self.protocol = protocol
        self.reporttime = reporttime
        self.firsttime = firsttime
        self.lasttime = lasttime
        self.confidence = confidence
        self.reference = reference
        self.reference_tlp = reference_tlp
        self.timezone = timezone
        self.city = city
        self.longitude = longitude
        self.latitude = latitude
        self.peers = peers
        self.description = description
        self.additional_data = additional_data
        self.rdata = rdata
        self.count = count
        self.region = region

        if self.reporttime and isinstance(self.reporttime, basestring):
            self.reporttime = arrow.get(self.reporttime).datetime

        if self.lasttime and isinstance(self.lasttime, basestring):
            self.lasttime = arrow.get(self.lasttime).datetime

        if self.firsttime and isinstance(self.firsttime, basestring):
            self.firsttime = arrow.get(self.firsttime).datetime

        if self.peers is not None:
            self.peers = json.dumps(self.peers)

        if self.additional_data is not None:
            self.additional_data = json.dumps(self.additional_data)
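
Note that `__init__` coerces string timestamps with `arrow` and serializes `peers` and `additional_data` to JSON text. A minimal construction sketch with hypothetical values:

# Illustrative only; all field values are hypothetical.
i = Indicator(indicator='example.com',
              itype='fqdn',
              provider='csirtg.io',
              tlp='green',
              reporttime='2015-01-01T00:00:00Z',
              peers=[{'asn': 1234, 'cc': 'US'}])

i.reporttime  # datetime parsed via arrow.get(...)
i.peers       # JSON string, e.g. '[{"asn": 1234, "cc": "US"}]'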
Example No. 59
class Menu(BaseModel):
    '''model for Menus'''

    __tablename__ = 'menu'

    id = Column(Integer, primary_key=True)
    # Use a callable so the default is evaluated per insert, not once at import time.
    date = Column(DateTime, default=lambda: datetime.utcnow().date(), unique=True)
    meals = relationship('Meal',
                         secondary='menu_meals',
                         backref=backref('menu_meals', lazy=True,
                                         uselist=True))

    def __init__(self, date=None):
        if date:
            self.date = date
        else:
            today = datetime.utcnow().date()
            self.date = datetime(year=today.year,
                                 month=today.month,
                                 day=today.day)

    def add_meal(self, meal, date=None):
        '''Add meal to menu'''
        if not date:
            today = datetime.utcnow().date()
            date = datetime(year=today.year, month=today.month, day=today.day)

        menu = Menu.query.filter_by(date=date).first()
        if not menu:
            menu = Menu(date=date)
        if isinstance(meal, Meal):
            meal = [meal]
        self.put('meals', meal)
        self.save()

    def view(self):
        '''display menu'''
        meals = []
        if self.meals:
            meals = [{
                'meal_id': meal.meal_id,
                'name': meal.name,
                'price': meal.price,
                'description': meal.description,
                'caterer': meal.caterer.username
            } for meal in self.meals]
        return {'id': self.id, 'date': self.date.timestamp(), 'meals': meals}

    @staticmethod
    def get_by_date(date):
        menu = Menu.get(date=date)
        if menu:
            menu = menu.view()
        else:
            menu = {'date': date.timestamp(), 'meals': []}
        default_meals = Meal.query.filter_by(default=True).all()
        default_meals = [meal.view() for meal in default_meals]
        menu['meals'].extend(default_meals)
        if menu['meals']:
            return menu
        return None
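
A minimal usage sketch for the model above, assuming an existing `Meal` instance bound to `meal` and the `BaseModel` persistence helpers (`save`, `get`) used by the class:

# Illustrative only; `meal` is an assumed Meal instance.
menu = Menu()                          # defaults to today's date (UTC)
menu.add_meal(meal)                    # attaches the meal and saves the menu

payload = Menu.get_by_date(menu.date)  # dict with 'id', 'date' (timestamp) and
                                       # 'meals', or None when nothing is on offer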
Example No. 60
class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
    id = Column(db.Integer, primary_key=True)
    org_id = Column(db.Integer, db.ForeignKey("organizations.id"))
    org = db.relationship(Organization)
    data_source_id = Column(db.Integer, db.ForeignKey("data_sources.id"))
    data_source = db.relationship(DataSource, backref=backref("query_results"))
    query_hash = Column(db.String(32), index=True)
    query_text = Column("query", db.Text)
    _data = Column("data", db.Text)
    runtime = Column(postgresql.DOUBLE_PRECISION)
    retrieved_at = Column(db.DateTime(True))

    __tablename__ = "query_results"

    def __str__(self):
        return "%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)

    def to_dict(self):
        return {
            "id": self.id,
            "query_hash": self.query_hash,
            "query": self.query_text,
            "data": self.data,
            "data_source_id": self.data_source_id,
            "runtime": self.runtime,
            "retrieved_at": self.retrieved_at,
        }

    @classmethod
    def unused(cls, days=7):
        age_threshold = datetime.datetime.now() - datetime.timedelta(days=days)
        return (cls.query.filter(
            Query.id.is_(None),
            cls.retrieved_at < age_threshold).outerjoin(Query)).options(
                load_only("id"))

    @classmethod
    def get_latest(cls, data_source, query, max_age=0):
        query_hash = utils.gen_query_hash(query)

        if max_age == -1:
            query = cls.query.filter(cls.query_hash == query_hash,
                                     cls.data_source == data_source)
        else:
            query = cls.query.filter(
                cls.query_hash == query_hash,
                cls.data_source == data_source,
                (db.func.timezone("utc", cls.retrieved_at) +
                 datetime.timedelta(seconds=max_age) >= db.func.timezone(
                     "utc", db.func.now())),
            )

        return query.order_by(cls.retrieved_at.desc()).first()

    @classmethod
    def store_result(cls, org, data_source, query_hash, query, data, run_time,
                     retrieved_at):
        query_result = cls(
            org_id=org,
            query_hash=query_hash,
            query_text=query,
            runtime=run_time,
            data_source=data_source,
            retrieved_at=retrieved_at,
            data=data,
        )

        db.session.add(query_result)
        logging.info("Inserted query (%s) data; id=%s", query_hash,
                     query_result.id)

        return query_result

    @property
    def groups(self):
        return self.data_source.groups
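
`max_age` controls how stale a cached result may be: `-1` accepts any stored result, `0` effectively rejects everything cached, and a positive value accepts results retrieved within that many seconds. A minimal sketch, assuming an existing `data_source` and an example query text:

# Illustrative only; `data_source` and the SQL text are assumptions.
QUERY = "SELECT count(*) FROM events"

cached = QueryResult.get_latest(data_source, QUERY, max_age=-1)    # any age
recent = QueryResult.get_latest(data_source, QUERY, max_age=300)   # <= 5 minutes old

if recent is None:
    pass  # hypothetical fallback: run the query and persist it via store_result()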