Example #1
0
    def test_undefer_star(self):
        """undefer('*') via Load should eagerly select every deferred column."""
        orders, Order = self.tables.orders, self.classes.Order

        # build the deferred property map in a fixed, deterministic order
        deferred_props = util.OrderedDict()
        deferred_props["userident"] = deferred(orders.c.user_id)
        deferred_props["description"] = deferred(orders.c.description)
        deferred_props["opened"] = deferred(orders.c.isopen)
        mapper(Order, orders, properties=deferred_props)

        session = create_session()
        query = session.query(Order).options(Load(Order).undefer("*"))
        self.assert_compile(
            query,
            "SELECT orders.user_id AS orders_user_id, "
            "orders.description AS orders_description, "
            "orders.isopen AS orders_isopen, "
            "orders.id AS orders_id, "
            "orders.address_id AS orders_address_id FROM orders",
        )
Example #2
0
    def test_commits_state(self):
        """
        When deferred elements are loaded via a group, they get the proper
        CommittedState and don't result in changes being committed

        """

        orders, Order = self.tables.orders, self.classes.Order

        group_props = {
            name: deferred(col, group="primary")
            for name, col in (
                ("userident", orders.c.user_id),
                ("description", orders.c.description),
                ("opened", orders.c.isopen),
            )
        }
        mapper(Order, orders, properties=group_props)

        session = create_session()
        order3 = session.query(Order).get(3)

        # touching one attribute loads the entire "primary" group
        eq_(order3.description, "order 3")
        assert order3 not in session.dirty
        # re-assigning the identical value marks the object dirty...
        order3.description = "order 3"
        # ...but nothing really changed, so flush() shouldn't issue any SQL
        self.assert_sql_count(testing.db, session.flush, 0)
Example #3
0
    def test_preserve_changes(self):
        """A deferred load operation doesn't revert modifications on attributes"""

        orders, Order = self.tables.orders, self.classes.Order

        group_props = {
            name: deferred(col, group="primary")
            for name, col in (
                ("userident", orders.c.user_id),
                ("description", orders.c.description),
                ("opened", orders.c.isopen),
            )
        }
        mapper(Order, orders, properties=group_props)
        session = create_session()
        order = session.query(Order).get(3)
        assert "userident" not in order.__dict__

        # modify a deferred attribute before its group is loaded
        order.description = "somenewdescription"
        eq_(order.description, "somenewdescription")

        def go():
            # triggers the group load (one SELECT)
            eq_(order.opened, 1)

        self.assert_sql_count(testing.db, go, 1)
        # the pending modification must survive the group load
        eq_(order.description, "somenewdescription")
        assert order in session.dirty
    def test_undefer_group(self):
        """undefer_group() loads all members of a deferred group in one SELECT."""
        orders, Order = self.tables.orders, self.classes.Order

        grouped = util.OrderedDict()
        grouped["userident"] = deferred(orders.c.user_id, group="primary")
        grouped["description"] = deferred(orders.c.description, group="primary")
        grouped["opened"] = deferred(orders.c.isopen, group="primary")
        mapper(Order, orders, properties=grouped)

        session = create_session()
        query = session.query(Order).order_by(Order.id)

        def go():
            results = query.options(undefer_group('primary')).all()
            third = results[2]
            eq_(third.opened, 1)
            eq_(third.userident, 7)
            eq_(third.description, 'order 3')

        # a single statement covers the deferred group plus the base columns
        self.sql_eq_(go, [
            ("SELECT orders.user_id AS orders_user_id, "
             "orders.description AS orders_description, "
             "orders.isopen AS orders_isopen, "
             "orders.id AS orders_id, "
             "orders.address_id AS orders_address_id "
             "FROM orders ORDER BY orders.id",
             {})])
Example #5
0
    def test_group(self):
        """Deferred load with a group"""

        orders, Order = self.tables.orders, self.classes.Order

        grouped = util.OrderedDict()
        for key, col in (
            ("userident", orders.c.user_id),
            ("addrident", orders.c.address_id),
            ("description", orders.c.description),
            ("opened", orders.c.isopen),
        ):
            grouped[key] = deferred(col, group="primary")
        mapper(Order, orders, properties=grouped)

        session = create_session()
        query = session.query(Order).order_by(Order.id)

        def go():
            third = query.all()[2]
            eq_(third.opened, 1)
            eq_(third.userident, 7)
            eq_(third.description, "order 3")

        # first SELECT loads only the PK; touching any group member
        # loads the whole group in a second SELECT
        self.sql_eq_(
            go,
            [
                ("SELECT orders.id AS orders_id FROM orders ORDER BY orders.id", {}),
                (
                    "SELECT orders.user_id AS orders_user_id, "
                    "orders.address_id AS orders_address_id, "
                    "orders.description AS orders_description, "
                    "orders.isopen AS orders_isopen "
                    "FROM orders WHERE orders.id = :param_1",
                    {"param_1": 3},
                ),
            ],
        )

        third = query.all()[2]
        eq_(third.description, "order 3")
        assert third not in session.dirty
        # setting the same value marks it dirty but changes nothing
        third.description = "order 3"

        def go():
            session.flush()

        self.sql_count_(0, go)
    def test_unsaved_group_2(self):
        """Setting a grouped deferred attr on a pending object emits no SQL."""
        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders, order_by=orders.c.id, properties=dict(
            description=deferred(orders.c.description, group='primary'),
            opened=deferred(orders.c.isopen, group='primary')))

        session = create_session()
        pending = Order()
        session.add(pending)

        def go():
            pending.description = "some description"

        self.sql_count_(0, go)
def mapAll(metadata , allclasses):
    """Wire up SQLAlchemy mappers for every class in *allclasses*.

    'metadata': a SQLAlchemy MetaData whose tables are keyed by class/table name.
    'allclasses': mapping of tablename -> class; each class is expected to
    expose ``parents``, ``children`` and ``fields`` attributes describing the
    schema (NOTE(review): inferred from usage below — confirm against callers).

    Python 2 code: relies on dict.iteritems().
    """
    clear_mappers()
    
    # Pass 1: add FK constraints from each child table's id_<parent> column
    # to the parent's primary key, before any mapper is configured.
    for tablename, oeclass in allclasses.iteritems():
        table = metadata.tables[tablename]
        for parentname in oeclass.parents :
            table.c['id_'+parentname].append_foreign_key( ForeignKey(parentname+'.id') ) 
    
    
    
    # Pass 2: build per-class properties and configure the mappers.
    for tablename, oeclass in allclasses.iteritems():
        table = metadata.tables[tablename]
        properties = { }
        # numpy.ndarray fields are stored as three columns (_shape, _dtype,
        # _blob); defer them together as one group named after the field so
        # the blob is only fetched on access.
        for fieldname, fieldtype in oeclass.fields :
            if fieldtype == numpy.ndarray :
                properties[fieldname+'_shape'] = deferred( table.columns[fieldname+'_shape'] , group = fieldname)
                properties[fieldname+'_dtype'] = deferred( table.columns[fieldname+'_dtype'] , group = fieldname)
                properties[fieldname+'_blob'] = deferred( table.columns[fieldname+'_blob'] , group = fieldname)
        
        # One-to-many relationship to each child class, joined on the
        # child's id_<tablename> column, with a reverse reference.
        for child in oeclass.children :
            #~ properties['_'+child+'s'] = relationship(allclasses[child] , )
            #~ print tablename , child
            properties['_'+child+'s'] = relationship(allclasses[child] ,
                            primaryjoin = table.c.id==metadata.tables[child].c['id_'+tablename], 
                            order_by = metadata.tables[child].c['id'],
                            
                            
                            backref=backref(tablename),
                            
                            # FIXME
                            #~ cascade="all, delete, delete-orphan",
                            #~ cascade="all, delete, delete-orphan",
                            cascade="all, delete",
                            #~ cascade="all",
                            
                            #~ lazy = True,
                                        )
        
        mapper(oeclass , table , properties = properties , )
        
         #non_primary=True to create a non primary Mapper.  clear_mappers()
        
    # set numpy.ndarray field property for all classes
    for tablename, oeclass in allclasses.iteritems():
            for fieldname, fieldtype in oeclass.fields :
                if fieldtype == numpy.ndarray :
                    # NOTE(review): getter and setter come from two distinct
                    # NumpyField instances — presumably NumpyField is stateless;
                    # confirm before sharing one instance.
                    setattr(oeclass, fieldname, property( NumpyField(fieldname).getfield , NumpyField(fieldname).setfield) )
Example #8
0
 def created(cls):
     """Deferred creation-timestamp column (naive UTC, defaulted per class)."""
     column = Column(UTCDateTime(timezone=False), default=cls.timestamp)
     return deferred(column)
def make_deferred_properties(columns, defer_group, except_column_names):
    """Make a deferred group covering all columns except those specified.

       SQLAlchemy has a 'deferred' feature that allows you to avoid loading
       large infrequently-used columns until you explicitly access them.
       Typically the deferred columns appear only on detail pages, while the
       undeferred columns also appear in indexes and simple searches.
       SQLAlchemy normally requires you to list deferred columns explicitly.
       This convenience function builds the list for you, deferring all
       columns not listed as undeferred.

       'columns': pass the .columns attribute from a SQLAlchemy Table.
       'defer_group' is the name of the defer group to create.
       'except_column_names' is a list of column names not to defer.

       Returns a dict suitable for mapper(..., properties=...).

       Usage:

           _properties = make_deferred_properties(t_mytable.columns, "details",
               ["id", "title", "author"])
           sqlalchemy.orm.mapper(MyClass, t_mytable, properties=_properties)

           # Example query
           q = Session.query(MyClass)
           if details:
                q = q.options(sqlalchemy.orm.undefer_group("details"))
           records = q.all()
    """
    # Defer every column whose name is not explicitly excluded.
    return {
        col.name: orm.deferred(col, group=defer_group)
        for col in columns
        if col.name not in except_column_names
    }
 def created(cls):
     """Deferred timezone-aware creation timestamp, defaulting to now (UTC)."""
     column = Column(types.DateTime(timezone=True), default=utils.utcnow)
     return deferred(column)
 def modified(cls):
     """Deferred timezone-aware modification timestamp, refreshed on UPDATE."""
     column = Column(types.DateTime(timezone=True), onupdate=utils.utcnow)
     return deferred(column)
Example #12
0
    def test_basic(self):
        """A basic deferred load."""

        Order, orders = self.classes.Order, self.tables.orders

        mapper(Order, orders, order_by=orders.c.id, properties={
            'description': deferred(orders.c.description)})

        # unloaded deferred attr on a transient instance reads as None
        blank = Order()
        self.assert_(blank.description is None)

        query = create_session().query(Order)

        def go():
            third = query.all()[2]
            third.description  # triggers the deferred load

        # base SELECT omits the deferred column; access loads it by PK
        self.sql_eq_(go, [
            ("SELECT orders.id AS orders_id, "
             "orders.user_id AS orders_user_id, "
             "orders.address_id AS orders_address_id, "
             "orders.isopen AS orders_isopen "
             "FROM orders ORDER BY orders.id", {}),
            ("SELECT orders.description AS orders_description "
             "FROM orders WHERE orders.id = :param_1",
             {'param_1':3})])
Example #13
0
    def test_locates_col_rowproc_only(self):
        """changed in 1.0 - we don't search for deferred cols in the result
        now.

        Because the loading for ORM Query and Query from a core select
        is now split off, we test loading from a plain select()
        separately.

        """

        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders, properties={
            'description': deferred(orders.c.description)})

        session = create_session()
        stmt = sa.select([Order]).order_by(Order.id)
        first_order = session.query(Order).from_statement(stmt).all()[0]

        def go():
            eq_(first_order.description, 'order 1')

        # prior to 1.0 we'd search in the result for this column
        # self.sql_count_(0, go)
        self.sql_count_(1, go)
Example #14
0
    def test_deep_options(self):
        """undefer() applied along a multi-hop relationship path."""
        users = self.tables.users
        items = self.tables.items
        order_items = self.tables.order_items
        orders = self.tables.orders
        Order = self.classes.Order
        Item = self.classes.Item
        User = self.classes.User

        mapper(Item, items, properties=dict(
            description=deferred(items.c.description)))
        mapper(Order, orders, properties=dict(
            items=relationship(Item, secondary=order_items)))
        mapper(User, users, properties=dict(
            orders=relationship(Order, order_by=orders.c.id)))

        session = create_session()
        query = session.query(User).order_by(User.id)

        # without options, the deferred column loads lazily (one SELECT)
        item = query.all()[0].orders[1].items[1]

        def go():
            eq_(item.description, 'item 4')

        self.sql_count_(1, go)
        eq_(item.description, 'item 4')

        # with undefer() down the path, it's already loaded (zero SELECTs)
        session.expunge_all()
        item = query.options(undefer('orders.items.description')).all()[0].orders[1].items[1]

        def go():
            eq_(item.description, 'item 4')

        self.sql_count_(0, go)
        eq_(item.description, 'item 4')
def make_table(name, columns, base=None, **table_args):
    """Generate an ORM mapping class from a simplified schema format.

    Columns named 'id' (int) and 'meta' (object) are added automatically.

    Parameters
    ----------
    name : str
        Name of the table, used to set __tablename__ in the new class
    base : class or None
        Base class on which to build the new table class
    table_args : keyword arguments
        Extra keyword arguments are used to set __table_args__ in the new class
    columns : list of tuple
        List of column specifications. Each column is given as a tuple:
        ``(col_name, data_type, comment, {options})``. Where *col_name* and *comment* 
        are strings, *data_type* is a key in the column_data_types global, and
        *options* is a dict providing extra initialization arguments to the sqlalchemy
        Column (for example: 'index', 'unique'). Optionally, *data_type* may be a 'tablename.id'
        string indicating that this column is a foreign key referencing another table.
    """
    props = {
        '__tablename__': name,
        '__table_args__': table_args,
        'id': Column(Integer, primary_key=True),
    }

    for column in columns:
        colname, coltype = column[:2]
        # copy the options dict before popping so the caller's column
        # specification is not mutated across calls (was a shared-reference bug)
        kwds = {} if len(column) < 4 else dict(column[3])
        kwds['comment'] = None if len(column) < 3 else column[2]
        defer_col = kwds.pop('deferred', False)
        ondelete = kwds.pop('ondelete', None)

        if coltype not in column_data_types:
            # 'tablename.id' strings denote foreign keys
            if not coltype.endswith('.id'):
                raise ValueError("Unrecognized column type %s" % coltype)
            props[colname] = Column(Integer, ForeignKey(coltype, ondelete=ondelete), **kwds)
        else:
            ctyp = column_data_types[coltype]
            props[colname] = Column(ctyp, **kwds)

        # wrap in deferred() when requested via the 'deferred' option
        if defer_col:
            props[colname] = deferred(props[colname])

    # props['time_created'] = Column(DateTime, default=func.now())
    # props['time_modified'] = Column(DateTime, onupdate=func.current_timestamp())
    props['meta'] = Column(column_data_types['object'])

    if base is None:
        return type(name, (ORMBase,), props)
    else:
        # need to jump through a hoop to allow __init__ on table classes;
        # see: https://docs.sqlalchemy.org/en/latest/orm/constructors.html
        if hasattr(base, '_init_on_load'):
            @reconstructor
            def _init_on_load(self, *args, **kwds):
                base._init_on_load(self)
            props['_init_on_load'] = _init_on_load
        return type(name, (base,ORMBase), props)
Example #16
0
    def test_state_deferred_to_col(self):
        """Behavioral test to verify the current activity of loader callables."""

        users, User = self.tables.users, self.classes.User

        mapper(User, users, properties={"name": deferred(users.c.name)})

        session = create_session()

        def callables_of(obj):
            return attributes.instance_state(obj).callables

        user = session.query(User).options(undefer(User.name)).first()
        assert "name" not in callables_of(user)

        # mass expire, the attribute was loaded,
        # the attribute gets the callable
        session.expire(user)
        assert isinstance(callables_of(user)["name"], state.InstanceState)

        # load it, callable is gone
        user.name
        assert "name" not in callables_of(user)

        # mass expire, attribute was loaded but then deleted,
        # the callable goes away - the state wants to flip
        # it back to its "deferred" loader.
        session.expunge_all()
        user = session.query(User).options(undefer(User.name)).first()
        del user.name
        session.expire(user)
        assert "name" not in callables_of(user)

        # single attribute expire, the attribute gets the callable
        session.expunge_all()
        user = session.query(User).options(undefer(User.name)).first()
        session.expire(user, ["name"])
        assert isinstance(callables_of(user)["name"], state.InstanceState)
Example #17
0
 def modified(cls):
     """Deferred modification-timestamp column (naive UTC), set on UPDATE."""
     column = Column(UTCDateTime(timezone=False), onupdate=cls.timestamp)
     return deferred(column)
Example #18
0
def sessionmaker(dbconfig):
    """Build a Session factory from a config dict.

    'dbconfig' must contain 'url' and may contain 'schema'; all remaining
    keys are passed through to create_engine(). The input dict is copied
    and never mutated.
    """
    dbconfig = dbconfig.copy()
    conn_str = dbconfig.pop('url')
    # pop with default replaces the LBYL membership check
    schema = dbconfig.pop('schema', None)

    engine = create_engine(conn_str, **dbconfig)
    mappers, tables, Session = reflect(engine, models, schema)

    # add mapper relationships
    mappers['Data'].add_properties({
        'versions': relationship(models.Version,
                                 lazy='dynamic',
                                 backref=backref('ref',
                                                 lazy='joined'))
    })

    # defer the (potentially large) blob column; expose its length cheaply
    mappers['Version'].add_properties({
        'data': deferred(tables['version'].c['data']),
        'size': column_property(func.length(tables['version'].c['data']))
    })

    Session.class_.mappers = mappers

    return Session
Example #19
0
def Doc(models):
    """Class-body factory for the Doc model.

    NOTE(review): this function's body reads like a declarative class body
    (columns, relationships, methods) — presumably it is consumed by a model
    framework/decorator that turns it into a mapped class; confirm against
    the framework that calls it. 'models' supplies the per-language sibling
    model classes referenced below.
    """

    # Russian docs own their PK; other languages share the Russian doc's id.
    if models.lang == 'ru':
        id = Column(Integer, primary_key=True)
    else:
        id = Column(Integer, ForeignKey(models.DocRu.id),
                    primary_key=True, autoincrement=False)

    date = Column(DateTime, nullable=False, default=datetime.now, index=True)
    title = Column(Html(String(1000)), nullable=False, default='')

    summary = Column(Html(Text), nullable=False, default='')
    # body can be large: deferred so list queries don't fetch it
    body = deferred(Column(ExpandableHtml(MediumText), nullable=False,
                    default=ExpandableMarkup('')))

    _photos, photos_edit, photos = editable_ordered_relation(
            models.Doc_Photo, 'photo', use_property=False)
    _photo_sets, photo_sets_edit, photo_sets = editable_ordered_relation(
            models.Doc_PhotoSet, 'photo_set', use_property=False)

    link_blocks_edit = relationship(
            models.DocLinkBlock,
            order_by=[models.DocLinkBlock.order_position],
            collection_class=ordering_list('order_position'),
            cascade='all, delete-orphan')
    # do not display blocks without links
    link_blocks = FilteredProperty('link_blocks_edit', has_links=True)

    sections = relationship(
            models.Section,
            secondary=models.Doc_Section.__table__)

    # newest documents first by default
    __mapper_args__ = {'order_by': desc(date)}

    def __unicode__(self):
        # display label; Russian UI strings are intentional runtime values
        if self.id is None:
            return u'Новый материал'
        return u'Материал: {}'.format(self.title)

    @cached_property
    def index_photo(self):
        # first own photo, else first photo-set's index photo, else None
        if self.photos:
            return self.photos[0]
        elif self.photo_sets:
            return self.photo_sets[0].index_photo
        else:
            return None

    @cached_property
    def all_photos(self):
        # photo-set photos first, then own photos, de-duplicated keeping order
        photos = sum([x.photos for x in self.photo_sets], []) + self.photos
        return list(collections.OrderedDict.fromkeys(photos))

    @cached_property
    def links_count(self):
        # total links across all link blocks
        return sum([len(x.links) for x in self.link_blocks])

    @cached_property
    def date_formatted(self):
        # locale-aware rendering of the document date
        return format_datetime(self.date, locale=self.models.lang)
Example #20
0
 def is_updated(self):
     """Deferred last-update timestamp, maintained on both client and server."""
     column = sa.Column(
         sa.TIMESTAMP, nullable=False, default=datetime.datetime.now,
         server_default=sqlaexp.text('0'),
         onupdate=datetime.datetime.now,
         server_onupdate=sqlafunc.current_timestamp(),
     )
     return deferred(column)
Example #21
0
    def test_unsaved_group(self):
        """Deferred loading doesn't kick in when just PK cols are set"""

        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders, order_by=orders.c.id, properties=dict(
            description=deferred(orders.c.description, group='primary'),
            opened=deferred(orders.c.isopen, group='primary')))

        session = create_session()
        pending = Order()
        session.add(pending)
        # give it a PK but nothing else; the object is still pending
        pending.id = 7

        def go():
            pending.description = "some description"

        self.sql_count_(0, go)
Example #22
0
def EditLog(models):
    """Class-body factory for the EditLog model.

    NOTE(review): like Doc above, presumably consumed by a model
    framework/decorator that builds a mapped class from this body —
    confirm against the caller. Records per-stream edit events with
    before/after snapshots.
    """

    id = Column(Integer, primary_key=True)
    stream_name = Column(String(100), nullable=False, default='')
    type = Column(String(50), nullable=False, default='edit')
    # object id can be string, so we use string here
    object_id = Column(String(100), nullable=True)
    global_id = Column(String(100), nullable=False, default='')
    edit_session = Column(String(50), nullable=False, default='')

    # large pickled snapshots: deferred together so they only load on access
    before = deferred(Column(MediumPickleType, default=list), group='data')
    after = deferred(Column(MediumPickleType, default=list), group='data')
    #diff = Column(Html(MediumText), nullable=True)

    creation_time = Column(DateTime, default=datetime.now, nullable=False)
    update_time = Column(DateTime, default=datetime.now, nullable=False)

    # there is a case when log item can be authored by multiple users:
    # if draft has been made by one user and than corrected and saved by other
    users = relationship(models.AdminUser,
                          secondary=models.EditLogAdminUser.__table__)

    @cached_property
    def data_changed(self):
        # True when the snapshots differ (triggers the 'data' group load)
        return self.before != self.after

    @classmethod
    def query_for_item(cls, db, item):
        '''Query all log entries for *item*, matched by its global id.'''
        global_id = ItemLock.item_global_id(item)
        return db.query(cls).filter_by(global_id=global_id)

    @classmethod
    def last_for_item(cls, db, stream_name, item, user, edit_session):
        '''Return the latest 'edit' entry by *user* for *item* within the
        same edit session, or None.'''
        # XXX filter by update_time?
        log = db.query(cls)\
                .filter(cls.users.contains(user))\
                .filter_by(stream_name=stream_name,
                           object_id=item.id)\
                .order_by(desc(cls.update_time))\
                .first()
        if log and log.edit_session == edit_session and log.type=='edit':
            return log

    # newest entries first by default
    __mapper_args__ = {'order_by': desc(update_time)}
Example #23
0
File: DB.py Project: gonicus/clacks
    def __update_table_names(self):
        """
        Adds all tables to the internal table list.

        Defines the workflow/job/task tables on self.db_metadata and, on the
        first call only (guarded by self._initialized), configures their ORM
        mappers; repeat calls re-register the tables without re-mapping.
        """
        pfx = self._table_prefix
        self._table_list = []

        # Workflow table.
        table = Table(pfx + 'workflow',
                      self.db_metadata,
                      Column('id',     Integer,      primary_key = True),
                      Column('handle', String(200),  unique      = True),
                      Column('name',   String(50)),
                      Column('xml',    Text),
                      mysql_engine = 'INNODB')
        if not self._initialized:
            mapper = orm.mapper(WorkflowInfo, table)
        self.__add_table(table)

        # Job table.
        table = Table(pfx + 'job',
                      self.db_metadata,
                      Column('id',          Integer,      primary_key = True),
                      Column('workflow_id', Integer,      index       = True),
                      Column('status',      String(50)),
                      Column('last_change', DateTime()),
                      Column('instance',    PickleType()),
                      ForeignKeyConstraint(['workflow_id'],
                                           [pfx + 'workflow.id'],
                                           ondelete = 'CASCADE'),
                      mysql_engine = 'INNODB')
        if not self._initialized:
            # defer the pickled instance blob; only loaded when accessed
            mapper = orm.mapper(JobInfo,
                                table,
                                properties = {
                                    'instance': orm.deferred(table.c.instance)
                                })
        self.__add_table(table)

        # TaskSpec table.
        table = Table(pfx + 'task',
                      self.db_metadata,
                      Column('id',          Integer,      primary_key = True),
                      Column('job_id',      Integer,      index       = True),
                      Column('node_id',     Integer,      index       = True),
                      Column('name',        String(230)),
                      Column('status',      Integer),
                      Column('last_change', DateTime()),
                      ForeignKeyConstraint(['job_id'],
                                           [pfx + 'job.id'],
                                           ondelete = 'CASCADE'),
                      mysql_engine = 'INNODB')
        if not self._initialized:
            mapper = orm.mapper(TaskInfo, table)
        self.__add_table(table)

        # mappers are configured at most once per process
        self._initialized = True
Example #24
0
    def test_save(self):
        """Flushing a change to a non-deferred column works with a deferred mapping."""
        Order, orders = self.classes.Order, self.tables.orders

        m = mapper(
            Order,
            orders,
            properties={"description": deferred(orders.c.description)},
        )

        session = create_session()
        order2 = session.query(Order).get(2)
        order2.isopen = 1
        session.flush()
 def _deferred_config_fixture(self):
     """Map User (deferred name, subquery-loaded addresses) and Address;
     return (User, Address, session)."""
     User, Address = self.classes.User, self.classes.Address
     users_table = self.tables.users
     mapper(User, users_table, properties={
         'name': deferred(users_table.c.name),
         'addresses': relationship(Address, lazy="subquery"),
     })
     mapper(Address, self.tables.addresses)
     session = Session(autoflush=False)
     return User, Address, session
Example #26
0
    def test_defer_primary_key(self):
        """what happens when we try to defer the primary key?"""

        Order, orders = self.classes.Order, self.tables.orders

        mapper(
            Order,
            orders,
            order_by=orders.c.id,
            properties={"id": deferred(orders.c.id)},
        )

        # right now, it's not that graceful :)
        query = create_session().query(Order)
        assert_raises_message(sa.exc.NoSuchColumnError, "Could not locate", query.first)
Example #27
0
    def test_synonym_group_bug(self):
        """A synonym alongside a grouped deferred column loads correctly."""
        orders, Order = self.tables.orders, self.classes.Order

        props = {
            'isopen': synonym('_isopen', map_column=True),
            'description': deferred(orders.c.description, group='foo'),
        }
        mapper(Order, orders, properties=props)

        session = create_session()
        order1 = session.query(Order).get(1)
        eq_(order1.description, "order 1")
Example #28
0
    def test_preserve_changes(self):
        """A deferred load operation doesn't revert modifications on attributes"""

        orders, Order = self.tables.orders, self.classes.Order

        grouped = {
            key: deferred(col, group='primary')
            for key, col in (
                ('userident', orders.c.user_id),
                ('description', orders.c.description),
                ('opened', orders.c.isopen),
            )
        }
        mapper(Order, orders, properties=grouped)
        session = create_session()
        order = session.query(Order).get(3)
        assert 'userident' not in order.__dict__
        # modify before the group loads
        order.description = 'somenewdescription'
        eq_(order.description, 'somenewdescription')

        def go():
            # loads the remaining group members (one SELECT)
            eq_(order.opened, 1)

        self.assert_sql_count(testing.db, go, 1)
        # the modification must survive the group load
        eq_(order.description, 'somenewdescription')
        assert order in session.dirty
Example #29
0
 def create_col( self ):
     """Create the underlying Column once and register it on the entity."""
     if self.column_created:
         return
     self.column = schema.Column(self.colname, self.type, *self.args, **self.kwargs)
     self.column_created = True
     if self.deferred:
         # a string value names the deferred group; a bare True means no group
         group = self.deferred if isinstance(self.deferred, basestring) else None
         self.column = orm.deferred(self.column, group=group)
     self.entity._descriptor.add_column(self.kwargs.get('key', self.name), self.column)
Example #30
0
    def test_undefer_group_from_relationship_joinedload_colexpr(self):
        """undefer_group() through a joinedload path, with a column expression."""
        users = self.tables.users
        orders = self.tables.orders
        Order = self.classes.Order
        User = self.classes.User

        mapper(User, users, properties=dict(
            orders=relationship(Order, order_by=orders.c.id)))

        grouped = util.OrderedDict()
        grouped['userident'] = deferred(orders.c.user_id, group='primary')
        # a deferred SQL expression, not a plain column
        grouped['lower_desc'] = deferred(
            sa.func.lower(orders.c.description).label(None), group='primary')
        grouped['opened'] = deferred(orders.c.isopen, group='primary')
        mapper(Order, orders, properties=grouped)

        session = create_session()
        query = session.query(User).filter(User.id == 7).options(
            joinedload(User.orders).undefer_group('primary')
        )

        def go():
            second_order = query.all()[0].orders[1]
            eq_(second_order.opened, 1)
            eq_(second_order.userident, 7)
            eq_(second_order.lower_desc, 'order 3')

        # one statement: the join brings in the undeferred group columns
        self.sql_eq_(go, [
            ("SELECT users.id AS users_id, users.name AS users_name, "
             "orders_1.user_id AS orders_1_user_id, "
             "lower(orders_1.description) AS lower_1, "
             "orders_1.isopen AS orders_1_isopen, orders_1.id AS orders_1_id, "
             "orders_1.address_id AS orders_1_address_id, "
             "orders_1.description AS orders_1_description FROM users "
             "LEFT OUTER JOIN orders AS orders_1 ON users.id = "
             "orders_1.user_id WHERE users.id = :id_1 "
             "ORDER BY orders_1.id", {"id_1": 7})]
        )
Example #31
0
class Subnet(BASEV2, models.HasId, IsHazTags):
    """Upstream model for IPs.

    Subnet -> has_many(IPAllocationPool)
    IPAllocationPool -> has_many(IPAvailabilityRange)
        As well as first and last _ip markers for some unknown reason
        first_ip is min(ranges), last_ip is max(ranges)
    IPAvailabilityRange -> belongs_to(IPAllocationPool)
        Also has first and last _ip, but only for the range
    IPAllocation -> belongs_to(port, subnet, network) but NOT IPAllocationPool

    IPAllocationPool and Range seem superfluous. Just create intelligent CIDRs
    for your subnet
    """
    __tablename__ = "quark_subnets"
    id = sa.Column(sa.String(36), primary_key=True)
    name = sa.Column(sa.String(255))
    network_id = sa.Column(sa.String(36), sa.ForeignKey('quark_networks.id'))
    # Backing column for the ``cidr`` hybrid property below.
    _cidr = sa.Column(sa.String(64), nullable=False)
    # JSON-serialized allocation pools; deferred so the (potentially large)
    # text blob is only loaded on first access.
    _allocation_pool_cache = orm.deferred(sa.Column(sa.Text(), nullable=True))
    tenant_id = sa.Column(sa.String(255), index=True)
    segment_id = sa.Column(sa.String(255), index=True)

    @hybrid.hybrid_property
    def cidr(self):
        # Python-side read: expose the stored CIDR string.
        return self._cidr

    @hybrid.hybrid_property
    def allocation_pools(self):
        # Prefer the cached JSON pools; otherwise derive the allocatable
        # set as (subnet CIDR) minus the IP-policy exclusion CIDRs.
        _cache = self.get("_allocation_pool_cache")
        if _cache:
            pools = json.loads(_cache)
            return pools
        else:
            ip_policy_cidrs = IPPolicy.get_ip_policy_cidrs(self)
            cidr = netaddr.IPSet([netaddr.IPNetwork(self["cidr"])])
            allocatable = cidr - ip_policy_cidrs
            pools = _pools_from_cidr(allocatable)
            return pools

    @cidr.setter
    def cidr(self, val):
        # Setting the CIDR also derives ip_version and the first/last ip
        # markers (normalized to IPv6 form) plus the auto-assign cursor.
        self._cidr = val
        preip = netaddr.IPNetwork(val)
        self.ip_version = preip.version
        ip = netaddr.IPNetwork(val).ipv6()
        self.first_ip = ip.first
        self.last_ip = ip.last
        self.next_auto_assign_ip = self.first_ip

    @cidr.expression
    def cidr(cls):
        # SQL-side expression: queries on ``cidr`` target the _cidr column.
        return Subnet._cidr

    first_ip = sa.Column(custom_types.INET())
    last_ip = sa.Column(custom_types.INET())
    ip_version = sa.Column(sa.Integer())
    next_auto_assign_ip = sa.Column(custom_types.INET())

    # IPs still held by ports (excludes deallocated addresses).
    allocated_ips = orm.relationship(IPAddress,
                                     primaryjoin='and_(Subnet.id=='
                                     'IPAddress.subnet_id,'
                                     'IPAddress._deallocated != 1)')
    # Every IP row tied to this subnet, regardless of deallocation state.
    generated_ips = orm.relationship(IPAddress,
                                     primaryjoin='Subnet.id=='
                                     'IPAddress.subnet_id')
    routes = orm.relationship(Route,
                              primaryjoin="Route.subnet_id==Subnet.id",
                              backref='subnet',
                              cascade='delete')
    enable_dhcp = sa.Column(sa.Boolean(), default=False)
    dns_nameservers = orm.relationship(
        DNSNameserver,
        primaryjoin="DNSNameserver.subnet_id==Subnet.id",
        backref='subnet',
        cascade='delete')
    ip_policy_id = sa.Column(sa.String(36),
                             sa.ForeignKey("quark_ip_policy.id"))
    # Legacy data
    do_not_use = sa.Column(sa.Boolean(), default=False)
Example #32
0
class Host(Base):
    """ The Host class captures the configuration profile of a machine.

        Putting a physical machine into a chassis and powering it up leaves it
        in a state with a few more attributes not filled in: what Branch
        configures this host? If Ownership is captured, this is the place for
        it.

        Post DNS changes the class name "Host", and it's current existence may
        not make much sense. In the interest of keeping the scope of changes
        somewhat limited (compared to how much else is changing), the class is
        being left in this intermediate state, for the time being. The full
        expression of the changes would be to call the class "MachineProfile",
        and remove any machine specific information. This would provide a more
        normalized schema, rather than individual machines having all of the
        rows below, which potentially would need to be nullable.
    """

    __tablename__ = _TN
    # Instances are labelled by FQDN (see the ``fqdn`` property below).
    _instance_label = 'fqdn'

    # One-to-one with Machine; the machine's id doubles as the primary key.
    machine_id = Column(Integer,
                        ForeignKey('machine.machine_id',
                                   name='host_machine_fk'),
                        primary_key=True)

    branch_id = Column(Integer,
                       ForeignKey('branch.id', name='host_branch_fk'),
                       nullable=False)

    sandbox_author_id = Column(Integer,
                               ForeignKey('user_principal.id',
                                          name='host_sandbox_author_fk'),
                               nullable=True)

    personality_id = Column(Integer,
                            ForeignKey('personality.id',
                                       name='host_prsnlty_fk'),
                            nullable=False)

    lifecycle_id = Column(Integer,
                          ForeignKey('hostlifecycle.id',
                                     name='host_lifecycle_fk'),
                          nullable=False)

    operating_system_id = Column(Integer,
                                 ForeignKey('operating_system.id',
                                            name='host_os_fk'),
                                 nullable=False)

    owner_eon_id = Column(Integer,
                          ForeignKey('grn.eon_id',
                                     name='%s_owner_grn_fk' % _TN),
                          nullable=False)

    # something to retain the advertised status of the host
    advertise_status = Column(Boolean(name="%s_advertise_status_valid_ck" %
                                      _TN),
                              nullable=False,
                              default=False)

    # Deferred: only loaded when explicitly accessed.
    creation_date = deferred(
        Column(DateTime, default=datetime.now, nullable=False))

    comments = Column(String(255), nullable=True)

    # Deletion of a machine deletes the host. When this is 'machine profile'
    # this should no longer be the case as it will be many to one as opposed to
    # one to one as it stands now.
    # This is a one-to-one relation, so we need uselist=False on the backref
    machine = relation(Machine,
                       lazy=False,
                       innerjoin=True,
                       backref=backref('host',
                                       uselist=False,
                                       lazy=False,
                                       cascade='all'))

    branch = relation(Branch, innerjoin=True, backref='hosts')
    sandbox_author = relation(UserPrincipal)
    personality = relation(Personality, innerjoin=True)
    status = relation(HostLifecycle, innerjoin=True)
    operating_system = relation(OperatingSystem, innerjoin=True)
    owner_grn = relation(Grn, innerjoin=True)
    grns = association_proxy('_grns', 'grn', creator=_hgm_creator)

    __table_args__ = (Index('host_prsnlty_idx', personality_id),
                      Index('%s_branch_idx' % _TN, branch_id))

    @property
    def fqdn(self):
        # Delegated to the associated machine.
        return self.machine.fqdn

    @property
    def archetype(self):
        """ proxy in our archetype attr """
        return self.personality.archetype

    @property
    def authored_branch(self):
        """ return a string representation of sandbox author/branch name """
        if self.sandbox_author:
            return "%s/%s" % (self.sandbox_author.name, self.branch.name)
        return str(self.branch.name)
Example #33
0
class Test(DeclarativeBase):
    """A single test case for an assignment.

    Stores the test's input/output data and the normalization options
    (case folding, comment stripping, splitting, sorting, numeric
    parsing) applied to both expected and actual output before they are
    compared by :meth:`validate`.
    """

    __tablename__ = 'tests'

    id = Column(Integer, primary_key=True)

    name = Column(Unicode(255), nullable=True, default=None)

    visibility = Column(Enum('invisible',
                             'result_only',
                             'data_only',
                             'visible',
                             name='test_visibility'),
                        nullable=False,
                        default='visible')

    _visible = Column('visible', Boolean, nullable=True, default=False)
    '''Whether test is shown to user or not'''

    input_type = Column(Enum(u'stdin', u'file', name='test_input_type'),
                        nullable=False,
                        default=u'stdin')
    '''Input data type'''
    output_type = Column(Enum(u'stdout', u'file', name='test_output_type'),
                         nullable=False,
                         default=u'stdout')
    '''Output data type'''

    input_filename = Column(Unicode(255))
    '''Input data filename'''
    output_filename = Column(Unicode(255))
    '''Output data filename'''

    argv = deferred(Column(Unicode(255)), group='data')
    '''Command line arguments

    Possible variables are:
        {path}: Absolute path to temporary working directory
        {infile}: Full path to test input file
        {outfile}: Full path to test output file
    '''

    input_data = deferred(Column(Unicode(10485760)), group='data')
    output_data = deferred(Column(Unicode(10485760)), group='data')

    _timeout = Column('timeout', Float)

    # Validator options

    # Output ignore options
    ignore_case = Column(Boolean, nullable=False, default=True)
    '''Call .lower() on output before comparison'''
    ignore_returncode = Column(Boolean, nullable=False, default=True)
    '''Ignore test process returncode'''
    comment_prefix = Column(Unicode(16), nullable=True, default=u'#')
    '''Ignore all lines that start with comment_prefix'''

    show_partial_match = Column(Boolean, nullable=False, default=True)
    '''Recognize partial match'''

    # Output splitting options
    separator = Column(Unicode(16), default=None)
    '''The separator string to use for .split()
    Defaults to None (whitespace)'''
    splitlines = Column(Boolean, nullable=False, default=False)
    '''Call .splitlines() on full output before comparison'''
    split = Column(Boolean, nullable=False, default=True)
    '''Call .split() on full output of output before comparison
    or on each line from .splitlines() if splitlines is set'''
    sort = Column(Boolean, nullable=False, default=False)
    '''Sort output and test data before comparison
    Parsing is performed first, if enabled
    Results depends on whether splitlines and/or split are set:
    if split and splitlines:
        2-dimensional array in which only the second dimension
        is sorted (e.g. [[3, 4], [1, 2]])
    if only split or only splitlines:
        1-dimensional list is sorted by the types default comparator
    '''
    parallel_sort = Column(Boolean, nullable=False, default=False)
    ''' if set, output will be sorted with the help of the thread id inside of '[]' '''

    # Output parsing options
    parse_int = Column(Boolean, nullable=False, default=False)
    '''Parse every substring in output to int before comparison'''
    parse_float = Column(Boolean, nullable=False, default=False)
    '''Parse every substring in output to float before comparison'''
    float_precision = Column(Integer, nullable=True)
    '''The precision (number of decimal digits) to compare for floats'''

    assignment_id = Column(Integer,
                           ForeignKey('assignments.id'),
                           nullable=False,
                           index=True)
    assignment = relationship('Assignment',
                              backref=backref('tests',
                                              order_by=id,
                                              cascade='all, delete-orphan'))
    '''Assignment this test belongs to'''

    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship(
        'User',
        #backref=backref('tests',
        #    cascade='all, delete-orphan')
    )
    '''User who created this test'''

    def __unicode__(self):
        return u'Test %s for Assignment %s' % (self.id
                                               or '', self.assignment.id or '')

    @property
    def visible(self):
        # Deprecated: superseded by the ``visibility`` enum; falls back to
        # the legacy boolean column for rows that predate the enum.
        warn('Test.visible', DeprecationWarning, stacklevel=2)
        if self.visibility is not None:
            return self.visibility == 'visible'
        else:
            return self._visible

    @visible.setter
    def visible(self, visible):
        # Deprecated: keeps both the legacy column and the enum in sync.
        warn('Test.visible', DeprecationWarning, stacklevel=2)
        self._visible = visible
        self.visibility = 'visible' if visible else 'invisible'

    @property
    def parent(self):
        return self.assignment

    def convert(self, data):
        '''Performs all conversion options specified'''
        data = data.strip()
        # Normalize the values from database since they might be ''
        if self.separator:
            separator = self.separator
        else:
            separator = None

        if self.comment_prefix:
            data = '\n'.join(l.strip() for l in data.splitlines()
                             if not l.strip().startswith(self.comment_prefix))
        else:
            data = '\n'.join(l.strip() for l in data.splitlines())

        if self.ignore_case:
            data = data.lower()

        # If requested, reorder output lines by the thread id embedded in
        # '[...]'; lines without an id keep their relative order and come
        # first.
        if self.parallel_sort:
            tmp = data.splitlines()
            liste = {}
            rest = []
            result = ""
            for i in tmp:
                if str(i).find("[") > -1 and str(i).find("]") > -1:
                    pos = int(str(i)[str(i).find("[") + 1:str(i).find("]")])
                    liste[pos] = i
                else:
                    rest.append(i)
            for i in rest:
                result += str(i) + "\n"
            # BUG FIX: iterate the thread ids in ascending order.  Plain
            # dict iteration order is arbitrary (hash/insertion order),
            # which defeated the purpose of sorting by thread id.
            for i in sorted(liste):
                result += str(liste[i]) + "\n"
            data = result

        if self.splitlines and self.split:
            d = [[ll for ll in l.split(separator) if ll]
                 for l in data.splitlines()]
        elif self.splitlines:
            d = [l for l in data.splitlines()]
        elif self.split:
            d = [l for l in data.split(separator) if l]
        else:
            d = data

        #TODO: If an element is not parsable, do not fail but leave element unparsed

        if self.parse_float:
            if self.splitlines and self.split:
                d = [[float(b) for b in a] for a in d]
            elif self.splitlines or self.split:
                d = [float(a) for a in d]
            else:
                d = float(d)
        if self.parse_int:
            if self.splitlines and self.split:
                d = [[int(b) for b in a] for a in d]
            elif self.splitlines or self.split:
                d = [int(a) for a in d]
            else:
                d = int(d)

        if self.sort:
            if self.splitlines and self.split:
                d = [sorted(a) for a in d]
            elif self.splitlines or self.split:
                d = sorted(d)

        return d

    def unconvert(self, data):
        '''Reverts the conversions from convert'''

        sep = self.separator or u' '

        def fmt(obj):
            # Render one element back to text, honoring float_precision.
            if self.parse_float and self.float_precision:
                try:
                    return (u'%%.%df' % self.float_precision) % obj
                # BUG FIX: was a bare ``except:``, which also swallowed
                # SystemExit/KeyboardInterrupt; catch Exception only.
                except Exception:
                    log.warn('Error converting float to string with precision',
                             exc_info=True)
                    return unicode(obj)
            else:
                return unicode(obj)

        if self.splitlines and self.split:
            d = '\n'.join([sep.join(map(fmt, a)) for a in data])
        elif self.splitlines:
            d = '\n'.join(map(fmt, data))
        elif self.split:
            d = sep.join(map(fmt, data))
        else:
            d = fmt(data)

        # Convert to unicode again, just to be sure
        return unicode(d)

    def validate(self, output_data):
        '''Compare ``output_data`` against the expected output.

        Returns a 5-tuple ``(result, partial, output_test, output_data,
        message)``: ``result`` is True on an exact (normalized) match,
        ``partial`` is True when only a recognized partial match exists.
        '''

        if self.output_data:
            test_output_data = self.test_output_data
        else:
            test_output_data = u''

        try:
            output_test = test_output_data
            output_data = self.unconvert(self.convert(output_data)).strip()
        except Exception as e:
            log.warn('Error converting test data', exc_info=True)
            msg = u'''
There was an error converting the test data:
%s

This could be a fault in the test case,
please notify someone about this error.
''' % unicode(e.message, errors='ignore')
            return (False, False, output_test, output_data, msg)

        if output_test == output_data:
            result, partial = True, False
        elif self.show_partial_match and output_data and output_test.startswith(
                output_data):
            result, partial = False, True
        else:
            result, partial = False, False

        return (result, partial, output_test, output_data, u'')

    @property
    def test_output_data(self):
        '''Returns processed expected output data'''
        return self.unconvert(self.convert(self.output_data)).strip()

    @property
    def timeout(self):
        '''Return test timeout

        If not set on this test, the value from the assignment is used
        '''
        return self._timeout or self.assignment.timeout
Example #34
0
        class User(decl_base):
            """Minimal declarative model; ``data`` is mapped as a deferred
            column, so it is only loaded on first attribute access."""

            __tablename__ = "users"

            id: Mapped[int] = mapped_column(primary_key=True)
            data: Mapped[str] = deferred(mapped_column())
Example #35
0
class User(Base, Stndrd, Age_times):

    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    username = Column(String, default=None)
    email = Column(String, default=None)
    passhash = deferred(Column(String, default=None))
    created_utc = Column(Integer, default=0)
    admin_level = Column(Integer, default=0)
    is_activated = Column(Boolean, default=False)
    over_18 = Column(Boolean, default=False)
    creation_ip = Column(String, default=None)
    submissions = relationship("Submission",
                               lazy="dynamic",
                               primaryjoin="Submission.author_id==User.id",
                               backref="author_rel")
    comments = relationship("Comment",
                            lazy="dynamic",
                            primaryjoin="Comment.author_id==User.id")
    votes = relationship("Vote", lazy="dynamic", backref="users")
    commentvotes = relationship("CommentVote", lazy="dynamic", backref="users")
    bio = Column(String, default="")
    bio_html = Column(String, default="")
    badges = relationship("Badge", lazy="dynamic", backref="user")
    real_id = Column(String, default=None)
    notifications = relationship("Notification",
                                 lazy="dynamic",
                                 backref="user")
    referred_by = Column(Integer, default=None)
    is_banned = Column(Integer, default=0)
    unban_utc = Column(Integer, default=0)
    ban_reason = Column(String, default="")
    feed_nonce = Column(Integer, default=0)
    login_nonce = Column(Integer, default=0)
    title_id = Column(Integer, ForeignKey("titles.id"), default=None)
    title = relationship("Title", lazy="joined")
    has_profile = Column(Boolean, default=False)
    has_banner = Column(Boolean, default=False)
    reserved = Column(String(256), default=None)
    is_nsfw = Column(Boolean, default=False)
    tos_agreed_utc = Column(Integer, default=0)
    profile_nonce = Column(Integer, default=0)
    banner_nonce = Column(Integer, default=0)
    last_siege_utc = Column(Integer, default=0)
    mfa_secret = deferred(Column(String(16), default=None))
    hide_offensive = Column(Boolean, default=False)
    show_nsfl = Column(Boolean, default=False)
    is_private = Column(Boolean, default=False)
    read_announcement_utc = Column(Integer, default=0)
    #discord_id=Column(Integer, default=None)
    unban_utc = Column(Integer, default=0)
    is_deleted = Column(Boolean, default=False)
    delete_reason = Column(String(500), default='')
    filter_nsfw = Column(Boolean, default=False)

    patreon_id = Column(String(64), default=None)
    patreon_access_token = Column(String(128), default='')
    patreon_refresh_token = Column(String(128), default='')
    patreon_pledge_cents = Column(Integer, default=0)
    patreon_name = Column(String(64), default='')

    is_nofollow = Column(Boolean, default=False)

    moderates = relationship("ModRelationship", lazy="dynamic")
    banned_from = relationship("BanRelationship",
                               primaryjoin="BanRelationship.user_id==User.id")
    subscriptions = relationship("Subscription", lazy="dynamic")
    boards_created = relationship("Board", lazy="dynamic")
    contributes = relationship(
        "ContributorRelationship",
        lazy="dynamic",
        primaryjoin="ContributorRelationship.user_id==User.id")
    board_blocks = relationship("BoardBlock", lazy="dynamic")

    following = relationship("Follow", primaryjoin="Follow.user_id==User.id")
    followers = relationship("Follow", primaryjoin="Follow.target_id==User.id")

    blocking = relationship("UserBlock",
                            lazy="dynamic",
                            primaryjoin="User.id==UserBlock.user_id")
    blocked = relationship("UserBlock",
                           lazy="dynamic",
                           primaryjoin="User.id==UserBlock.target_id")

    _applications = relationship("OauthApp", lazy="dynamic")
    authorizations = relationship("ClientAuth", lazy="dynamic")

    # properties defined as SQL server-side functions
    energy = deferred(Column(Integer, server_default=FetchedValue()))
    comment_energy = deferred(Column(Integer, server_default=FetchedValue()))
    referral_count = deferred(Column(Integer, server_default=FetchedValue()))
    follower_count = deferred(Column(Integer, server_default=FetchedValue()))

    def __init__(self, **kwargs):
        """Create a user.

        A plain-text ``password`` kwarg is converted to ``passhash`` before
        it reaches the ORM, and ``created_utc`` is stamped with the current
        time.
        """
        if "password" in kwargs:
            kwargs["passhash"] = self.hash_password(kwargs.pop("password"))

        kwargs["created_utc"] = int(time.time())
        super().__init__(**kwargs)

    def has_block(self, target):
        """Return the UserBlock this user placed on ``target``, or None."""
        block = g.db.query(UserBlock).filter_by(
            user_id=self.id, target_id=target.id)
        return block.first()

    def is_blocked_by(self, user):
        """Return the UserBlock ``user`` placed on this user, or None."""
        block = g.db.query(UserBlock).filter_by(
            user_id=user.id, target_id=self.id)
        return block.first()

    def any_block_exists(self, other):
        """Return a UserBlock row if a block exists in either direction
        between this user and ``other``, else None."""
        mine = and_(UserBlock.user_id == self.id,
                    UserBlock.target_id == other.id)
        theirs = and_(UserBlock.user_id == other.id,
                      UserBlock.target_id == self.id)
        return g.db.query(UserBlock).filter(or_(mine, theirs)).first()

    def has_blocked_guild(self, board):
        """Return this user's BoardBlock on ``board``, or None."""
        block = g.db.query(BoardBlock).filter_by(
            user_id=self.id, board_id=board.id)
        return block.first()

    def validate_2fa(self, token):
        """Verify a TOTP ``token`` against the stored MFA secret,
        allowing one time-step of clock drift."""
        return pyotp.TOTP(self.mfa_secret).verify(token, valid_window=1)

    @property
    def boards_subscribed(self):
        """Boards with an active subscription, excluding banned boards."""
        boards = []
        for sub in self.subscriptions:
            if sub.is_active and not sub.board.is_banned:
                boards.append(sub.board)
        return boards

    @property
    def age(self):
        """Account age in whole seconds."""
        now = int(time.time())
        return now - self.created_utc

    @cache.memoize(timeout=300)
    def idlist(self, sort="hot", page=1, t=None, **kwargs):
        """Ids of front-page submissions visible to this user.

        Applies the user's content filters, restricts to subscribed
        boards plus followed (public) users, hides blocked/blocking
        authors for non-admins, applies an optional time window ``t``
        and sort order, and returns up to 26 ids for ``page``
        (25 per page plus one look-ahead).
        """
        query = g.db.query(Submission.id).options(lazyload('*')).filter_by(
            is_banned=False, is_deleted=False, stickied=False)

        # Per-user content preferences.
        if not self.over_18:
            query = query.filter_by(over_18=False)
        if self.hide_offensive:
            query = query.filter_by(is_offensive=False)
        if not self.show_nsfl:
            query = query.filter_by(is_nsfl=False)

        # Front page = subscribed boards plus followed users (who are
        # neither private nor no-follow).
        board_ids = g.db.query(Subscription.board_id).filter_by(
            user_id=self.id, is_active=True).subquery()
        user_ids = g.db.query(Follow.user_id).filter_by(user_id=self.id).join(
            Follow.target).filter(User.is_private == False,
                                  User.is_nofollow == False).subquery()
        query = query.filter(
            or_(Submission.board_id.in_(board_ids),
                Submission.author_id.in_(user_ids)))

        if self.admin_level < 4:
            # admins can see everything

            mod_boards = g.db.query(ModRelationship.board_id).filter_by(
                user_id=self.id, invite_rescinded=False).subquery()
            contrib_boards = g.db.query(
                ContributorRelationship.board_id).filter_by(
                    user_id=self.id).subquery()
            query = query.filter(
                or_(Submission.author_id == self.id,
                    Submission.post_public == True,
                    Submission.board_id.in_(mod_boards),
                    Submission.board_id.in_(contrib_boards)))

            blocking = g.db.query(
                UserBlock.target_id).filter_by(user_id=self.id).subquery()
            blocked = g.db.query(
                UserBlock.user_id).filter_by(target_id=self.id).subquery()
            query = query.filter(Submission.author_id.notin_(blocking),
                                 Submission.author_id.notin_(blocked))

        if t:
            spans = {'day': 86400, 'week': 604800,
                     'month': 2592000, 'year': 31536000}
            now = int(time.time())
            # Unknown window names fall back to "all time" (cutoff 0).
            cutoff = now - spans[t] if t in spans else 0
            query = query.filter(Submission.created_utc >= cutoff)

        gt = kwargs.get("gt")
        if gt:
            query = query.filter(Submission.created_utc > gt)

        lt = kwargs.get("lt")
        if lt:
            query = query.filter(Submission.created_utc < lt)

        orderings = {
            "hot": Submission.score_best,
            "new": Submission.created_utc,
            "disputed": Submission.score_disputed,
            "top": Submission.score_top,
            "activity": Submission.score_activity,
        }
        if sort not in orderings:
            abort(422)
        query = query.order_by(orderings[sort].desc())

        rows = query.offset(25 * (page - 1)).limit(26).all()
        return [row[0] for row in rows]

    @cache.memoize(300)
    def userpagelisting(self, v=None, page=1):
        """Ids of this user's submissions as visible to viewer ``v``
        (None = anonymous); up to 26 per page, newest first."""
        q = g.db.query(Submission.id).options(
            lazyload('*')).filter_by(author_id=self.id)

        # Viewer content preferences.
        if not (v and v.over_18):
            q = q.filter_by(over_18=False)
        if v and v.hide_offensive:
            q = q.filter_by(is_offensive=False)

        # Deleted posts: admins only.  Removed posts: admins or the
        # author themself.
        if not (v and (v.admin_level >= 3)):
            q = q.filter_by(is_deleted=False)
        if not (v and (v.admin_level >= 3 or v.id == self.id)):
            q = q.filter_by(is_banned=False)

        if v and v.admin_level >= 4:
            pass  # senior admins see everything
        elif v:
            mod_boards = g.db.query(ModRelationship.board_id).filter_by(
                user_id=v.id, invite_rescinded=False).subquery()
            contrib_boards = g.db.query(
                ContributorRelationship.board_id).filter_by(
                    user_id=v.id).subquery()
            q = q.filter(
                or_(Submission.author_id == v.id,
                    Submission.post_public == True,
                    Submission.board_id.in_(mod_boards),
                    Submission.board_id.in_(contrib_boards)))
        else:
            q = q.filter(Submission.post_public == True)

        q = q.order_by(Submission.created_utc.desc())
        return [row[0] for row in q.offset(25 * (page - 1)).limit(26)]

    @cache.memoize(300)
    def commentlisting(self, v=None, page=1):
        """Ids of this user's comments as visible to viewer ``v``.

        Only comments attached to a submission are considered; visibility
        follows the viewer's content filters, admin level, and the
        board's privacy rules.  Returns up to 26 ids, newest first.
        """
        # BUG FIX: ``Comment.parent_submission is not None`` is a Python
        # identity test on the column object, which is always True, so the
        # "attached to a submission" restriction never applied.  Use a
        # SQL-level comparison (renders IS NOT NULL) instead.
        comments = self.comments.options(lazyload('*')).filter(
            Comment.parent_submission != None).join(Comment.post)

        if not (v and v.over_18):
            comments = comments.filter(Submission.over_18 == False)

        if v and v.hide_offensive:
            comments = comments.filter(Comment.is_offensive == False)

        if v and not v.show_nsfl:
            comments = comments.filter(Submission.is_nsfl == False)

        if (not v) or v.admin_level < 3:
            comments = comments.filter(Comment.is_deleted == False)

        if not (v and (v.admin_level >= 3 or v.id == self.id)):
            comments = comments.filter(Comment.is_banned == False)

        if v and v.admin_level >= 4:
            pass  # senior admins see everything
        elif v:
            # Boards the viewer moderates or contributes to.
            m = v.moderates.filter_by(invite_rescinded=False).subquery()
            c = v.contributes.subquery()

            comments = comments.join(m,
                                     m.c.board_id == Submission.board_id,
                                     isouter=True).join(
                                         c,
                                         c.c.board_id == Submission.board_id,
                                         isouter=True).join(
                                             Board,
                                             Board.id == Submission.board_id)
            comments = comments.filter(
                or_(Comment.author_id == v.id, Submission.post_public == True,
                    Board.is_private == False, m.c.board_id != None,
                    c.c.board_id != None))
        else:
            # Anonymous viewers only see public posts on public boards.
            comments = comments.join(Board,
                                     Board.id == Submission.board_id).filter(
                                         or_(Submission.post_public == True,
                                             Board.is_private == False))

        comments = comments.options(contains_eager(Comment.post))

        comments = comments.order_by(Comment.created_utc.desc())
        comments = comments.offset(25 * (page - 1)).limit(26)

        listing = [c.id for c in comments]
        return listing

    @property
    @lazy
    def mods_anything(self):
        """Whether this user holds at least one accepted mod position."""
        first_mod = self.moderates.filter_by(accepted=True).first()
        return bool(first_mod)

    @property
    def boards_modded(self):
        """Boards this user actively moderates, sorted by name; banned
        boards and dangling relationships are excluded."""
        modded = []
        for rel in self.moderates:
            if rel and rel.board and rel.accepted and not rel.board.is_banned:
                modded.append(rel.board)
        modded.sort(key=lambda board: board.name)
        return modded

    @property
    @cache.memoize(timeout=3600)  # 1hr cache time for user rep
    def karma(self):
        """Post karma: energy minus post count (pinned to 503 for user 1)."""
        if self.id == 1:
            return 503
        return int(self.energy) - self.post_count

    @property
    @cache.memoize(timeout=3600)
    def comment_karma(self):
        """Comment karma: comment energy minus the count of this user's
        live, submission-attached comments (pinned to 0 for user 1)."""
        if self.id == 1:
            return 0
        # BUG FIX: ``Comment.parent_submission is not None`` was a Python
        # identity test on the column object (always True), so the
        # "attached to a submission" restriction never applied.  Use a
        # SQL-level comparison (renders IS NOT NULL) instead.
        live_comments = self.comments.filter(
            Comment.parent_submission != None).filter_by(
                is_banned=False).count()
        return int(self.comment_energy) - live_comments

    @property
    @cache.memoize(timeout=3600)
    def true_score(self):
        """Combined post and comment karma, floored at -5."""
        total = self.karma + self.comment_karma
        return max(total, -5)

    @property
    def base36id(self):
        # Base-36 form of the numeric id.
        return base36encode(self.id)

    @property
    def fullname(self):
        # Type-prefixed identifier ("t1_" + base-36 id).
        return f"t1_{self.base36id}"

    @property
    @cache.memoize(timeout=60)
    def has_report_queue(self):
        """Whether any board this user moderates has a reported,
        unapproved, non-removed submission awaiting review."""
        board_ids = [
            rel.board_id
            for rel in self.moderates.filter_by(accepted=True).all()
        ]
        reported = g.db.query(Submission).filter(
            Submission.board_id.in_(board_ids),
            Submission.mod_approved == 0,
            Submission.is_banned == False).join(
                Submission.reports).first()
        return bool(reported)

    @property
    def banned_by(self):
        """The User who banned this user, or None if not banned.

        ``is_banned`` doubles as the banning user's id when non-zero.
        """
        banner_id = self.is_banned
        if not banner_id:
            return None
        return g.db.query(User).filter_by(id=banner_id).first()

    def has_badge(self, badgedef_id):
        # This user's badge row for the given badge definition, or None.
        return self.badges.filter_by(badge_id=badgedef_id).first()

    def vote_status_on_post(self, post):
        """Current vote on `post` — reads the `voted` value precomputed on
        the post object (query decoration), not the database."""

        return post.voted

    def vote_status_on_comment(self, comment):
        """Current vote on `comment` — reads the precomputed `voted` value."""

        return comment.voted

    def hash_password(self, password):
        """Hash a plaintext password with salted PBKDF2-SHA512."""
        return generate_password_hash(password,
                                      method='pbkdf2:sha512',
                                      salt_length=8)

    def verifyPass(self, password):
        """Check a plaintext password against the stored hash.

        (Name is camelCase for historical reasons; kept for callers.)"""
        return check_password_hash(self.passhash, password)

    @property
    def feedkey(self):
        """Per-user token, presumably for private feed URLs; rotating
        `feed_nonce` invalidates previously issued keys — confirm usage at
        the feed endpoints."""

        return generate_hash(
            f"{self.username}{self.id}{self.feed_nonce}{self.created_utc}")

    @property
    def formkey(self):
        """CSRF token bound to the browser session id, the user id, and
        `login_nonce` (so logging out everywhere invalidates it). Creates
        the session id on first use."""

        if "session_id" not in session:
            session["session_id"] = token_hex(16)

        msg = f"{session['session_id']}+{self.id}+{self.login_nonce}"

        return generate_hash(msg)

    def validate_formkey(self, formkey):
        """Check a submitted CSRF token against the current session/user."""

        return validate_hash(
            f"{session['session_id']}+{self.id}+{self.login_nonce}", formkey)

    @property
    def url(self):
        """Site-relative profile path."""
        return f"/@{self.username}"

    @property
    def permalink(self):
        """Alias of `url`."""
        return self.url

    def __repr__(self):
        # Debug representation; shows the username only.
        return f"<User(username={self.username})>"

    def notification_commentlisting(self, page=1, all_=False):
        """Return one page (25 + 1 lookahead item) of comment ids from the
        user's notifications, newest first. With all_=False only unread
        notifications are included.

        NOTE: mutates state — every notification on the returned page is
        flagged as read and the session is committed before returning.
        """

        notifications = self.notifications.join(Notification.comment).filter(
            Comment.is_banned == False, Comment.is_deleted == False)

        if not all_:
            notifications = notifications.filter(Notification.read == False)

        notifications = notifications.options(
            contains_eager(Notification.comment))

        notifications = notifications.order_by(Notification.id.desc()).offset(
            25 * (page - 1)).limit(26)

        output = []
        for x in notifications:
            x.read = True
            g.db.add(x)
            output.append(x.comment_id)

        g.db.commit()
        return output

    @property
    @cache.memoize(30)
    def notifications_count(self):
        """Count of unread notifications whose comment is still live
        (cached for 30 seconds)."""

        return self.notifications.filter_by(read=False).join(
            Notification.comment).filter(Comment.is_banned == False,
                                         Comment.is_deleted == False).count()

    @property
    def post_count(self):
        """Count of the user's non-banned submissions."""

        return self.submissions.filter_by(is_banned=False).count()

    @property
    def comment_count(self):
        """Count of the user's live comments on posts (excludes banned,
        deleted, and orphaned comments)."""

        return self.comments.filter(
            Comment.parent_submission != None).filter_by(
                is_banned=False, is_deleted=False).count()

    @property
    #@cache.memoize(timeout=60)
    def badge_pairs(self):
        """The user's badges chunked into rows of at most two, for a
        two-column display; the last row may hold a single badge."""

        all_badges = list(self.badges.all())

        return [all_badges[i:i + 2] for i in range(0, len(all_badges), 2)]

    @property
    def alts(self):
        """All accounts linked to this one through the Alt table (in either
        direction), deduplicated and sorted by username."""

        alts1 = g.db.query(User).join(
            Alt, Alt.user2 == User.id).filter(Alt.user1 == self.id).all()
        alts2 = g.db.query(User).join(
            Alt, Alt.user1 == User.id).filter(Alt.user2 == self.id).all()

        output = list(set([x for x in alts1] + [y for y in alts2]))
        output = sorted(output, key=lambda x: x.username)

        return output

    def has_follower(self, user):
        """Return the Follow row if `user` follows this account, else None."""

        return g.db.query(Follow).filter_by(target_id=self.id,
                                            user_id=user.id).first()

    def set_profile(self, file):
        """Replace the profile picture: delete the old object, bump the
        cache-busting nonce, upload a 100x100 png, and flag has_profile."""

        self.del_profile()
        self.profile_nonce += 1

        aws.upload_file(
            name=f"users/{self.username}/profile-{self.profile_nonce}.png",
            file=file,
            resize=(100, 100))
        self.has_profile = True
        g.db.add(self)

    def set_banner(self, file):
        """Replace the banner image: delete the old object, bump the
        cache-busting nonce, upload the new file, and flag has_banner."""

        self.del_banner()
        self.banner_nonce += 1

        aws.upload_file(
            name=f"users/{self.username}/banner-{self.banner_nonce}.png",
            file=file)

        self.has_banner = True
        g.db.add(self)

    def del_profile(self):
        """Delete the stored profile image and clear the flag."""

        aws.delete_file(
            name=f"users/{self.username}/profile-{self.profile_nonce}.png")
        self.has_profile = False
        g.db.add(self)

    def del_banner(self):
        """Delete the stored banner image and clear the flag."""

        aws.delete_file(
            name=f"users/{self.username}/banner-{self.banner_nonce}.png")
        self.has_banner = False
        g.db.add(self)

    @property
    def banner_url(self):
        """CDN URL of the uploaded banner, or the default background image
        when the user has not set one."""

        if not self.has_banner:
            return "/assets/images/profiles/default_bg.png"

        return f"https://i.ruqqus.com/users/{self.username}/banner-{self.banner_nonce}.png"

    @property
    def profile_url(self):
        """CDN URL of the uploaded profile picture, or the default avatar
        when the user has not set one."""

        if not self.has_profile:
            return "/assets/images/profiles/default-profile-pic.png"

        return f"https://i.ruqqus.com/users/{self.username}/profile-{self.profile_nonce}.png"

    @property
    def available_titles(self):
        """Titles whose qualification expression passes for this user.

        SECURITY NOTE: each Title.qualification_expr is eval()'d with this
        user bound as `v`. This is safe only as long as those expressions
        are admin-authored and never user-supplied.
        """

        locs = {"v": self, "Board": Board, "Submission": Submission}

        titles = [
            i for i in g.db.query(Title).order_by(text("id asc")).all()
            if eval(i.qualification_expr, {}, locs)
        ]
        return titles

    @property
    def can_make_guild(self):
        """Eligible to found a guild: 250+ rep, OR an older account (created
        before the 1592974538 epoch cutoff — presumably a grandfather date,
        confirm) with 50+ rep, OR a patron pledging >= $5; and in all cases
        modding fewer than 10 guilds."""
        return (self.true_score >= 250
                or self.created_utc <= 1592974538 and self.true_score >= 50 or
                (self.patreon_pledge_cents and self.patreon_pledge_cents >= 500
                 )) and len(self.boards_modded) < 10

    @property
    def can_join_gms(self):
        """Can accept another guildmaster seat (hard cap of 10 guilds)."""
        return len(self.boards_modded) < 10

    @property
    def can_siege(self):
        """Eligible to siege a guild: not banned, and at least 30 days have
        passed since both account creation and the last siege attempt."""

        if self.is_banned:
            return False

        thirty_days = 60 * 60 * 24 * 30
        latest = max(self.last_siege_utc, self.created_utc)

        return int(time.time()) - latest > thirty_days

    @property
    def can_submit_image(self):
        """Image posting: patron pledging >= $5, or 1000+ rep, or an older
        (pre-cutoff) account with 500+ rep."""
        return (self.patreon_pledge_cents and self.patreon_pledge_cents >= 500
                ) or self.true_score >= 1000 or (self.created_utc <= 1592974538
                                                 and self.true_score >= 500)

    @property
    def can_upload_avatar(self):
        """Custom avatar: any patron pledge, 300+ rep, or pre-cutoff account."""
        return self.patreon_pledge_cents or self.true_score >= 300 or self.created_utc <= 1592974538

    @property
    def can_upload_banner(self):
        """Custom banner: any patron pledge, 500+ rep, or pre-cutoff account."""
        return self.patreon_pledge_cents or self.true_score >= 500 or self.created_utc <= 1592974538

    @property
    def json(self):
        """API dict for the user; banned and deleted accounts get a reduced
        payload that exposes no profile data."""

        if self.is_banned:
            return {
                'username': self.username,
                'permalink': self.permalink,
                'is_banned': True,
                'ban_reason': self.ban_reason,
                'id': self.base36id
            }

        elif self.is_deleted:
            return {
                'username': self.username,
                'permalink': self.permalink,
                'is_deleted': True,
                'id': self.base36id
            }

        return {
            'username': self.username,
            'permalink': self.permalink,
            'is_banned': False,
            'created_utc': self.created_utc,
            'post_rep': int(self.karma),
            'comment_rep': int(self.comment_karma),
            'badges': [x.json for x in self.badges],
            'id': self.base36id,
            'profile_url': self.profile_url,
            'banner_url': self.banner_url,
            'post_count': self.post_count,
            'comment_count': self.comment_count,
            'title': self.title.json if self.title else None,
            'bio': self.bio,
            'bio_html': self.bio_html
        }

    @property
    def total_karma(self):
        """Combined rep floored at -5 (user id 1 pinned to 503); mirrors
        `true_score` but without its memoize cache."""

        return 503 if self.id == 1 else max(self.karma +
                                            self.comment_karma, -5)

    @property
    def can_use_darkmode(self):
        """Dark mode is now unconditionally available; it was once gated on
        referrals/badges (see the retained commented-out rule below)."""
        return True
        # return self.referral_count or self.has_earned_darkmode or
        # self.has_badge(16) or self.has_badge(17)

    def ban(self, admin=None, reason=None, include_alts=True, days=0):
        """Ban this account.

        days > 0: temporary suspension until now + days.
        days == 0: permanent termination (also removes banner/profile pic).
        `is_banned` records the acting admin's id (1 = system ban).
        With include_alts, linked alt accounts receive the same action.
        """

        if days > 0:
            ban_time = int(time.time()) + (days * 86400)
            self.unban_utc = ban_time

        else:
            # Takes care of all functions needed for account termination
            self.unban_utc = 0
            if self.has_banner:
                self.del_banner()
            if self.has_profile:
                self.del_profile()

        self.is_banned = admin.id if admin else 1
        if reason:
            self.ban_reason = reason

        g.db.add(self)

        if include_alts:
            for alt in self.alts:

                if alt.is_banned:
                    continue

                # suspend alts for the same duration
                if days:
                    alt.ban(admin=admin,
                            reason=reason,
                            include_alts=False,
                            days=days)

                # permanently ban alts (no reason recorded on the alt)
                else:
                    alt.ban(admin=admin, include_alts=False)

    def unban(self, include_alts=False):
        """Reinstate the account, and optionally its linked alts."""

        # Takes care of all functions needed for account reinstatement.

        self.is_banned = 0
        self.unban_utc = 0

        g.db.add(self)

        if include_alts:
            for alt in self.alts:
                # unban alts too (previous comment incorrectly said "ban")
                alt.unban()

    @property
    def is_suspended(self):
        """Banned, and either permanently (unban_utc == 0) or still inside
        the suspension window."""
        return (self.is_banned
                and (self.unban_utc == 0 or self.unban_utc > time.time()))

    @property
    def is_blocking(self):
        """Per-request flag set externally on the instance (query
        decoration); 0 when unset."""
        return self.__dict__.get('_is_blocking', 0)

    @property
    def is_blocked(self):
        """Per-request flag set externally on the instance (query
        decoration); 0 when unset."""
        return self.__dict__.get('_is_blocked', 0)

    def refresh_selfset_badges(self):
        """Grant or revoke automatic badges by evaluating each BadgeDef's
        qualification expression against this user.

        SECURITY NOTE: expressions are eval()'d with the user bound as `v`;
        they must remain admin-authored, never user-supplied.
        """

        # check self-setting badges
        badge_types = g.db.query(BadgeDef).filter(
            BadgeDef.qualification_expr.isnot(None)).all()
        for badge in badge_types:
            if eval(badge.qualification_expr, {}, {'v': self}):
                if not self.has_badge(badge.id):
                    new_badge = Badge(user_id=self.id,
                                      badge_id=badge.id,
                                      created_utc=int(time.time()))
                    g.db.add(new_badge)

            else:
                # no longer qualifies: revoke the badge if currently held
                bad_badge = self.has_badge(badge.id)
                if bad_badge:
                    g.db.delete(bad_badge)

        g.db.add(self)

    @property
    def applications(self):
        """The user's OAuth applications, oldest first.

        Query.all() already returns a list; the previous wrap in a list
        comprehension was a redundant copy and has been removed.
        """
        return self._applications.order_by(OauthApp.id.asc()).all()
Example #36
0
class Submission(Base, Stndrd, Age_times, Scores, Fuzzing):
    """A post. Bulky scalar fields (title, url, body, ban_reason,
    embed_url) live on a companion SubmissionAux row and are proxied
    through the properties at the bottom of this class.

    Fixes applied in this revision:
    - removed a duplicate `is_archived` definition (two identical-cutoff
      properties were declared; the second silently shadowed the first);
    - the `title` setter was bound to a function named `title_set`, which
      left `Submission.title` read-only and exposed a stray `title_set`
      attribute — it is now correctly named `title`;
    - removed duplicate 'created_utc'/'is_archived' keys from `json`;
    - regex patterns in `permalink` are now raw strings;
    - `visibility_reason` no longer dereferences `v` when it is None.
    """

    __tablename__ = "submissions"

    id = Column(BigInteger, primary_key=True)
    submission_aux = relationship(
        "SubmissionAux",
        lazy="joined",
        uselist=False,
        innerjoin=True,
        primaryjoin="Submission.id==SubmissionAux.id")
    author_id = Column(BigInteger, ForeignKey("users.id"))
    repost_id = Column(BigInteger, ForeignKey("submissions.id"), default=0)
    edited_utc = Column(BigInteger, default=0)
    created_utc = Column(BigInteger, default=0)
    is_banned = Column(Boolean, default=False)
    is_deleted = Column(Boolean, default=False)
    distinguish_level = Column(Integer, default=0)
    created_str = Column(String(255), default=None)
    stickied = Column(Boolean, default=False)
    _comments = relationship(
        "Comment",
        lazy="dynamic",
        primaryjoin="Comment.parent_submission==Submission.id",
        backref="submissions")
    domain_ref = Column(Integer, ForeignKey("domains.id"))
    domain_obj = relationship("Domain")
    flags = relationship("Flag", backref="submission")
    is_approved = Column(Integer, ForeignKey("users.id"), default=0)
    approved_utc = Column(Integer, default=0)
    board_id = Column(Integer, ForeignKey("boards.id"), default=None)
    original_board_id = Column(Integer, ForeignKey("boards.id"), default=None)
    over_18 = Column(Boolean, default=False)
    original_board = relationship(
        "Board", primaryjoin="Board.id==Submission.original_board_id")
    creation_ip = Column(String(64), default="")
    mod_approved = Column(Integer, default=None)
    accepted_utc = Column(Integer, default=0)
    is_image = Column(Boolean, default=False)
    has_thumb = Column(Boolean, default=False)
    post_public = Column(Boolean, default=True)
    score_hot = Column(Float, default=0)
    score_disputed = Column(Float, default=0)
    score_top = Column(Float, default=1)
    score_activity = Column(Float, default=0)
    is_offensive = Column(Boolean, default=False)
    is_nsfl = Column(Boolean, default=False)
    board = relationship(
        "Board",
        lazy="joined",
        innerjoin=True,
        primaryjoin="Submission.board_id==Board.id")
    author = relationship(
        "User",
        lazy="joined",
        innerjoin=True,
        primaryjoin="Submission.author_id==User.id")
    is_pinned = Column(Boolean, default=False)
    score_best = Column(Float, default=0)
    reports = relationship("Report", backref="submission")

    upvotes = Column(Integer, default=1)
    downvotes = Column(Integer, default=0)

    approved_by = relationship(
        "User", uselist=False, primaryjoin="Submission.is_approved==User.id")

    # not sure if we need this
    reposts = relationship("Submission", lazy="joined", remote_side=[id])

    # These are virtual properties handled as postgres functions server-side.
    # There is no difference to SQLAlchemy, but they cannot be written to.
    ups = deferred(Column(Integer, server_default=FetchedValue()))
    downs = deferred(Column(Integer, server_default=FetchedValue()))
    #age=deferred(Column(Integer, server_default=FetchedValue()))
    comment_count = Column(Integer, server_default=FetchedValue())
    #flag_count=deferred(Column(Integer, server_default=FetchedValue()))
    #report_count=deferred(Column(Integer, server_default=FetchedValue()))
    score = deferred(Column(Float, server_default=FetchedValue()))
    #is_public=deferred(Column(Boolean, server_default=FetchedValue()))

    rank_hot = deferred(Column(Float, server_default=FetchedValue()))
    rank_fiery = deferred(Column(Float, server_default=FetchedValue()))
    rank_activity = deferred(Column(Float, server_default=FetchedValue()))
    rank_best = deferred(Column(Float, server_default=FetchedValue()))

    def __init__(self, *args, **kwargs):
        """Stamp creation time and IP before handing off to the declarative
        constructor.

        NOTE(review): created_str is only rendered when created_utc was not
        supplied by the caller — pre-existing behavior, kept as-is.
        """
        if "created_utc" not in kwargs:
            kwargs["created_utc"] = int(time.time())
            kwargs["created_str"] = time.strftime(
                "%I:%M %p on %d %b %Y", time.gmtime(kwargs["created_utc"]))

        kwargs["creation_ip"] = request.remote_addr

        super().__init__(*args, **kwargs)

    def __repr__(self):
        return f"<Submission(id={self.id})>"

    @property
    @lazy
    def board_base36id(self):
        """The guild's id in base-36."""
        return base36encode(self.board_id)

    @property
    def is_repost(self):
        """True when this post was recorded as a repost of another."""
        return bool(self.repost_id)

    @property
    @lazy
    def fullname(self):
        """Thing-style fullname, e.g. "t2_<base36id>"; t2 marks a post."""
        return f"t2_{self.base36id}"

    @property
    @lazy
    def permalink(self):
        """Canonical path /post/<base36id>/<slug>; the slug is built from
        up to the first six words of the title."""

        output = self.title.lower()

        # Strip short HTML entities (&amp; etc.), then non-word characters.
        output = re.sub(r'&\w{2,3};', '', output)
        output = [re.sub(r'\W', '', word) for word in output.split()[0:6]]
        output = '-'.join(output)

        if not output:
            output = '-'

        return f"/post/{self.base36id}/{output}"

    @property
    def is_archived(self):
        """Posts older than 180 days are archived (read-only)."""

        now = int(time.time())
        cutoff = now - (60 * 60 * 24 * 180)

        return self.created_utc < cutoff

    def rendered_page(self, comment=None, comment_info=None, v=None):
        """Render the post page for viewer v.

        Chooses the template (deleted/banned variants, except for level-3+
        admins who always see the live template), 403s private posts for
        non-authors, and otherwise trees the preloaded comments before
        rendering.
        """
        # check for banned/deleted state first
        if self.is_deleted:
            template = "submission_deleted.html"
        elif v and v.admin_level >= 3:
            template = "submission.html"
        elif self.is_banned:
            template = "submission_banned.html"
        else:
            template = "submission.html"

        private = (not self.is_public and not self.is_pinned
                   and not self.board.can_view(v))

        if private and (not v or not self.author_id == v.id):
            abort(403)
        elif private:
            # author viewing their own private post: hide the comments
            self.__dict__["replies"] = []
        else:
            # load and tree comments; passing a comment object renders a
            # comment-permalink view
            self.tree_comments(comment=comment)

        is_allowed_to_comment = (self.board.can_comment(v)
                                 and not self.is_archived)

        return render_template(
            template,
            v=v,
            p=self,
            sort_method=request.args.get("sort", "Hot").capitalize(),
            linked_comment=comment,
            comment_info=comment_info,
            is_allowed_to_comment=is_allowed_to_comment,
            render_replies=True,
            is_guildmaster=self.board.has_mod(v))

    @property
    @lazy
    def domain(self):
        """Display domain of the link ("text post" for selfposts), with a
        leading "www." stripped."""

        if not self.url:
            return "text post"

        domain = urlparse(self.url).netloc
        if domain.startswith("www."):
            domain = domain.split("www.")[1]
        return domain

    def tree_comments(self, comment=None, v=None):
        """Assemble the preloaded flat comment list into a reply tree keyed
        on parent_fullname; with `comment` given, root the tree at that
        single comment (permalink view)."""

        comments = self._preloaded_comments

        index = {}
        for c in comments:
            if c.parent_fullname in index:
                index[c.parent_fullname].append(c)
            else:
                index[c.parent_fullname] = [c]

        for c in comments:
            c.__dict__["replies"] = index.get(c.fullname, [])

        if comment:
            self.__dict__["replies"] = [comment]
        else:
            self.__dict__["replies"] = index.get(self.fullname, [])

    @property
    def active_flags(self):
        """User flags outstanding for admin review (0 once admin-approved)."""
        if self.is_approved:
            return 0
        else:
            return len(self.flags)

    @property
    def active_reports(self):
        """Reports newer than the last guildmaster acceptance (0 once
        mod-approved)."""
        if self.mod_approved:
            return 0
        else:
            return self.reports.filter(
                Report.created_utc > self.accepted_utc).count()

    @property
    #@lazy
    def thumb_url(self):
        """Thumbnail URL: generated thumb if present, the image itself for
        image posts, else None."""
        if self.has_thumb:
            return f"https://i.ruqqus.com/posts/{self.base36id}/thumb.png"
        elif self.is_image:
            return self.url
        else:
            return None

    def visibility_reason(self, v):
        """Human-readable sentence explaining why viewer v can see this
        (otherwise restricted) post; implicitly None if no rule matches."""

        if v and self.author_id == v.id:
            return "this is your content."
        elif self.is_pinned:
            return "a guildmaster has pinned it."
        elif self.board.has_mod(v):
            return f"you are a guildmaster of +{self.board.name}."
        elif self.board.has_contributor(v):
            return f"you are an approved contributor in +{self.board.name}."
        elif v and v.admin_level >= 4:
            # `v and` guard added: v may be None if no earlier rule matched
            return "you are a Ruqqus admin."

    def determine_offensive(self):
        """Set is_offensive if any BadWord matches the title or body."""

        for x in g.db.query(BadWord).all():
            if (self.body and x.check(self.body)) or x.check(self.title):
                self.is_offensive = True
                break
        else:
            self.is_offensive = False

    @property
    def json(self):
        """API dict for the post; banned/deleted posts get a reduced
        payload."""

        if self.is_banned:
            return {
                'is_banned': True,
                'is_deleted': self.is_deleted,
                'ban_reason': self.ban_reason,
                'id': self.base36id,
                'title': self.title,
                'permalink': self.permalink,
                'guild_name': self.board.name
            }
        elif self.is_deleted:
            return {
                'is_banned': bool(self.is_banned),
                'is_deleted': True,
                'id': self.base36id,
                'title': self.title,
                'permalink': self.permalink,
                'guild_name': self.board.name
            }

        data = {
            'author': self.author.username if not self.author.is_deleted else None,
            'permalink': self.permalink,
            'is_banned': False,
            'is_deleted': False,
            'created_utc': self.created_utc,
            'id': self.base36id,
            'fullname': self.fullname,
            'title': self.title,
            'is_nsfw': self.over_18,
            'is_nsfl': self.is_nsfl,
            'thumb_url': self.thumb_url,
            'domain': self.domain,
            'is_archived': self.is_archived,
            'url': self.url,
            'body': self.body,
            'body_html': self.body_html,
            'edited_utc': self.edited_utc or 0,
            'guild_name': self.board.name,
            'embed_url': self.embed_url,
            'author_title': self.author.title.json if self.author.title else None,
            'original_guild_name': self.original_board.name,
            'comment_count': self.comment_count,
            'score': self.score_fuzzed,
            'upvotes': self.upvotes_fuzzed,
            'downvotes': self.downvotes_fuzzed
        }

        if "_voted" in self.__dict__:
            data["voted"] = self._voted

        return data

    @property
    def voted(self):
        """Viewer's vote decorated onto the instance; 0 if absent."""
        return self._voted if "_voted" in self.__dict__ else 0

    @property
    def user_title(self):
        """Author title decorated onto the instance, else the live title."""
        return self._title if "_title" in self.__dict__ else self.author.title

    # ------- SubmissionAux proxies --------------------------------------

    @property
    def title(self):
        return self.submission_aux.title

    @title.setter
    def title(self, x):
        # Bug fix: this setter was previously a function named `title_set`,
        # so `self.title = x` raised AttributeError (read-only property).
        self.submission_aux.title = x
        g.db.add(self.submission_aux)

    @property
    def url(self):
        return self.submission_aux.url

    @url.setter
    def url(self, x):
        self.submission_aux.url = x
        g.db.add(self.submission_aux)

    @property
    def body(self):
        return self.submission_aux.body

    @body.setter
    def body(self, x):
        self.submission_aux.body = x
        g.db.add(self.submission_aux)

    @property
    def body_html(self):
        return self.submission_aux.body_html

    @body_html.setter
    def body_html(self, x):
        self.submission_aux.body_html = x
        g.db.add(self.submission_aux)

    @property
    def ban_reason(self):
        return self.submission_aux.ban_reason

    @ban_reason.setter
    def ban_reason(self, x):
        self.submission_aux.ban_reason = x
        g.db.add(self.submission_aux)

    @property
    def embed_url(self):
        return self.submission_aux.embed_url

    @embed_url.setter
    def embed_url(self, x):
        self.submission_aux.embed_url = x
        g.db.add(self.submission_aux)

    # ------- per-request decoration flags (set by query helpers) --------

    @property
    def is_guildmaster(self):
        return self.__dict__.get('_is_guildmaster', False)

    @property
    def is_blocking_guild(self):
        return self.__dict__.get('_is_blocking_guild', False)

    @property
    def is_blocked(self):
        return self.__dict__.get('_is_blocked', False)

    @property
    def is_blocking(self):
        return self.__dict__.get('_is_blocking', False)

    @property
    def is_subscribed(self):
        return self.__dict__.get('_is_subscribed', False)

    @property
    def is_public(self):
        """Visible to everyone: explicitly public, or in a public guild."""
        return self.post_public or not self.board.is_private

    @property
    def flag_count(self):
        return len(self.flags)

    @property
    def report_count(self):
        return len(self.reports)
Example #37
0
 def active(cls):
     # Column factory for a deferred Boolean `active` flag defaulting to
     # True — presumably a @declared_attr mixin attribute (the decorator is
     # not visible in this fragment; confirm at the definition site).
     return deferred(Column(Boolean, default=True))
Example #38
0
class VulnerabilityGitCommits(MainBase):
    """A Git commit associated with a vulnerability entry, plus a cached
    repository file tree and links to per-file comments and markers."""

    __tablename__ = "vulnerability_git_commits"

    commit_hash = Column(String(255), nullable=False, index=True)
    _commit_link = Column("commit_link", String(255), nullable=False)
    repo_name = Column(String(255), nullable=False)
    repo_owner = Column(String(255))
    # URL to a *.git Git repository (if applicable).
    _repo_url = Column("repo_url", String(255))
    vulnerability_details_id = Column(Integer,
                                      ForeignKey("vulnerability.id",
                                                 name='fk_vuln'),
                                      nullable=False)
    vulnerability = relationship("Vulnerability",
                                 foreign_keys=[vulnerability_details_id])
    # Used to store/cache the repository tree files with hashes.
    tree_cache = deferred(Column(LONGTEXT()))

    repository_files = relationship(
        RepositoryFiles,
        backref="commit",
        cascade="all, delete-orphan",
        single_parent=True,
    )
    # link to comments through RepositoryFiles (active comments only)
    comments = relationship(
        RepositoryFileComments,
        backref="commit",
        secondary=RepositoryFiles.__table__,
        primaryjoin="VulnerabilityGitCommits.id==RepositoryFiles.commit_id",
        secondaryjoin=
        "and_(RepositoryFiles.id==RepositoryFileComments.repository_file_id, "
        "RepositoryFileComments.active==True)",
    )
    # link to markers through RepositoryFiles (active markers only)
    markers = relationship(
        RepositoryFileMarkers,
        backref="commit",
        secondary=RepositoryFiles.__table__,
        primaryjoin="VulnerabilityGitCommits.id==RepositoryFiles.commit_id",
        secondaryjoin=
        "and_(RepositoryFiles.id==RepositoryFileMarkers.repository_file_id, "
        "RepositoryFileMarkers.active==True)",
    )

    @property
    def num_files(self):
        """Number of files attached to this commit."""
        # TODO: This should be refactored as it is incredibly inefficient.
        #       We should use a count on the database side instead.
        return len(self.repository_files)

    @property
    def num_comments(self):
        """Number of active comments across this commit's files."""
        # TODO: see comment regarding performance above.
        return len(self.comments)

    @property
    def num_markers(self):
        """Number of active markers across this commit's files."""
        # TODO: see comment regarding performance above.
        return len(self.markers)

    @property
    def repo_url(self):
        """Stored repository URL; for GitHub commit links with a known
        owner/name, falls back to a constructed github.com URL."""
        if not self._repo_url:
            # TODO: Refactor this approach of retrieving github.com urls.
            if self.commit_link and "github.com" in self.commit_link:
                if self.repo_owner and self.repo_name:
                    return ("https://github.com/" + self.repo_owner + "/" +
                            self.repo_name)
        return self._repo_url

    @repo_url.setter
    def repo_url(self, repo_url):
        self._repo_url = repo_url

    @property
    def commit_link(self):
        """Web link to the commit or patch (VCS UI link)."""
        return self._commit_link

    @commit_link.setter
    def commit_link(self, commit_link):
        # TODO: Add commit link sanitization back here. We're currently
        #  skipping it as on object creation (populate) there might be no
        #  repo_url set and the commit_link might be just a VCS UI link to the
        #  patch. We should still always require a separate repository link and
        #  commit hash if it's not a simple Github entry.
        # if not self.repo_url and commit_link:
        # vcs_handler = get_vcs_handler(None, commit_link)
        # if not vcs_handler:
        #   raise InvalidIdentifierException(
        #               'Please provide a valid commit link.')
        if commit_link:
            if not commit_link.startswith("http"):
                raise InvalidIdentifierException(
                    "Please provide a valid commit link.")

        self._commit_link = commit_link

    def __init__(
        self,
        commit_link=None,
        repo_owner=None,
        repo_name=None,
        repo_url=None,
        commit_hash=None,
    ):
        """Validate repo_url (must resolve to a VCS handler) and the
        commit_link scheme; raises InvalidIdentifierException otherwise."""
        super().__init__()
        self.repo_owner = repo_owner
        self.repo_name = repo_name
        if repo_url:
            vcs_handler = get_vcs_handler(None, repo_url)
            if not vcs_handler:
                raise InvalidIdentifierException(
                    "Please provide a valid git repo URL.")
            self.repo_url = repo_url
        self.commit_link = commit_link
        self.commit_hash = commit_hash

    def to_json(self):
        """Serialize object properties as dict."""
        return {
            'commit_link': self.commit_link,
            'repo_owner': self.repo_owner,
            'repo_name': self.repo_name,
            'repo_url': self.repo_url,
            'commit_hash': self.commit_hash,
            'relevant_files': self.get_relevant_files()
        }

    def get_relevant_files(self):
        """Extract the "./"-prefixed paths of the files touched by the
        cached commit from tree_cache (JSON); empty list if no cache."""
        relevant_files = []

        if self.tree_cache is None:
            return relevant_files

        tree = json.loads(self.tree_cache)
        if "commit" in tree:
            commit_data = tree["commit"]
            master_commit_files = commit_data["files"]

            for patched_files in master_commit_files:
                relevant_file_path = "./" + patched_files["path"]
                relevant_files.append(relevant_file_path)

        return relevant_files

    def copy(self):
        """Clone this commit together with copies of its repository_files."""
        new_commit = copy_obj(self)
        new_commit.repository_files = []
        for repo_file in self.repository_files:
            new_commit.repository_files.append(repo_file.copy())
        # N.B. comments and markers are copied in repository_files not here.
        return new_commit
Example #39
0
class Track(Base):
    """An audio/video track. Title and grouping metadata come from tags;
    media files are attachments; lyrics are stored deferred."""

    __tablename__ = "track"

    id = Column(String(), primary_key=True)
    duration = Column(Float(),
                      nullable=False,
                      default=0,
                      doc="Duration in seconds")
    source_filename = Column(Unicode(), nullable=True)

    tags = relationship("Tag",
                        secondary=TrackTagMapping.__table__)  # , lazy="joined"
    attachments = relationship("Attachment",
                               secondary=TrackAttachmentMapping.__table__)
    #lyrics = relationship("Lyrics", cascade="all, delete-orphan")
    lyrics = deferred(Column(UnicodeText(), nullable=True))

    time_updated = Column(DateTime(), nullable=False, default=now)

    def tags_with_parents_dict(self):
        """Group this track's tag names by their parent tag's name;
        parentless tags are collected under the None key."""
        t = {None: [tag.name for tag in self.tags if not tag.parent]}
        for parent_name, tag_name in [(tag.parent.name, tag.name)
                                      for tag in self.tags if tag.parent]:
            if not t.get(parent_name):
                t[parent_name] = [tag_name]
            else:
                t[parent_name].append(tag_name)
        return t

    def get_tag(self, parent):
        """All of this track's tag names under the given parent tag name,
        sorted and joined with ' - '."""
        tags_found = set()
        for tag in self.tags:
            if tag.parent and tag.parent.name == parent:
                tags_found.add(tag.name)
        return ' - '.join(sorted(tags_found))

    @property
    def title(self):
        """
        'title' is a tag.
        Tracks COULD have more than one title (e.g. an English name and a
        Japanese name); this just returns the first one matched, falling
        back to the source filename when no title tag exists.

        TODO - Event to activate before save to DB to render the title from tags
        """
        try:
            return next(
                filter(
                    lambda tag: tag.parent.name == 'title'
                    if tag.parent else False, self.tags)).name
        except StopIteration:
            return self.source_filename

    #@title.setter
    #def title(self, value):
    #    self._x = value
    #@title.deleter
    #def title(self):
    #    #del self._x

    @property
    def image(self):
        """Location of the first image attachment, or '' if none."""
        for attachment in self.attachments:
            if attachment.type == 'image':
                return attachment.location
        return ''

    # Serialization field sets consumed by Base.to_dict().
    __to_dict__ = copy.deepcopy(Base.__to_dict__)
    __to_dict__.update({
        'default': {
            #Base.to_dict_setup(self, list_type='default', field_processors={
            'id': None,
            'title': None,
            'duration': None,
        },
    })

    # NOTE(review): 'description' has no column on Track in this chunk —
    # presumably provided by Base or mapped elsewhere; confirm.
    __to_dict__.update({'full': copy.deepcopy(__to_dict__['default'])})
    __to_dict__['full'].update({
        #Base.to_dict_setup(self, list_type='full', clone_list='default', filed_processors={
        'description':
        None,
        'attachments':
        lambda track:
        [attachment.to_dict() for attachment in track.attachments],
        'tags':
        lambda track: track.tags_with_parents_dict(),
        'lyrics':
        None,
        'image':
        None,
        'source_filename':
        None,
    })

    @staticmethod
    def before_update_listener(mapper, connection, target):
        """
        TODO: This may not be the whole story ...
          when tags/lyrics/attachments change then we want this to update as well
          Investigation needed.
          I think this is irrelevent any change will update the id and a
          new record will be created, so this can never happen.
        """
        target.time_updated = now()
Example #40
0
 def data(cls):
     # Column factory for a deferred String(50) column named 'data' —
     # presumably a @declared_attr mixin attribute (the decorator is not
     # visible in this fragment; confirm at the definition site).
     return deferred(Column('data', String(50)))
Example #41
0
class Company(DBBASE, PersistentACLMixin):
    """
        Company model
        Store all company specific stuff (headers, logos, RIB, ...)

        Most optional columns are deferred into the 'edit' group so they are
        only loaded when the edit views need them.
    """
    __tablename__ = 'company'
    __table_args__ = default_table_args
    id = Column("id", Integer, primary_key=True)
    name = Column("name", String(150), nullable=False)
    # NOTE: stored in the DB column named "object", exposed as ``goal``.
    goal = deferred(Column(
        "object",
        String(255),
        default="",
    ), group='edit')
    email = deferred(
        Column("email", String(255)),
        group='edit',
    )
    phone = deferred(Column("phone", String(20), default=""), group='edit')
    mobile = deferred(Column("mobile", String(20)), group='edit')
    comments = deferred(Column("comments", Text), group='edit')

    created_at = deferred(
        Column(
            Date(),
            default=datetime.date.today,
            nullable=False,
        ), )
    updated_at = deferred(
        Column(
            Date(),
            default=datetime.date.today,
            onupdate=datetime.date.today,
            nullable=False,
        ))
    active = deferred(Column(Boolean(), default=True))
    RIB = deferred(Column("RIB", String(255)), group='edit')
    IBAN = deferred(Column("IBAN", String(255)), group='edit')

    code_compta = deferred(
        Column(String(30), default=""),
        group="edit",
    )
    contribution = deferred(Column(Float), group='edit')

    header_id = Column(
        ForeignKey('file.id'),
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'exclude': True
            }
        },
    )
    logo_id = Column(
        ForeignKey('file.id'),
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'exclude': True
            }
        },
    )
    cgv = deferred(
        Column(Text, default=''),
        group='edit',
    )

    # sequences related, used for counters initialization on migration from
    # another system (eg: WinScop). Contain the latest index already assigned.
    month_company_sequence_init_value = deferred(Column(Integer), )

    month_company_sequence_init_date = deferred(Column(Date), )

    # Relationships
    header_file = relationship(
        "File",
        primaryjoin="File.id==Company.header_id",
        # backref used for ACL computation
        backref=backref(
            "company_header_backref",
            uselist=False,
            info={
                'colanderalchemy': {
                    'exclude': True
                },
                'export': {
                    'exclude': True
                }
            },
        ),
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'exclude': True
            }
        },
    )

    logo_file = relationship(
        "File",
        primaryjoin="File.id==Company.logo_id",
        # backref used for ACL computation
        backref=backref(
            "company_logo_backref",
            uselist=False,
            info={
                'colanderalchemy': {
                    'exclude': True
                },
                'export': {
                    'exclude': True
                }
            },
        ),
        uselist=False,
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'exclude': True
            }
        },
    )

    activities = relationship(
        "CompanyActivity",
        secondary=COMPANY_ACTIVITY,
        backref=backref(
            'companies',
            info={
                'colanderalchemy': {
                    'exclude': True
                },
                'export': {
                    'exclude': True
                },
            },
        ),
        info={
            'colanderalchemy': {
                'title': u'Activités',
            },
            'export': {
                'exclude': True
            },
        },
    )

    customers = relationship("Customer",
                             order_by="Customer.code",
                             back_populates="company",
                             info={
                                 'colanderalchemy': {
                                     'exclude': True
                                 },
                                 "export": {
                                     'exclude': True
                                 }
                             })

    projects = relationship(
        "Project",
        order_by="Project.id",
        back_populates="company",
        info={
            'colanderalchemy': {
                'exclude': True
            },
            "export": {
                'exclude': True
            }
        },
    )
    tasks = relationship(
        "Task",
        primaryjoin="Task.company_id==Company.id",
        order_by='Task.date',
        back_populates="company",
        info={
            'colanderalchemy': {
                'exclude': True
            },
            'export': {
                'exclude': True
            },
        },
    )
    employees = relationship(
        "User",
        secondary=COMPANY_EMPLOYEE,
        back_populates="companies",
        info={
            'colanderalchemy': get_excluded_colanderalchemy(u'Employés'),
            'export': {
                'exclude': True
            }
        },
    )
    sale_catalog = relationship("SaleProductCategory",
                                order_by="SaleProductCategory.title",
                                back_populates="company",
                                info={
                                    'export': {
                                        'exclude': True
                                    },
                                })
    expense = relationship("ExpenseSheet",
                           order_by="ExpenseSheet.month",
                           cascade="all, delete-orphan",
                           back_populates="company",
                           info={
                               'export': {
                                   'exclude': True
                               },
                           })

    # Service object implementing the queries used by the methods below.
    _autonomie_service = CompanyService

    def get_company_id(self):
        """
            Return the current company id
            Allows company id access through request's context
        """
        return self.id

    @property
    def header(self):
        """The header File object (see also the ``header`` setter)."""
        return self.header_file

    @header.setter
    def header(self, appstruct):
        """Create/update the header File from a deform-style appstruct."""
        if self.header_file is None:
            from autonomie.models.files import File
            self.header_file = File()

        self.header_file.name = appstruct.get('name', 'header.png')
        for key, value in appstruct.items():
            setattr(self.header_file, key, value)

        self.header_file.description = 'Header'

    @property
    def logo(self):
        """The logo File object (see also the ``logo`` setter)."""
        return self.logo_file

    @logo.setter
    def logo(self, appstruct):
        """Create/update the logo File from a deform-style appstruct."""
        if self.logo_file is None:
            from autonomie.models.files import File
            self.logo_file = File()

        self.logo_file.name = appstruct.get('name', 'logo.png')
        for key, value in appstruct.items():
            setattr(self.logo_file, key, value)
        self.logo_file.description = 'Logo'

    @classmethod
    def query(cls, keys=None, active=True):
        """
            Return a query ordered by company name

            :param keys: optional column list to restrict the SELECT to
            :param bool active: when True (default), only active companies
        """
        if keys:
            query = DBSESSION().query(*keys)
        else:
            query = super(Company, cls).query()
        if active:
            # `== True` is intentional: it builds a SQL expression.
            query = query.filter(cls.active == True)
        return query.order_by(cls.name)

    def __json__(self, request):
        """
            return a dict representation
        """
        customers = [customer.__json__(request) for customer in self.customers]
        projects = [project.__json__(request) for project in self.projects]
        return dict(id=self.id,
                    name=self.name,
                    goal=self.goal,
                    email=self.email,
                    phone=self.phone,
                    mobile=self.mobile,
                    comments=self.comments,
                    RIB=self.RIB,
                    IBAN=self.IBAN,
                    customers=customers,
                    projects=projects)

    def get_tasks(self):
        """
        Get all tasks for this company, as a list
        """
        return self._autonomie_service.get_tasks(self)

    def get_recent_tasks(self, page_nb, nb_per_page):
        """
        :param int nb_per_page: how many to return
        :param int page_nb: pagination index

        .. todo:: this is naive, use sqlalchemy pagination

        :return: pagination for wanted tasks, total nb of tasks
        """
        count = self.get_tasks().count()
        offset = page_nb * nb_per_page
        items = self._autonomie_service.get_tasks(self,
                                                  offset=offset,
                                                  limit=nb_per_page)
        return items, count

    def get_estimations(self, valid=False):
        """
        Return the estimations of the current company
        """
        return self._autonomie_service.get_estimations(self, valid)

    def get_invoices(self, valid=False):
        """
        Return the invoices of the current company
        """
        return self._autonomie_service.get_invoices(self, valid)

    def get_cancelinvoices(self, valid=False):
        """
        Return the cancelinvoices of the current company
        """
        return self._autonomie_service.get_cancelinvoices(self, valid)

    def has_invoices(self):
        """
            return True if this company owns valid invoices or cancelinvoices
        """
        # Bug fix: these are bound methods, so passing ``self`` explicitly
        # bound it to the ``valid`` parameter and then raised
        # "TypeError: got multiple values for argument 'valid'".
        return self.get_invoices(valid=True).count() > 0 or \
            self.get_cancelinvoices(valid=True).count() > 0

    def get_real_customers(self, year):
        """
        Return the real customers (with invoices)
        """
        return self._autonomie_service.get_customers(self, year)

    def get_late_invoices(self):
        """
        Return invoices waiting for more than 45 days
        """
        return self._autonomie_service.get_late_invoices(self)

    def get_customer_codes_and_names(self):
        """
        Return current company's customer codes and names
        """
        return self._autonomie_service.get_customer_codes_and_names(self)

    def get_project_codes_and_names(self):
        """
        Return current company's project codes and names
        """
        return self._autonomie_service.get_project_codes_and_names(self)

    def get_next_estimation_index(self):
        """
        Return the next estimation index
        """
        return self._autonomie_service.get_next_estimation_index(self)

    def get_next_invoice_index(self):
        """
        Return the next invoice index
        """
        return self._autonomie_service.get_next_invoice_index(self)

    def get_next_cancelinvoice_index(self):
        """
        Return the next cancelinvoice index
        """
        return self._autonomie_service.get_next_cancelinvoice_index(self)

    def get_turnover(self, year):
        """
        Retrieve the annual turnover for the current company

        :param int year: The current year
        """
        ca = self._autonomie_service.get_turnover(self, year)
        return math_utils.integer_to_amount(ca, precision=5)

    @classmethod
    def label_query(cls):
        """Return a query suited for building (id, label) selects."""
        return cls._autonomie_service.label_query(cls)

    @classmethod
    def get_id_by_analytical_account(cls, analytical_account):
        """Return the company id matching the given analytical account."""
        return cls._autonomie_service.get_id_by_analytical_account(
            cls, analytical_account)
Example #42
0
 class MyDefMixin:
     foo = deferred(Column('foo', String))
Example #43
0
class VulnerabilityGitCommits(MainBase):
    """A Git commit attached to a vulnerability entry.

    Stores the commit link/hash and owning repository, plus cached
    repository data: the file tree (``tree_cache``) and, through
    ``RepositoryFiles``, per-file comments and markers.
    """

    __tablename__ = "vulnerability_git_commits"

    commit_hash = Column(String(255), nullable=False, index=True)
    # Backing column for the ``commit_link`` property below.
    _commit_link = Column("commit_link", String(255), nullable=False)
    repo_name = Column(String(255), nullable=False)
    repo_owner = Column(String(255))
    # URL to a *.git Git repository (if applicable).
    _repo_url = Column("repo_url", String(255))
    vulnerability_details_id = Column(Integer,
                                      ForeignKey("vulnerability.id",
                                                 name="fk_vuln"),
                                      nullable=False)
    vulnerability = relationship("Vulnerability",
                                 foreign_keys=[vulnerability_details_id])
    # Used to store/cache the repository tree files with hashes.
    tree_cache = deferred(Column(LONGTEXT()))

    repository_files = relationship(
        RepositoryFiles,
        backref="commit",
        cascade="all, delete-orphan",
        single_parent=True,
    )
    # link to comments through RepositoryFiles
    comments = relationship(
        RepositoryFileComments,
        backref="commit",
        secondary=RepositoryFiles.__table__,
        primaryjoin="VulnerabilityGitCommits.id==RepositoryFiles.commit_id",
        secondaryjoin=
        ("and_(RepositoryFiles.id==RepositoryFileComments.repository_file_id,"
         "RepositoryFileComments.active==True)"),
    )
    # link to markers through RepositoryFiles
    markers = relationship(
        RepositoryFileMarkers,
        backref="commit",
        secondary=RepositoryFiles.__table__,
        primaryjoin="VulnerabilityGitCommits.id==RepositoryFiles.commit_id",
        secondaryjoin=(
            "and_(RepositoryFiles.id==RepositoryFileMarkers.repository_file_id,"
            "RepositoryFileMarkers.active==True)"),
    )

    @property
    def num_files(self):
        """Number of repository files linked to this commit."""
        # TODO: This should be refactored as it is incredibly inefficient.
        #       We should use a count on the database side instead.
        return len(self.repository_files)

    @property
    def num_comments(self):
        """Number of active comments linked through repository files."""
        # TODO: see comment regarding performance above.
        return len(self.comments)

    @property
    def num_markers(self):
        """Number of active markers linked through repository files."""
        # TODO: see comment regarding performance above.
        return len(self.markers)

    @property
    def repo_url(self):
        """Repository URL; derived from a github.com commit link if unset."""
        if not self._repo_url:
            # TODO: Refactor this apporach of retrieving github.com urls.
            if self.commit_link and "github.com" in self.commit_link:
                if self.repo_owner and self.repo_name:
                    return f"https://github.com/{self.repo_owner}/{self.repo_name}"  # pylint: disable=line-too-long
        return self._repo_url

    @repo_url.setter
    def repo_url(self, repo_url):
        self._repo_url = repo_url

    @property
    def commit_link(self):
        return self._commit_link

    @commit_link.setter
    def commit_link(self, commit_link):
        """Set the commit link, deriving repo_url and commit_hash from it."""
        # TODO: Recheck link sanitization when other links then GitHub are
        # allowed again. There might be no repo_url set and the commit_link
        # might be just a VCS UI link to the patch. We should still always
        # require a separate repository link and commit hash if it's not a
        # simple Github entry.

        (commit_link, repo_url,
         commit_hash) = self._parse_commit_link(commit_link)
        self._commit_link = commit_link
        if repo_url:
            self.repo_url = repo_url
        self.commit_hash = commit_hash

    @staticmethod
    def _parse_commit_link(
            commit_link) -> Tuple[str, Optional[str], Optional[str]]:
        """Parse a commit link into (link, repo_url, commit_hash).

        :raises InvalidIdentifierException: if no VCS handler accepts it.
        """
        vcs_handler = get_vcs_handler(None, commit_link)
        if not vcs_handler:
            raise InvalidIdentifierException(
                "Please specify a valid commit link")

        return commit_link, vcs_handler.repo_url, vcs_handler.commit_hash

    def __init__(
        self,
        commit_link=None,
        repo_owner=None,
        repo_name=None,
        repo_url=None,
        commit_hash=None,
    ):
        """Build a commit either from a commit link or a repo_url + hash.

        Values parsed from ``commit_link`` take precedence over the
        explicitly passed ``repo_url`` / ``commit_hash``.

        :raises InvalidIdentifierException: if neither form validates.
        """
        super().__init__()
        self.repo_owner = repo_owner
        self.repo_name = repo_name
        if commit_link:
            (
                commit_link,
                parsed_repo_url,
                parsed_commit_hash,
            ) = self._parse_commit_link(commit_link)

            self.commit_link = commit_link
            if parsed_repo_url is not None:
                repo_url = parsed_repo_url
            if parsed_commit_hash is not None:
                commit_hash = parsed_commit_hash
        if repo_url or commit_hash:
            vcs_handler = get_vcs_handler_by_repo_hash(None, repo_url,
                                                       commit_hash)
            if not vcs_handler:
                raise InvalidIdentifierException(
                    "Please specify a valid repo_url and commit_hash")
            self.commit_hash = commit_hash
            self.repo_url = repo_url
            if commit_link is None:
                self.commit_link = vcs_handler.commit_link

    def to_json(self):
        """Serialize object properties as dict."""
        return {
            "commit_link": self.commit_link,
            "repo_owner": self.repo_owner,
            "repo_name": self.repo_name,
            "repo_url": self.repo_url,
            "commit_hash": self.commit_hash,
            "relevant_files": self.get_relevant_files(),
        }

    def get_relevant_files(self):
        """Extracts the relevant files from tree_cache"""
        relevant_files = []

        if self.tree_cache is None:
            return relevant_files

        # tree_cache is a JSON document; only the "commit" -> "files" part
        # is consumed here.
        tree = json.loads(self.tree_cache)
        if "commit" in tree:
            commit_data = tree["commit"]
            master_commit_files = commit_data["files"]

            for patched_files in master_commit_files:
                relevant_file_path = "./" + patched_files["path"]
                relevant_files.append(relevant_file_path)

        return relevant_files

    def copy(self):
        """Deep-copy this commit including its repository files."""
        new_commit = copy_obj(self)
        new_commit.repository_files = []
        for repo_file in self.repository_files:
            new_commit.repository_files.append(repo_file.copy())
        # N.B. comments and markers are copied in repository_files not here.
        return new_commit
Example #44
0
 def add_geometry_column(cls):
     if not hasattr(cls, 'geom'):
         cls.geom = deferred(
             Column(Geometry(geometry_type='LINESTRING', srid=config.SRID)))
Example #45
0
class DBStoredFile(Base):
    """ :class:`depot.io.interfaces.StoredFile` implementation that stores
    file data in SQL database.

    Can be used together with :class:`kotti.filedepot.DBFileStorage` to
    implement blobs storage in the database.
    """

    __tablename__ = "blobs"

    #: Primary key column in the DB
    #: (:class:`sqlalchemy.types.Integer`)
    id = Column(Integer(), primary_key=True)
    #: Unique file id given to this blob
    #: (:class:`sqlalchemy.types.String`)
    file_id = Column(String(36), index=True)
    #: The original filename it had when it was uploaded.
    #: (:class:`sqlalchemy.types.String`)
    filename = Column(Unicode(100))
    #: MIME type of the blob
    #: (:class:`sqlalchemy.types.String`)
    content_type = Column(String(100))
    #: Size of the blob in bytes
    #: (:class:`sqlalchemy.types.Integer`)
    content_length = Column(Integer())
    #: Date / time the blob was created or last modified
    #: (:class:`sqlalchemy.types.DateTime`)
    last_modified = Column(DateTime())
    #: The binary data itself
    #: (:class:`sqlalchemy.types.LargeBinary`)
    data = deferred(Column('data', LargeBinary()))

    # Read cursor position and lazily-loaded data cache (see read()).
    _cursor = 0
    _data = _marker

    public_url = None

    def __init__(self,
                 file_id,
                 filename=None,
                 content_type=None,
                 last_modified=None,
                 content_length=None,
                 **kwds):
        self.file_id = file_id
        self.filename = filename
        self.content_type = content_type
        # NOTE(review): datetime.now() is timezone-naive — confirm callers
        # expect local time rather than UTC.
        self.last_modified = last_modified or datetime.now()
        self.content_length = content_length

        for k, v in kwds.items():
            setattr(self, k, v)

    def read(self, n=-1):
        """Reads ``n`` bytes from the file.

        If ``n`` is not specified or is ``-1`` the whole
        file content is read in memory and returned
        """
        if self._data is _marker:
            # Load the (deferred) blob once and cache it on the instance.
            file_id = DBSession.merge(self).file_id
            self._data = DBSession.query(DBStoredFile.data).\
                filter_by(file_id=file_id).scalar()

        if n == -1:
            result = self._data[self._cursor:]
        else:
            result = self._data[self._cursor:self._cursor + n]

        self._cursor += len(result)

        return result

    @staticmethod
    def close(*args, **kwargs):
        """Implement :meth:`StoredFile.close`.
        :class:`DBStoredFile` never closes.
        """
        return

    @staticmethod
    def closed():
        """Implement :meth:`StoredFile.closed`.
        """
        return False

    @staticmethod
    def writable():
        """Implement :meth:`StoredFile.writable`.
        """
        return False

    @staticmethod
    def seekable():
        """Implement :meth:`StoredFile.seekable`.
        """
        return True

    def seek(self, offset, whence=0):
        """ Change stream position.

        Change the stream position to the given byte offset. The offset is
        interpreted relative to the position indicated by whence.

        :param n: Position for the cursor
        :type n: int

        :param whence: * 0 -- start of stream (the default);
                              offset should be zero or positive
                       * 1 -- current stream position; offset may be negative
                       * 2 -- end of stream; offset is usually negative
        :type whence: int
        """
        if whence == 0:
            self._cursor = offset
        elif whence == 1:
            self._cursor = self._cursor + offset
        elif whence == 2:
            # Bug fix: whence=2 used to behave like whence=1 (relative to
            # the current position) even though the docstring documents
            # end-of-stream semantics.  Seek relative to the blob size.
            self._cursor = (self.content_length or 0) + offset
        else:
            raise ValueError('whence must be 0, 1 or 2')

    def tell(self):
        """ Returns current position of file cursor

        :result: Current file cursor position.
        :rtype: int
        """
        return self._cursor

    @property
    def name(self):
        """Implement :meth:`StoredFile.name`.

        :result: the filename of the saved file
        :rtype: string
        """
        return self.filename

    @classmethod
    def __declare_last__(cls):
        """ Executed by SQLAlchemy as part of mapper configuration

        When the data changes, we want to reset the cursor position of target
        instance, to allow proper streaming of data.
        """
        event.listen(DBStoredFile.data, 'set', handle_change_data)
Example #46
0
class Commit(Base):
    '''
    Schema representation of a change in the code repository.
    '''
    __tablename__ = 'commit'
    id = Column(Integer, primary_key=True)
    commit_id = Column(String, nullable=False)
    branch = Column(String, nullable=False)

    repository_id = Column(Integer, ForeignKey('repository.id'))
    repository = relationship('Repository', back_populates='commits')

    # commit_type is an extra metadata around the content of the commit:
    #   1) DEVELOP - a standard change by a developer
    #   2) SYNTHETIC - a synthetic alteration
    #   3) RESET - a commit that resets synthetic alteration commits
    commit_type = Column(String, nullable=False)

    # For synthetic commits, we have a base commit that we break down into
    # smaller commits to determine blame.  The parent commit is the base commit
    # and all smaller commits the revert the diffs of the parent commit have
    # this attribute set.
    # parent_commit_id = Column(Integer, ForeignKey('commit.id'), nullable=True)
    # parent_commit = relationship(
    #     'Commit',
    #     remote_side=[parent_commit_id])
    # child_commits = relationship(
    #     'Commit',
    #     back_populates='parent_commit')

    # this is kind of a hack.  Synthetic commits are created from synthetic
    # diffs.  Retrieving the powerset of a set of diffs from the database is
    # prohibitively expensive.  So we will hash the sorted list of synthetic
    # diff ids and use that hash to retrieve the 'child' commits of a commit.
    synthetic_diff_hash = Column(Integer, nullable=True)

    # this stores the commit tensor for determining which tests are going to
    # fail.  We used the 'deferred' function so that it's loaded lazily and not
    # always brought into memory.  For more information, read:
    #   https://docs.sqlalchemy.org/en/13/orm/loading_columns.html
    _commit_tensor_binary = deferred(
        Column('commit_tensor', Binary, nullable=True))

    test_runs = relationship('TestRun',
                             back_populates='commit',
                             cascade='all, delete, delete-orphan')

    # the corresponding functions histories created in this commit
    function_histories = relationship('FunctionHistory',
                                      back_populates='commit',
                                      cascade='all, delete, delete-orphan')

    # the diffs introduced by this commit
    diffs = relationship('Diff',
                         back_populates='commit',
                         cascade='all, delete, delete-orphan')

    # in-memory cache of the decoded tensor (see the commit_tensor property)
    _commit_tensor_numpy = None

    # the raw numpy vector output from the model
    test_result_prediction_data = None

    def __init__(
        self,
        repository: Repository,
        commit_id: str,
        branch: str,
        commit_type: str = SYNTHETIC_CHANGE,
    ):  # Commit
        '''
        Create a Commit row for *repository* at *commit_id* on *branch*.

        :raises BugBuddyError: when commit_id is empty.
        '''
        if not commit_id:
            raise BugBuddyError(
                'Tried creating commit without a commit_id')

        self.repository = repository
        self.commit_id = commit_id
        self.branch = branch
        self.commit_type = commit_type

    def get_matching_test_result(self, test_result):
        '''
        Return this commit's test result for the same test as *test_result*.

        Looks at the commit's first test run and matches on ``test.id``.

        :raises BugBuddyError: if zero or more than one match is found.
        '''
        # Bug fix: both error paths contained leftover
        # ``import pdb; pdb.set_trace()`` debugger breakpoints, which would
        # halt the process in production before the error was raised.
        test_run = self.test_runs[0]
        matching_test_results = [
            commit_test_result for commit_test_result in test_run.test_results
            if commit_test_result.test.id == test_result.test.id
        ]
        if not matching_test_results:
            msg = ('Could not find a matching test result for {} at {}'.format(
                test_result, self))
            raise BugBuddyError(msg)

        if len(matching_test_results) > 1:
            msg = ('Found multiple matching test_results for {} at {}'.format(
                test_result, self))
            raise BugBuddyError(msg)

        return matching_test_results[0]

    def causes_test_failures(self):
        '''
        True when the latest test run of this commit has failing tests.
        '''
        return len(self.test_failures) > 0

    def get_function_histories(self, file_path: str, start_range: int,
                               end_range: int):
        '''
        Return the function histories whose file matches *file_path* and
        whose line span fully encloses [start_range, end_range].
        '''
        matches = []
        for history in self.function_histories:
            if (history.function.file_path == file_path
                    and history.first_line <= start_range
                    and history.last_line >= end_range):
                matches.append(history)
        return matches

    def get_corresponding_function(self, file_path: str, start_range: int,
                                   end_range: int):
        '''
        Return the function history that most tightly encloses the given
        range in *file_path*, or None when nothing matches.

        def func:
            def cat:
                ## edit here
                x = 1
            x = 2

        For the edit above this returns 'def cat', whereas
        get_function_histories would return both func and cat.
        '''
        candidates = self.get_function_histories(
            file_path, start_range, end_range)

        def slack(history):
            # How much wider the function span is than the edited range;
            # the tightest enclosing function has the smallest slack.
            return (start_range - history.first_line +
                    history.last_line - end_range)

        # min() keeps the first candidate on ties, matching the original
        # strictly-smaller scan; default=None covers the empty case.
        return min(candidates, key=slack, default=None)

    def get_function_for_node(self, node):
        '''
        Return the function history that most tightly encloses *node*.
        '''
        return self.get_corresponding_function(
            file_path=node.file_path,
            start_range=node.first_line,
            end_range=node.last_line,
        )

    @property
    def is_synthetic(self):
        '''
        True when this commit was generated synthetically.
        '''
        return SYNTHETIC_CHANGE == self.commit_type

    @property
    def blames(self):
        '''
        All blames across every test result of every test run.
        '''
        return [
            blame
            for run in self.test_runs
            for result in run.test_results
            for blame in result.blames
        ]

    @property
    def latest_test_run(self):
        '''
        The most recent test run (last element of test_runs).
        '''
        runs = self.test_runs
        return runs[-1]

    @property
    def earliest_test_run(self):
        '''
        Returns the earliest (first) test run

        Note: the code returns ``test_runs[0]``; the previous docstring
        incorrectly said "most recent".
        '''
        return self.test_runs[0]

    @property
    def commit_tensor(self):
        '''
        Returns the commit in tensor form as a numpy array, reshaped to
        (num_functions, num_tests, num_features).
        '''
        if self._commit_tensor_numpy is None:
            if self._commit_tensor_binary:
                # Bug fix: numpy.fromstring is deprecated for binary input;
                # frombuffer is the supported equivalent.  .copy() keeps the
                # array writable (frombuffer alone returns a read-only view
                # of the bytes, whereas fromstring returned a fresh array).
                self._commit_tensor_numpy = numpy.frombuffer(
                    self._commit_tensor_binary).copy()

        # NOTE(review): if no binary tensor is stored this reshapes None and
        # raises — presumably callers only access this after storing one.
        return numpy.reshape(self._commit_tensor_numpy, self.input_shape)

    def needs_blaming(self):
        '''
        True when the commit has test failures but no blames recorded yet.
        '''
        if not self.causes_test_failures():
            return False
        return not self.blames

    @property
    def num_tests(self):
        '''
        Number of tests defined on the commit's repository.
        '''
        tests = self.repository.tests
        return len(tests)

    @property
    def functions(self):
        '''
        The repository's functions, sorted in-place by ascending id.
        '''
        repo_functions = self.repository.functions
        # Note: sorts the repository's list in place, as the original did.
        repo_functions.sort(key=lambda fn: fn.id)
        return repo_functions

    @property
    def test_failures(self):
        '''
        Tests that failed in the latest test run for the commit.
        '''
        latest_run = self.latest_test_run
        return [
            result.test for result in latest_run.test_results
            if result.status == TEST_OUTPUT_FAILURE
        ]

    @property
    def num_test_failures(self):
        '''
        Number of tests that failed in the latest test run for the commit.
        '''
        failures = self.test_failures
        return len(failures)

    @property
    def failed_test_results(self):
        '''
        Test results of failed tests in the latest test run for the commit.
        '''
        latest_run = self.latest_test_run
        return [
            result for result in latest_run.test_results
            if result.status == TEST_OUTPUT_FAILURE
        ]

    @property
    def num_functions(self):
        '''
        Returns the number of functions present for the commit.

        Delegates to the parent repository's function collection, so this
        is the repository-wide function count.
        '''
        return len(self.repository.functions)

    @property
    def num_features(self):
        '''
        Returns the number of features for a given function-test union.

        NOTE(review): hard-coded to 3 — this must stay in sync with however
        the per-(function, test) feature vector is built elsewhere; confirm
        before changing.
        '''
        return 3

    @property
    def input_shape(self):
        '''
        Returns the input shape of a commit as the tuple
        (num_functions, num_tests, num_features).
        '''
        return (self.num_functions, self.num_tests, self.num_features)

    @property
    def test_result_prediction(self):
        '''
        Returns the test result prediction data as a dict keyed by test,
        pairing tests (in sorted-by-id order) with prediction values.

        Raises BugBuddyError when no prediction data has been stored.
        '''
        prediction_data = self.test_result_prediction_data
        if prediction_data is None:
            msg = 'Requested prediction data but it does not exist'
            raise BugBuddyError(msg)

        return dict(zip(self.sorted_tests, prediction_data))

    @property
    def sorted_tests(self):
        '''
        Returns the tests sorted by ascending id.
        '''
        # Use sorted() rather than list.sort(): the previous in-place sort
        # mutated the shared ``self.repository.tests`` collection as a side
        # effect of merely reading this property (same fix as `functions`).
        return sorted(self.repository.tests, key=lambda test: test.id)

    def has_same_test_result_output(self, test_result, status: str = None):
        '''
        Returns true if this commit had the same test_result output as the
        given test_result.

        @param test_result: the result compared against this commit's
            matching test result
        @param status: when truthy, both results must additionally have
            exactly this status
        '''
        matching_test_result = self.get_matching_test_result(test_result)
        if not matching_test_result:
            return False

        if not status:
            return matching_test_result.status == test_result.status

        return (matching_test_result.status == status
                and test_result.status == status)

    def summary(self, indent=0, blame=True, prediction=True, edits=True):
        '''
        Prints a summary to terminal about this commit.

        @param indent: number of leading spaces for the header lines
        @param blame: include the actual-blame section
        @param prediction: include the predicted-blame section
        @param edits: include the diff/edit section
        '''
        pad = ' ' * indent
        print(pad + str(self))
        print(pad + 'Number of test runs: {}'.format(len(self.test_runs)))

        if edits:
            print('-' * 10 + ' E D I T S ' + '-' * 10)
            for diff in self.diffs:
                print(' ' * (indent + 2) + str(diff))
            print('\n')

        if blame:
            self.blame_summary()

        if prediction:
            self.prediction_summary()

    def blame_summary(self, indent=0):
        '''
        Prints a summary to terminal about the actual blames of the commit.

        @param indent: number of leading spaces for the per-item lines
        '''
        print('-' * 10 + ' A C T U A L ' + '-' * 10)
        # seed the map with every function touched by this commit's diffs
        function_to_test_map = {}
        for diff in self.diffs:
            function_to_test_map[diff.function] = []

        # NOTE(review): a blame pointing at a function without a diff here
        # would raise KeyError — presumably blames only ever target diffed
        # functions; confirm.
        for test_failure_result in self.earliest_test_run.test_failures:
            for blame in test_failure_result.blames:
                function_to_test_map[blame.function].append(
                    test_failure_result.test)

        # print functions in ascending-id order, each with its failed tests
        ordered_pairing = sorted(function_to_test_map.items(),
                                 key=lambda kv: kv[0].id)
        for function, failed_tests in ordered_pairing:
            print(' ' * (indent + 2) + str(function))
            for failed_test in failed_tests:
                print(' ' * (indent + 4) + str(failed_test))
        print('\n')

    def prediction_summary(self, indent=0):
        '''
        Prints a summary to terminal about the predicted blames of the
        commit.

        @param indent: number of leading spaces for the per-item lines
        '''
        print('-' * 10 + ' P R E D I C T I O N ' + '-' * 10)
        function_to_test_map = defaultdict(list)

        # group each failing test (with its confidence) under every
        # function the prediction blamed for it
        for test_failure_result in self.latest_test_run.test_failures:
            prediction = test_failure_result.predicted_blamed_functions
            for (blamed_function, confidence) in prediction:
                function_to_test_map[blamed_function].append({
                    'test':
                    test_failure_result.test,
                    'confidence':
                    confidence,
                })

        for function, failed_test_data_list in function_to_test_map.items():
            print(' ' * (indent + 2) + str(function))
            for failed_test_data in failed_test_data_list:
                failed_test = failed_test_data['test']
                confidence = failed_test_data['confidence']
                print(' ' * (indent + 4) + str(failed_test) +
                      ' | {}'.format(confidence))
        print('\n')

    def __repr__(self):
        '''
        Debug string representation of the commit.
        '''
        return '<Commit {} | {} | {} | {} | {} />'.format(
            self.id,
            self.commit_id,
            self.repository.name,
            self.branch,
            self.commit_type)
# --- Example #47 ---
class Product(db.Model):
    '''
    A scholarly product (paper, dataset, etc.) linked to a person by
    ORCID id and optionally identified by DOI.
    '''
    # primary key is a 10-char shortuuid assigned in __init__, not the DOI
    id = db.Column(db.Text, primary_key=True)
    doi = db.Column(db.Text)
    orcid_id = db.Column(db.Text, db.ForeignKey('person.orcid_id'))
    created = db.Column(db.DateTime)

    # core bibliographic metadata
    title = db.Column(db.Text)
    journal = db.Column(db.Text)
    type = db.Column(db.Text)  # shadows builtin ``type``; kept for schema compatibility
    year = db.Column(db.Text)  # stored as text; see year_int for numeric access
    authors = deferred(db.Column(db.Text))  # deferred: potentially large
    authors_short = db.Column(db.Text)
    url = db.Column(db.Text)
    arxiv = db.Column(db.Text)
    orcid_put_code = db.Column(db.Text)
    orcid_importer = db.Column(db.Text)

    # raw upstream API payloads; deferred ones only load on first access
    orcid_api_raw_json = deferred(db.Column(JSONB))
    altmetric_api_raw = deferred(db.Column(JSONB))
    # mendeley_api_raw = deferred(db.Column(JSONB)) #  @todo go back to this when done exploring
    mendeley_api_raw = db.Column(JSONB)

    # altmetric-derived aggregates
    altmetric_id = db.Column(db.Text)
    altmetric_score = db.Column(db.Float)
    # post_counts = db.Column(MutableDict.as_mutable(JSONB))  # don't store post_counts anymore, just calculate them
    post_details = db.Column(MutableDict.as_mutable(JSONB))
    poster_counts = db.Column(MutableDict.as_mutable(JSONB))
    event_dates = db.Column(MutableDict.as_mutable(JSONB))

    # open-access resolution results
    user_supplied_fulltext_url = db.Column(db.Text)
    fulltext_url = db.Column(db.Text)
    license = db.Column(db.Text)
    evidence = db.Column(db.Text)

    # last error message from one of the set_data_from_* methods, if any
    error = db.Column(db.Text)

    def __init__(self, **kwargs):
        '''
        Assign a fresh short id and creation timestamp, then let the
        SQLAlchemy base constructor apply any column values in ``kwargs``
        (which may therefore override ``id``/``created``).
        '''
        self.created = datetime.datetime.utcnow().isoformat()
        self.id = shortuuid.uuid()[:10]
        super(Product, self).__init__(**kwargs)

    def set_oa_from_user_supplied_fulltext_url(self, url):
        '''
        Record a fulltext URL provided directly by the user and mark the
        product as open access on that basis. No-op for an empty url.
        '''
        if not url:
            return
        self.user_supplied_fulltext_url = url
        self.fulltext_url = url
        self.evidence = "user supplied fulltext url"
        self.license = "unknown"

    def set_biblio_from_orcid(self):
        '''
        Populate this product's bibliographic fields from the stored raw
        ORCID payload via the shared set_biblio_from_biblio_dict helper.
        '''
        # NOTE(review): when orcid_api_raw_json is missing we only warn and
        # still call the helper with a falsy dict — looks like a missing
        # early return; confirm the helper tolerates None.
        if not self.orcid_api_raw_json:
            print u"no self.orcid_api_raw_json for product {}".format(self.id)
        orcid_biblio_dict = self.orcid_api_raw_json
        set_biblio_from_biblio_dict(self, orcid_biblio_dict)

    def set_data_from_altmetric(self, high_priority=False):
        '''
        Fetch the raw altmetric.com payload and recompute all derived
        attributes; never raises (except KeyboardInterrupt/SystemExit),
        recording failures in self.error instead.

        @param high_priority: forwarded to set_altmetric_api_raw
        '''
        # set_altmetric_api_raw catches its own errors, but since this is the method
        # called by the thread from Person.set_data_from_altmetric_for_all_products
        # want to have defense in depth and wrap this whole thing in a try/catch too
        # in case errors in calculate or anything else we add.
        try:
            self.set_altmetric_api_raw(high_priority)
            self.calculate_altmetric_attributes()
        except (KeyboardInterrupt, SystemExit):
            # let these ones through, don't save anything to db
            raise
        except Exception:
            logging.exception("exception in set_data_from_altmetric")
            self.error = "error in set_data_from_altmetric"
            print self.error
            print u"in generic exception handler, so rolling back in case it is needed"
            db.session.rollback()

    def calculate_altmetric_attributes(self):
        '''
        Derive all altmetric-based attributes (url, score, altmetric id,
        poster counts, post details, event dates) from the raw altmetric
        payload already stored on the product.
        '''
        # (removed an unused ``start_time = time()`` local left over from
        # deleted timing code)
        if self.doi:
            self.url = u"http://doi.org/{}".format(self.doi)

        self.set_altmetric_score()
        self.set_altmetric_id()
        # self.set_post_counts()  don't store post counts for now, just calculate them
        self.set_poster_counts()
        self.set_post_details()
        self.set_event_dates()

    @property
    def display_authors(self):
        # Short author string for display; the full ``authors`` column is
        # deferred and potentially large.
        return self.authors_short

    @property
    def has_fulltext_url(self):
        '''
        True when any fulltext URL has been resolved for this product.
        '''
        # identity comparison with None per PEP 8 (was ``!= None``)
        return self.fulltext_url is not None

    def set_altmetric_score(self):
        '''
        Copy the altmetric score out of the raw payload, defaulting to 0
        when the payload is missing or lacks a "score" entry.
        '''
        self.altmetric_score = 0
        raw_payload = self.altmetric_api_raw
        try:
            self.altmetric_score = raw_payload["score"]
        except (KeyError, TypeError):
            pass

    def set_data_from_mendeley(self, high_priority=False):
        '''
        Refresh the raw mendeley payload; never raises (except
        KeyboardInterrupt/SystemExit), recording failures in self.error.

        @param high_priority: accepted for interface parity; unused here
        '''
        # set_altmetric_api_raw catches its own errors, but since this is the method
        # called by the thread from Person.set_data_from_altmetric_for_all_products
        # want to have defense in depth and wrap this whole thing in a try/catch too
        # in case errors in calculate or anything else we add.
        try:
            self.mendeley_api_raw = set_mendeley_data(self)
        except (KeyboardInterrupt, SystemExit):
            # let these ones through, don't save anything to db
            raise
        except Exception:
            logging.exception("exception in set_data_from_mendeley")
            self.error = "error in set_data_from_mendeley"
            print self.error
            print u"in generic exception handler, so rolling back in case it is needed"
            db.session.rollback()

    @property
    def matches_open_url_fragment(self):
        '''
        True when the product url contains any of the known open-access
        url fragments; False when there is no url at all.
        '''
        if not self.url:
            return False
        return any(fragment in self.url for fragment in open_url_fragments)

    def set_data_from_oadoi(self, high_priority=False):
        '''
        Resolve open-access status by calling the Unpaywall (oaDOI) API:
        fills journal/year when missing and fulltext url/license/evidence
        from the best OA location. Certain genres and open-url matches are
        treated as OA without calling the API.

        @param high_priority: accepted for interface parity; unused here
        '''
        # print u"starting set_data_from_oadoi with {}".format(self.doi)
        # NOTE(review): start_time is unused — the timing print at the end
        # of the method is commented out.
        start_time = time()

        if self.guess_genre() in ["preprint", "dataset", "report"]:
            # print u"{} is genre {} so it has fulltext, don't need to call oadoi".format(self.id, self.guess_genre())
            self.fulltext_url = self.url

        # sometimes genre is "other" or similar but is still in an open place like figshare.  call these open.
        if self.matches_open_url_fragment:
            # print u"{} is open url fragment, so don't need to call oadoi".format(self.id)
            self.fulltext_url = self.url

        if not self.doi:
            return

        # set_altmetric_api_raw catches its own errors, but since this is the method
        # called by the thread from Person.set_data_from_altmetric_for_all_products
        # want to have defense in depth and wrap this whole thing in a try/catch too
        # in case errors in calculate or anything else we add.
        try:
            # url = u"http://*****:*****@impactstory.org"
            url = u"http://api.unpaywall.org/v2/{}[email protected]".format(
                self.doi)

            r = requests.get(url)
            if r and r.status_code == 200:
                data = r.json()
                if not self.journal:
                    self.journal = data["journal_name"]
                if not self.year:
                    self.year = data["year"]
                best_oa_location = data["best_oa_location"]
                if best_oa_location:
                    # a user-supplied url always wins over the API's answer
                    if not self.user_supplied_fulltext_url:
                        self.fulltext_url = best_oa_location["url"]
                    self.license = best_oa_location["license"]
                    self.evidence = best_oa_location["evidence"]
            else:
                pass
                # print u"in set_data_from_oadoi: bad status_code={} for product {}. skipping.".format(
                #     r.status_code, self.id)
        except (KeyboardInterrupt, SystemExit):
            # let these ones through, don't save anything to db
            raise
        except IndexError:
            print u"IndexError in set_data_from_oadoi on product {}. skipping.".format(
                self.id)
            print r.json()
        except Exception:
            logging.exception(
                u"exception in set_data_from_oadoi on product {}".format(
                    self.id))
            self.error = "error in set_data_from_oadoi"
            print self.error
            print u"in generic exception handler for product {}, so rolling back in case it is needed".format(
                self.id)
            db.session.rollback()
        # print u"finished set_data_from_oadoi with {} in {}".format(self.doi, elapsed(start_time, 2))

    def get_abstract(self):
        '''
        The abstract from the altmetric citation payload, or None when
        the payload is missing or lacks one.
        '''
        try:
            return self.altmetric_api_raw["citation"]["abstract"]
        except (KeyError, TypeError):
            return None

    def get_abstract_using_mendeley(self):
        '''
        The abstract from the mendeley payload when present, falling back
        to the altmetric citation abstract; None when neither is present.
        '''
        mendeley_raw = self.mendeley_api_raw
        if mendeley_raw and "abstract" in mendeley_raw:
            return mendeley_raw["abstract"]
        try:
            return self.altmetric_api_raw["citation"]["abstract"]
        except (KeyError, TypeError):
            return None

    # post_counts is no longer stored in the db; it is derived on the fly
    @property
    def post_counts(self):
        '''
        Number of posts per source, computed from post_details.
        '''
        tallies = defaultdict(int)
        for post_info in self.posts:
            tallies[post_info["source"]] += 1
        return tallies

    def post_counts_by_source(self, source):
        '''
        Number of posts for the given source name; 0 when there are no
        posts at all.
        '''
        if not self.posts:
            return 0
        return sum(1 for post in self.posts if post["source"] == source)

    @property
    def num_posts(self):
        # Total number of posts across all recognized sources.
        return len(self.posts)

    @property
    def posts(self):
        '''
        The stored post dicts whose source is a known source, or [] when
        no post details have been stored.
        '''
        details = self.post_details
        if details and "list" in details:
            return [post for post in details["list"]
                    if post["source"] in sources_metadata]
        return []

    def set_post_details(self):
        '''
        Rebuilds post_details from the raw altmetric payload: a dict with a
        single "list" key holding one normalized dict per post (source,
        url, title, attribution, ...), sorted by source and, within a
        source, newest first. Returns the new value, or None when the
        payload has no posts.
        '''
        if not self.altmetric_api_raw or \
                ("posts" not in self.altmetric_api_raw) or \
                (not self.altmetric_api_raw["posts"]):
            return

        all_post_dicts = []

        for (source, posts) in self.altmetric_api_raw["posts"].iteritems():
            for post in posts:
                post_dict = {}
                post_dict["source"] = source

                # twitter-specific fields; "attribution" may be overwritten
                # by the author name below
                if source == "twitter":
                    if "author" in post:
                        if "id_on_source" in post["author"]:
                            post_dict["twitter_handle"] = post["author"][
                                "id_on_source"]
                            post_dict["attribution"] = post["author"][
                                "id_on_source"]
                        if "followers" in post["author"]:
                            post_dict["followers"] = post["author"][
                                "followers"]

                # useful parts
                if "posted_on" in post:
                    post_dict["posted_on"] = post["posted_on"]

                if "author" in post and "name" in post["author"]:
                    post_dict["attribution"] = post["author"]["name"]

                if "page_url" in post:
                    # for wikipedia.  we want this one not what is under url
                    post_dict["url"] = post["page_url"]
                elif "url" in post:
                    post_dict["url"] = post["url"]

                # title or summary depending on post type
                if source in [
                        "blogs", "f1000", "news", "q&a", "reddit", "wikipedia"
                ] and "title" in post:
                    post_dict["title"] = post["title"]
                    if source == "wikipedia" and "summary" in post:
                        post_dict["summary"] = post["summary"]
                elif "summary" in post:
                    title = post["summary"]
                    # remove urls.  From http://stackoverflow.com/a/11332580/596939
                    title = re.sub(r'^https?:\/\/.*[\r\n]*',
                                   '',
                                   title,
                                   flags=re.MULTILINE)
                    if not title:
                        title = "No title."
                    # truncate long summaries to 15 words plus an ellipsis
                    if len(title.split()) > 15:
                        first_few_words = title.split()[:15]
                        title = u" ".join(first_few_words)
                        title = u"{} \u2026".format(title)
                    post_dict["title"] = title
                else:
                    post_dict["title"] = ""

                all_post_dicts.append(post_dict)

        # NOTE(review): the sort key assumes every post has "posted_on";
        # a post without it would raise KeyError here — confirm upstream.
        all_post_dicts = sorted(all_post_dicts,
                                key=lambda k: k["posted_on"],
                                reverse=True)
        # stable second sort: primary order by source, ties stay newest-first
        all_post_dicts = sorted(all_post_dicts, key=lambda k: k["source"])

        self.post_details = {"list": all_post_dicts}
        return self.post_details

    # don't store post_counts anymore, just calculate them
    # def set_post_counts(self):
    #     self.post_counts = {}
    #
    #     if not self.altmetric_api_raw or "counts" not in self.altmetric_api_raw:
    #         return
    #
    #     exclude_keys = ["total", "readers"]
    #     for k in self.altmetric_api_raw["counts"]:
    #         if k not in exclude_keys:
    #             source = k
    #             count = int(self.altmetric_api_raw["counts"][source]["posts_count"])
    #             self.post_counts[source] = count
    #             print u"setting posts for {source} to {count} for {doi}".format(
    #                 source=source,
    #                 count=count,
    #                 doi=self.doi)

    def set_poster_counts(self):
        '''
        Rebuilds poster_counts: unique-poster counts per source, taken from
        the altmetric payload's "counts" section (excluding the aggregate
        "total" and "readers" entries). Left empty when no payload.
        '''
        self.poster_counts = {}
        raw_payload = self.altmetric_api_raw
        if not raw_payload or "counts" not in raw_payload:
            return

        for source in raw_payload["counts"]:
            if source in ("total", "readers"):
                continue
            self.poster_counts[source] = int(
                raw_payload["counts"][source]["unique_users_count"])
            # print u"setting posters for {source} to {count} for {doi}".format(
            #     source=source,
            #     count=count,
            #     doi=self.doi)

    # @property
    # def tweeters(self):
    #     if self.tweeter_details and "list" in self.tweeter_details:
    #         return self.tweeter_details["list"]
    #     return []
    #
    # def set_tweeter_details(self):
    #     if not self.altmetric_api_raw or \
    #             ("posts" not in self.altmetric_api_raw) or \
    #             (not self.altmetric_api_raw["posts"]):
    #         return
    #
    #     if not "twitter" in self.altmetric_api_raw["posts"]:
    #         return
    #
    #     tweeter_dicts = {}
    #
    #     for post in self.altmetric_api_raw["posts"]["twitter"]:
    #         twitter_handle = post["author"]["id_on_source"]
    #
    #         if twitter_handle not in tweeter_dicts:
    #             tweeter_dict = {}
    #             tweeter_dict["url"] = u"http://twitter.com/{}".format(twitter_handle)
    #
    #             if "name" in post["author"]:
    #                 tweeter_dict["name"] = post["author"]["name"]
    #
    #             if "image" in post["author"]:
    #                 tweeter_dict["img"] = post["author"]["image"]
    #
    #             if "description" in post["author"]:
    #                 tweeter_dict["description"] = post["author"]["description"]
    #
    #             if "followers" in post["author"]:
    #                 tweeter_dict["followers"] = post["author"]["followers"]
    #
    #             tweeter_dicts[twitter_handle] = tweeter_dict
    #
    #     self.tweeter_details = {"list": tweeter_dicts.values()}

    @property
    def event_days_ago(self):
        '''
        Maps each source to a list of event ages in days, derived from the
        stored event_dates; {} when no event dates are stored.
        '''
        if not self.event_dates:
            return {}
        resp = {}
        for source, date_list in self.event_dates.iteritems():
            resp[source] = [
                days_ago(event_date_string) for event_date_string in date_list
            ]
        return resp

    def set_event_dates(self):
        '''
        Rebuilds event_dates: for each source, the ascending-sorted list of
        "posted_on" timestamps found in the altmetric payload. Left empty
        when the payload has no posts.
        '''
        self.event_dates = {}

        if not self.altmetric_api_raw or "posts" not in self.altmetric_api_raw:
            return
        if self.altmetric_api_raw["posts"] == []:
            return

        for source, posts in self.altmetric_api_raw["posts"].iteritems():
            for post in posts:
                post_date = post["posted_on"]
                if source not in self.event_dates:
                    self.event_dates[source] = []
                self.event_dates[source].append(post_date)

        # now sort them all
        for source in self.event_dates:
            self.event_dates[source].sort(reverse=False)
            # print u"set event_dates for {} {}".format(self.doi, source)

    @property
    def first_author_family_name(self):
        '''
        The text before the first comma in the authors string, or None
        when authors is unset.

        NOTE(review): assumes a comma-delimited authors string where the
        first chunk is the first author's family name — confirm the
        stored format.
        '''
        first_author = None
        if self.authors:
            try:
                first_author = self.authors.split(u",")[0]
            except UnicodeEncodeError:
                print u"unicode error on", self.authors
        return first_author

    def set_doi_from_crossref_biblio_lookup(self, high_priority=False):
        '''
        Try to fill in a missing DOI by an exact title + first-author
        lookup on Crossref. Stores and returns the DOI on success;
        returns None otherwise (already have a DOI, insufficient biblio,
        no match, or timeout).

        @param high_priority: accepted for interface parity; unused here
        '''
        if self.doi:
            return None

        if self.title and self.first_author_family_name:
            # print u"self.first_author_family_name", self.first_author_family_name
            url_template = u"""http://doi.crossref.org/servlet/[email protected]&qdata= <?xml version="1.0"?> <query_batch version="2.0" xsi:schemaLocation="http://www.crossref.org/qschema/2.0 http://www.crossref.org/qschema/crossref_query_input2.0.xsd" xmlns="http://www.crossref.org/qschema/2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <head> <email_address>[email protected]</email_address><doi_batch_id>ABC_123_fff </doi_batch_id> </head> <body> <query enable-multiple-hits="true" secondary-query="author-title-multiple-hits">   <article_title match="exact">{title}</article_title>    <author search-all-authors="true" match="exact">{first_author}</author> </query> </body></query_batch>"""
            url = url_template.format(
                title=self.title, first_author=self.first_author_family_name)
            # print u"url: {}".format(url)
            try:
                r = requests.get(url, timeout=5)
                # crossref returns the DOI after the last "|" on success
                if r.status_code == 200 and r.text and u"|" in r.text:
                    doi = r.text.rsplit(u"|", 1)[1]
                    if doi and doi.startswith(u"10."):
                        doi = doi.strip()
                        if doi:
                            print u"got a doi! {}".format(doi)
                            self.doi = doi
                            return doi
            except requests.Timeout:
                # print u"timeout"
                pass

        # print ".",
        return None

    def set_altmetric_api_raw(self, high_priority=False):
        '''
        Would fetch the raw altmetric.com payload; currently disabled.

        Intentionally short-circuits (see the commented-out implementation
        below) until altmetric.com rate limiting is handled, just recording
        an explanatory message in self.error.
        '''
        self.error = "not calling altmetric.com until we handle ratelimiting"
        print self.error
        return

        # try:
        #     start_time = time()
        #     self.error = None
        #     self.altmetric_api_raw = None
        #
        #     if not self.doi:
        #         return
        #
        #     url = u"http://api.altmetric.com/v1/fetch/doi/{doi}?key={key}".format(
        #         doi=self.clean_doi,
        #         key=os.getenv("ALTMETRIC_KEY")
        #     )
        #     # might throw requests.Timeout
        #     r = requests.get(url, timeout=10)  #timeout in seconds
        #
        #     # handle rate limit stuff
        #     if r.status_code == 429:
        #         print u"over altmetric.com rate limit (got 429) so calling without twitter"
        #
        #         url = u"http://api.altmetric.com/v1/fetch/doi/{doi}?key={key}&exclude_sources=twitter".format(
        #             doi=self.clean_doi,
        #             key=os.getenv("ALTMETRIC_KEY")
        #         )
        #         r = requests.get(url, timeout=10)  #timeout in seconds
        #
        #
        #     # Altmetric.com doesn't have this DOI, so the DOI has no metrics.
        #     if r.status_code == 404:
        #         # altmetric.com doesn't have any metrics for this doi
        #         self.altmetric_api_raw = {"error": "404"}
        #     elif r.status_code == 403:
        #         if r.text == "You must have a commercial license key to use this call.":
        #             # this is the error we get when we have a bad doi with a # in it.  Record, but don't throw error
        #             self.altmetric_api_raw = {"error": "403. Altmetric.com says must have a commercial license key to use this call"}
        #         else:
        #             self.error = 'got a 403 for unknown reasons'
        #     elif r.status_code == 420:
        #         self.error = "hard-stop rate limit error setting altmetric.com metrics"
        #     elif r.status_code == 429:
        #         self.error = "rate limit error setting altmetric.com metrics"
        #     elif r.status_code == 400:
        #         self.altmetric_api_raw = {"error": "400. Altmetric.com says bad doi"}
        #     elif r.status_code == 200:
        #         # we got a good status code, the DOI has metrics.
        #         self.altmetric_api_raw = r.json()
        #         # print u"yay nonzero metrics for {doi}".format(doi=self.doi)
        #     else:
        #         self.error = u"got unexpected altmetric status_code code {}".format(r.status_code)
        #
        #     # print u"after parsing in altmetric: {}s for {}".format(
        #     #     elapsed(start_time, 2), url)
        #
        # except (KeyboardInterrupt, SystemExit):
        #     # let these ones through, don't save anything to db
        #     raise
        # except requests.Timeout:
        #     self.error = "timeout from requests when getting altmetric.com metrics"
        #     print self.error
        # except Exception:
        #     logging.exception("exception in set_altmetric_api_raw")
        #     self.error = "misc error in set_altmetric_api_raw"
        #     print u"in generic exception handler, so rolling back in case it is needed"
        #     db.session.rollback()
        # finally:
        #     if self.error:
        #         print u"ERROR on {doi} profile {orcid_id}: {error}, calling {url}".format(
        #             doi=self.clean_doi,
        #             orcid_id=self.orcid_id,
        #             error=self.error,
        #             url=url)

    def set_altmetric_id(self):
        '''
        Copy the altmetric id out of the raw payload, or None when the
        payload is missing or lacks one.
        '''
        try:
            altmetric_id = self.altmetric_api_raw["altmetric_id"]
        except (KeyError, TypeError):
            altmetric_id = None
        self.altmetric_id = altmetric_id

    @property
    def sources(self):
        '''
        A Source object for every known source with at least one post on
        this product.
        '''
        candidates = (Source(source_name, [self])
                      for source_name in sources_metadata)
        return [source for source in candidates if source.posts_count > 0]

    @property
    def events_last_week_count(self):
        '''
        Total events in the last week, summed across all active sources.
        '''
        return sum(source.events_last_week_count for source in self.sources)

    @cached_property
    def normalized_title(self):
        # Normalized form of the display title; computed once per instance
        # (cached_property), so it assumes the title does not change after
        # first access.
        return normalize(self.display_title)

    @property
    def display_title(self):
        '''
        The product title, or the placeholder "No title" when unset/empty.
        '''
        return self.title or "No title"

    @property
    def year_int(self):
        '''
        The publication year as an int, or 0 when unset.
        '''
        if not self.year:
            return 0
        # NOTE(review): year is stored as Text; int() would raise
        # ValueError on a non-numeric value — confirm upstream guarantees
        # digits-only years.
        return int(self.year)

    @property
    def countries(self):
        '''
        Human-readable country names for every ISO country code that has
        twitter post counts.
        '''
        iso_codes = self.post_counts_by_iso_country.keys()
        return [get_name_from_iso(iso_code) for iso_code in iso_codes]

    @property
    def countries_using_mendeley(self):
        '''
        Country names with mendeley/twitter readership counts (already
        mapped to display names by post_counts_by_country_using_mendeley).
        '''
        return list(self.post_counts_by_country_using_mendeley.keys())

    @property
    def mendeley_url(self):
        # The product's page on mendeley, or None when the payload is
        # missing or lacks the key.
        try:
            return self.mendeley_api_raw["mendeley_url"]
        except (KeyError, TypeError):
            return None

    @property
    def mendeley_job_titles(self):
        '''
        Mendeley reader counts keyed by a standardized job title.

        Raw academic-status labels are collapsed via title_lookup; labels
        not in the table pass through unchanged, and counts for labels
        mapping to the same standardized title are summed.
        '''
        resp = defaultdict(int)
        title_lookup = {
            "Librarian": "Librarian",
            "Student  > Bachelor": "Undergrad Student",
            "Student (Bachelor)": "Undergrad Student",
            "Student (Master)": "Masters Student",
            "Student (Postgraduate)": "Masters Student",
            "Student  > Master": "Masters Student",
            "Student  > Postgraduate": "Masters Student",
            "Doctoral Student": "PhD Student",
            "Ph.D. Student": "PhD Student",
            "Student  > Doctoral Student": "PhD Student",
            "Student  > Ph. D. Student": "PhD Student",
            "Post Doc": "Postdoc",
            "Professor": "Faculty",
            "Associate Professor": "Faculty",
            "Assistant Professor": "Faculty",
            "Professor > Associate Professor": "Faculty",
            "Professor > Assistant Professor": "Faculty",
            "Senior Lecturer": "Faculty",
            "Lecturer > Senior Lecturer": "Faculty",
            "Lecturer": "Faculty",
            "Researcher (at an Academic Institution)": "Faculty",
            "Researcher (at a non-Academic Institution)":
            "Researcher (non-academic)",
            "Other Professional": "Other",
        }

        # NOTE(review): direct key indexing — assumes a truthy payload
        # always has "reader_count_by_academic_status"; a missing key would
        # raise KeyError. Confirm upstream payload shape.
        if self.mendeley_api_raw and self.mendeley_api_raw[
                "reader_count_by_academic_status"]:
            for raw_title, count in self.mendeley_api_raw[
                    "reader_count_by_academic_status"].iteritems():
                standardized_title = title_lookup.get(raw_title, raw_title)
                resp[standardized_title] += count
        return resp

    @property
    def mendeley_disciplines(self):
        '''
        Mendeley reader counts keyed by top-level discipline, summing the
        per-subdiscipline counts; {} when the payload is missing.
        '''
        resp = {}
        # NOTE(review): direct key indexing — assumes a truthy payload
        # always has "reader_count_by_subdiscipline"; confirm.
        if self.mendeley_api_raw and self.mendeley_api_raw[
                "reader_count_by_subdiscipline"]:
            for discipline, subdiscipline_dict in self.mendeley_api_raw[
                    "reader_count_by_subdiscipline"].iteritems():
                resp[discipline] = sum(subdiscipline_dict.values())
        return resp

    @property
    def post_counts_by_country_using_mendeley(self):
        '''
        Per-country engagement keyed by human-readable country name:
        mendeley reader counts merged (summed) with altmetric twitter post
        counts.
        '''
        posts_by_country = {}

        # mendeley country names are mapped to canonical display names
        if self.mendeley_api_raw and self.mendeley_api_raw[
                "reader_count_by_country"]:
            for mendeley_country_name, count in self.mendeley_api_raw[
                    "reader_count_by_country"].iteritems():
                country_name = map_mendeley_countries.get(
                    mendeley_country_name, mendeley_country_name)
                posts_by_country[country_name] = count

        # twitter counts come keyed by ISO code; convert and merge
        try:
            for iso_country, count in self.altmetric_api_raw["demographics"][
                    "geo"]["twitter"].iteritems():
                country_name = get_name_from_iso(iso_country)
                if country_name in posts_by_country:
                    posts_by_country[country_name] += count
                else:
                    posts_by_country[country_name] = count
        except (KeyError, TypeError):
            pass
        return posts_by_country

    @property
    def post_counts_by_iso_country(self):
        '''
        Twitter post counts keyed by ISO country code, or {} when the
        altmetric payload is missing or lacks the demographics section.
        '''
        try:
            return self.altmetric_api_raw["demographics"]["geo"]["twitter"]
        except (KeyError, TypeError):
            return {}

    @property
    def poster_counts_by_type(self):
        '''
        Twitter poster counts keyed by cohort type, or {} when the payload
        is missing, lacks the section, or the cohorts entry is empty.
        '''
        try:
            cohorts = self.altmetric_api_raw["demographics"]["users"][
                "twitter"]["cohorts"]
        except (KeyError, TypeError):
            return {}
        return cohorts or {}

    def has_source(self, source_name):
        """Return True when source_name appears in this product's post counts."""
        counts = self.post_counts
        return bool(counts) and source_name in counts

    @property
    def impressions(self):
        """Total follower count across everyone who tweeted this product."""
        total = 0
        for follower_count in self.twitter_posters_with_followers.values():
            total += follower_count
        return total

    def get_tweeter_posters_full_names(self, most_recent=None):
        """Return tweet author display names.

        When most_recent is given, only authors of the newest most_recent
        tweets (by posted_on, descending) are returned.  Posts without an
        author name are silently skipped.
        """
        try:
            posts = self.altmetric_api_raw["posts"]["twitter"]
        except (KeyError, TypeError):
            return []

        if most_recent:
            newest_first = sorted(posts,
                                  key=lambda p: p["posted_on"],
                                  reverse=True)
            posts = newest_first[:most_recent]

        names = []
        for post in posts:
            try:
                names.append(post["author"]["name"])
            except (KeyError, TypeError):
                pass
        return names

    @property
    def twitter_posters_with_followers(self):
        """Map each Twitter author id to that author's follower count."""
        try:
            tweets = self.altmetric_api_raw["posts"]["twitter"]
        except (KeyError, TypeError):
            return {}

        followers_by_poster = {}
        for tweet in tweets:
            try:
                author = tweet["author"]
                followers_by_poster[author["id_on_source"]] = author["followers"]
            except (KeyError, TypeError):
                # skip tweets with incomplete author data
                pass
        return followers_by_poster

    def f1000_urls_for_class(self, f1000_class):
        """HTML review links for F1000 posts tagged with f1000_class.

        Any malformed post aborts the whole scan and yields [] (matches the
        all-or-nothing behavior of the surrounding accessors).
        """
        try:
            return [
                u"<a href='{}'>Review</a>".format(post["url"])
                for post in self.altmetric_api_raw["posts"]["f1000"]
                if f1000_class in post["f1000_classes"]
            ]
        except (KeyError, TypeError):
            return []

    @property
    def impact_urls(self):
        """Citation link URLs from Altmetric, or [] when unavailable."""
        try:
            return self.altmetric_api_raw["citation"]["links"]
        except (KeyError, TypeError):
            return []

    @property
    def languages_with_examples(self):
        """Map non-English language names to one example post URL.

        Scans every post's title and summary; a language counts only when
        the text is longer than 7 words and langdetect's top candidate has
        probability above 0.90.  Later posts overwrite earlier ones -- one
        example per language is all we need.
        """
        resp = {}

        try:
            # .items() replaces Python 2-only .iteritems() (works on 2 and 3)
            for (source, posts) in self.altmetric_api_raw["posts"].items():
                for post in posts:
                    for key in ["title", "summary"]:
                        try:
                            num_words_in_post = len(post[key].split(" "))
                            top_detection = langdetect.detect_langs(
                                post[key])[0]
                            if (num_words_in_post > 7) and (top_detection.prob
                                                            > 0.90):

                                if top_detection.lang != "en":
                                    language_name = get_language_from_abbreviation(
                                        top_detection.lang)

                                    # overwrites.  that's ok, we just want one example
                                    resp[language_name] = post["url"]

                        except langdetect.lang_detect_exception.LangDetectException:
                            pass

        except (KeyError, AttributeError, TypeError):
            pass

        return resp

    @property
    def publons_reviews(self):
        """Publons peer reviews as dicts with URL and weighted-average score.

        A malformed post aborts the whole scan and yields [].
        """
        try:
            return [{
                "url": post["publons_article_url"],
                "publons_weighted_average": post["publons_weighted_average"],
            } for post in self.altmetric_api_raw["posts"]["peer_reviews"]
                    if post["pr_id"] == "publons"]
        except (KeyError, TypeError):
            return []

    @property
    def wikipedia_urls(self):
        """HTML anchors linking to Wikipedia pages that cite this product."""
        try:
            return [
                u"<a href='{}'>{}</a>".format(post["page_url"], post["title"])
                for post in self.altmetric_api_raw["posts"]["wikipedia"]
            ]
        except (KeyError, TypeError):
            return []

    def has_country(self, country_name):
        """Return True when country_name is among this product's countries."""
        return country_name in self.countries

    def has_country_using_mendeley(self, country_name):
        """Return True when country_name is present in the Mendeley-augmented country list."""
        return country_name in self.countries_using_mendeley

    @property
    def clean_doi(self):
        # This shouldn't be necessary because we clean DOIs before we put
        # them in; however, a few legacy DOIs were not fully cleaned, so
        # this re-cleans on read via the module-level clean_doi() helper.
        return clean_doi(self.doi)

    def __repr__(self):
        """Debug representation showing the database id and DOI."""
        return u'<Product ({id}) {doi}>'.format(doi=self.doi, id=self.id)

    def guess_genre(self):
        """Guess this product's genre from its type, DOI, and URL.

        Precedence: explicit dataset types, figshare-hosted items, dataset
        DOI/URL fragments, posters, abstracts, preprint DOI/URL fragments,
        articles, then the raw type with underscores dashed.  Defaults to
        "article" when no type is set.
        """
        if self.type:
            if "data" in self.type:
                return "dataset"
            elif (self.doi and ".figshare."
                  in self.doi) or (self.url and ".figshare." in self.url):
                # self.type is guaranteed truthy here by the outer guard, so
                # the original redundant `if self.type:` re-check (whose else
                # branch was unreachable) has been removed.
                if "article" in self.type or "paper" in self.type:
                    # figshare-hosted articles/papers are preprints
                    return "preprint"
                else:
                    return self.type.replace("_", "-")
            elif self.doi and any(fragment in self.doi
                                  for fragment in dataset_doi_fragments):
                return "dataset"
            elif self.url and any(fragment in self.url
                                  for fragment in dataset_url_fragments):
                return "dataset"
            elif "poster" in self.type:
                return "poster"
            elif "abstract" in self.type:
                return "abstract"
            elif self.doi and any(fragment in self.doi
                                  for fragment in preprint_doi_fragments):
                return "preprint"
            elif self.url and any(fragment in self.url
                                  for fragment in preprint_url_fragments):
                return "preprint"
            elif "article" in self.type:
                return "article"
            else:
                return self.type.replace("_", "-")
        return "article"

    @property
    def mendeley_countries(self):
        """Raw Mendeley reader counts by country, or None when unavailable."""
        try:
            return self.mendeley_api_raw["reader_count_by_country"]
        except (AttributeError, TypeError):
            # no Mendeley payload (None or non-dict); KeyError intentionally
            # not caught, matching the original behavior
            return None

    @property
    def num_mentions(self):
        # Alias for num_posts.  Historically this also included Mendeley
        # readers, but readers no longer count as "mentions".
        return self.num_posts

    @property
    def has_mentions(self):
        """Whether this product has at least one mention."""
        return self.num_mentions >= 1

    @property
    def mendeley_readers(self):
        """Number of Mendeley readers, defaulting to 0 when data is missing."""
        try:
            return self.mendeley_api_raw["reader_count"]
        except (AttributeError, TypeError):
            # no Mendeley payload; KeyError intentionally not caught,
            # matching the original behavior
            return 0

    def biblio_for_oadoi(self):
        """Minimal biblio dict for an oaDOI lookup.

        Prefers the DOI; falls back to the title when no DOI is available
        (could add an author name here too).
        """
        if self.doi:
            return {"doi": self.doi}
        return {"title": self.title}

    def to_dict(self):
        """Serialize this product to a plain dict for the JSON API.

        Rolls up Mendeley stats (as proportions), bibliographic fields,
        altmetric counters, and per-source breakdowns.
        """
        return {
            "id": self.id,
            "mendeley": {
                "country_percent": as_proportion(self.mendeley_countries),
                "subdiscipline_percent":
                as_proportion(self.mendeley_disciplines),
                "job_title_percent": as_proportion(self.mendeley_job_titles),
                "readers": self.mendeley_readers,
                "mendeley_url": self.mendeley_url
            },
            "doi": self.doi,
            "url": self.url,
            "orcid_id": self.orcid_id,
            "year": self.year,
            "_title":
            self.display_title,  # duplicate just for api reading help
            "title": self.display_title,
            # "title_normalized": self.normalized_title,
            "journal": self.journal,
            "authors": self.display_authors,
            "altmetric_id": self.altmetric_id,
            "altmetric_score": self.altmetric_score,
            "num_posts": self.num_posts,
            "num_mentions": self.num_mentions,
            "sources": [s.to_dict() for s in self.sources],
            "posts": self.posts,
            "events_last_week_count": self.events_last_week_count,
            "genre": self.guess_genre(),
            "license": self.license,
            "has_fulltext_url": self.has_fulltext_url,
            "fulltext_url": self.fulltext_url
        }
Example #48
0
class Book(db.Model):
    """A book whose text is stored as an array of lines."""
    __tablename__ = "books"
    # surrogate primary key
    id = Column(Integer, primary_key=True, autoincrement=True)
    # deferred: the (potentially large) line array is only loaded on first
    # access; the lambda default gives each row its own fresh empty list
    lines = deferred(Column(ArrayType, default=lambda: []))
Example #49
0
        class SomeClass(Versioned, self.Base, ComparableEntity):
            """Versioned mapped class whose `data` column loads lazily."""
            __tablename__ = 'sometable'

            id = Column(Integer, primary_key=True)
            name = Column(String(50))
            # deferred: not fetched until the attribute is first accessed
            data = deferred(Column(String(25)))
Example #50
0
class Domain(BaseMixin, db.Model):
    """
    A DNS domain affiliated with a job post.
    """
    __tablename__ = 'domain'
    #: DNS name of this domain (domain.tld)
    name = db.Column(db.Unicode(80), nullable=False, unique=True)
    #: Title of the employer at this domain
    title = db.Column(db.Unicode(250), nullable=True)
    #: Legal title
    legal_title = db.Column(db.Unicode(250), nullable=True)
    #: Description
    description = db.Column(db.UnicodeText, nullable=True)
    #: Logo URL
    logo_url = db.Column(db.Unicode(250), nullable=True)
    #: Is this a known webmail domain?
    is_webmail = db.Column(db.Boolean, default=False, nullable=False)
    #: Is this domain banned from listing on Hasjob? (Recruiter, etc)
    is_banned = db.Column(db.Boolean, default=False, nullable=False)
    #: Who banned it?
    banned_by_id = db.Column(None, db.ForeignKey('user.id', ondelete='SET NULL'), nullable=True)
    banned_by = db.relationship(User)
    #: Reason for banning
    banned_reason = db.Column(db.Unicode(250), nullable=True)
    #: Jobposts using this domain
    jobposts = db.relationship(JobPost, lazy='dynamic', backref=db.backref('domain', lazy='joined'))
    #: Search vector (deferred: large tsvector only loaded when accessed)
    search_vector = deferred(db.Column(TSVECTOR, nullable=True))

    def __repr__(self):
        """Debug representation including webmail/banned flags."""
        flags = [' webmail' if self.is_webmail else '', ' banned' if self.is_banned else '']
        return '<Domain %s%s>' % (self.name, ''.join(flags))

    @property
    def use_title(self):
        """Best display title: the explicit title, else the company name of
        the latest posted/pending job, else the bare domain name."""
        if self.title:
            return self.title
        post = self.jobposts.filter(JobPost.status.in_(POSTSTATUS.POSTPENDING)).order_by('datetime desc').first()
        if post:
            return post.company_name
        return self.name

    @property
    def has_profile(self):
        """Whether this domain has enough data (title and description) for a profile."""
        return bool(self.title and self.description)

    @cached_property
    def effective_logo_url(self):
        """
        Returns logo_url if present,
        else returns the logo from its most recent job post
        """
        if self.logo_url:
            return self.logo_url
        else:
            post = self.jobposts.filter(JobPost.company_logo != None,
                JobPost.status.in_(POSTSTATUS.ARCHIVED)).order_by('datetime desc').first()  # NOQA
            return post.url_for('logo', _external=True) if post else None

    def editor_is(self, user):
        """
        Is this user authorized to edit this domain?
        """
        if not user:
            return False
        # authorized if the user has at least one posted/pending job on this domain
        if JobPost.query.filter_by(domain=self, user=user).filter(JobPost.status.in_(POSTSTATUS.POSTPENDING)).notempty():
            return True
        return False

    def url_for(self, action='view', _external=False, **kwargs):
        """URL for viewing or editing this domain.

        NOTE(review): returns None for unrecognized actions -- confirm
        callers handle that.
        """
        if action == 'view':
            return url_for('browse_by_domain', domain=self.name, _external=_external, **kwargs)
        elif action == 'edit':
            return url_for('domain_edit', domain=self.name, _external=_external, **kwargs)

    @classmethod
    def get(cls, name, create=False):
        """Fetch a Domain by (lowercased) name, optionally creating it.

        A newly created domain is added to the session but not committed.
        """
        name = name.lower()
        result = cls.query.filter_by(name=name).one_or_none()
        if not result and create:
            result = cls(name=name, is_webmail=name in webmail_domains)
            db.session.add(result)
        return result
Example #51
0
 def updated(cls):
     """Timestamp column set on insert and refreshed on every update.

     Deferred: only loaded from the database when first accessed.
     """
     return deferred(
         Column(DateTime(timezone=True),
                default=func.now(),
                onupdate=func.now()))
Example #52
0
class CancelInvoice(Task, TaskCompute):
    """
        CancelInvoice model

        A "negative" invoice (credit note) that cancels an existing Invoice.
    """
    __tablename__ = 'cancelinvoice'
    __table_args__ = default_table_args
    __mapper_args__ = {'polymorphic_identity': 'cancelinvoice'}
    # joined-table inheritance: shares its primary key with Task
    id = Column(Integer,
                ForeignKey('task.id'),
                info={'colanderalchemy': forms.get_hidden_field_conf()},
                primary_key=True)

    # the invoice this cancelinvoice cancels
    invoice_id = Column(Integer,
                        ForeignKey('invoice.id'),
                        info={
                            'colanderalchemy': {
                                'title': u"Identifiant de la facture associée",
                                'missing': colander.required,
                            }
                        },
                        default=None)

    financial_year = Column(Integer,
                            info={
                                'colanderalchemy': {
                                    'title':
                                    u"Année fiscale de référence",
                                    'widget':
                                    deform.widget.TextInputWidget(mask='9999'),
                                }
                            },
                            default=0)
    # deferred: only loaded when the "edit" group is undeferred
    exported = deferred(Column(
        Boolean(),
        info={'colanderalchemy': {
            "title": "A déjà été exportée ?",
        }},
        default=False),
                        group="edit")

    invoice = relationship("Invoice",
                           backref=backref("cancelinvoices",
                                           info={
                                               'colanderalchemy':
                                               forms.EXCLUDED,
                                           }),
                           primaryjoin="CancelInvoice.invoice_id==Invoice.id",
                           info={
                               'colanderalchemy': forms.EXCLUDED,
                           })

    state_manager = DEFAULT_ACTION_MANAGER['cancelinvoice']
    valid_states = ('valid', )

    # templates used to build the document number and display name
    _number_tmpl = u"{s.company.name} {s.date:%Y-%m} A{s.company_index}"

    _name_tmpl = u"Avoir {0}"

    def _get_project_index(self, project):
        """
        Return the index of the current object in the associated project
        :param obj project: A Project instance in which we will look to get the
        current doc index
        :returns: The next number
        :rtype: int
        """
        return project.get_next_invoice_index()

    def _get_company_index(self, company):
        """
        Return the index of the current object in the associated company
        :param obj company: A Company instance in which we will look to get the
        current doc index
        :returns: The next number
        :rtype: int
        """
        return company.get_next_invoice_index()

    def is_tolate(self):
        """
        Return False

        A cancelinvoice can never be overdue.
        """
        return False

    def __repr__(self):
        """Debug representation showing the database id."""
        return u"<CancelInvoice id:{s.id}>".format(s=self)

    def __json__(self, request):
        """Return a JSON-serializable dict, extending Task.__json__."""
        datas = Task.__json__(self, request)

        datas.update(
            dict(
                invoice_id=self.invoice_id,
                financial_year=self.financial_year,
                exported=self.exported,
            ))
        return datas
Example #53
0
class Board(Base, Stndrd, Age_times):
    """A user-created community ("guild"): settings, membership relations,
    permission checks, asset management, and cached content listings."""

    __tablename__ = "boards"

    id = Column(Integer, primary_key=True)
    name = Column(String)
    created_utc = Column(Integer)
    description = Column(String)
    description_html = Column(String)
    over_18 = Column(Boolean, default=False)
    is_nsfl = Column(Boolean, default=False)
    is_banned = Column(Boolean, default=False)
    has_banner = Column(Boolean, default=False)
    has_profile = Column(Boolean, default=False)
    creator_id = Column(Integer, ForeignKey("users.id"))
    ban_reason = Column(String(256), default=None)
    color = Column(String(8), default="805ad5")
    restricted_posting = Column(Boolean, default=False)
    hide_banner_data = Column(Boolean, default=False)
    profile_nonce = Column(Integer, default=0)
    banner_nonce = Column(Integer, default=0)
    is_private = Column(Boolean, default=False)
    color_nonce = Column(Integer, default=0)
    rank_trending = Column(Float, default=0)
    stored_subscriber_count = Column(Integer, default=1)
    all_opt_out = Column(Boolean, default=False)

    moderators = relationship("ModRelationship")
    subscribers = relationship("Subscription", lazy="dynamic")
    submissions = relationship("Submission",
                               primaryjoin="Board.id==Submission.board_id")
    contributors = relationship("ContributorRelationship", lazy="dynamic")
    bans = relationship("BanRelationship", lazy="dynamic")
    postrels = relationship("PostRelationship", lazy="dynamic")
    trending_rank = deferred(Column(Float, server_default=FetchedValue()))

    #db side functions
    subscriber_count = deferred(Column(Integer, server_default=FetchedValue()))

    def __init__(self, **kwargs):
        # stamp the creation time at construction

        kwargs["created_utc"] = int(time.time())

        super().__init__(**kwargs)

    def __repr__(self):
        return f"<Board(name={self.name})>"

    @property
    def fullname(self):
        # t4_ is the type prefix for boards
        return f"t4_{self.base36id}"

    @property
    def mods_list(self):
        """Accepted moderator relationships, excluding deleted or
        permanently banned users, ordered by relationship id."""

        z = [
            x for x in self.moderators
            if x.accepted and not (x.user.is_deleted or
                                   (x.user.is_banned and not x.user.unban_utc))
        ]

        z = sorted(z, key=lambda x: x.id)
        return z

    @property
    def mods(self):
        """Users who are accepted moderators, ordered by relationship id."""

        z = [x.user for x in self.moderators if x.accepted]

        z = sorted(z, key=lambda x: x.id)

        return z

    @property
    def invited_mods(self):
        """Users with pending (not accepted, not rescinded) mod invites."""

        z = [
            x.user for x in self.moderators
            if x.accepted == False and x.invite_rescinded == False
        ]
        z = sorted(z, key=lambda x: x.id)
        return z

    @property
    def mods_count(self):
        """Count of accepted moderators with a live (non-rescinded) invite."""

        return len([
            x for x in self.moderators if x.accepted and not x.invite_rescinded
        ])

    @property
    def permalink(self):

        return f"/+{self.name}"

    def can_take(self, post):
        """Whether this board may claim `post` (not banned, not already related)."""
        if self.is_banned:
            return False
        return not self.postrels.filter_by(post_id=post.id).first()

    @cache.memoize(timeout=60)
    def idlist(self,
               sort="hot",
               page=1,
               t=None,
               show_offensive=True,
               v=None,
               nsfw=False,
               **kwargs):
        """Return ids of up to 26 visible submissions for page `page`.

        Applies NSFW/NSFL/offensive filters, privacy rules, the viewer's
        blocks, optional time window `t`, and sort order.  Cached 60s.
        """

        posts = g.db.query(Submission.id).options(lazyload('*')).filter_by(
            is_banned=False,
            is_deleted=False,
            is_pinned=False,
            board_id=self.id)

        if not nsfw:
            posts = posts.filter_by(over_18=False)

        if v and v.hide_offensive:
            posts = posts.filter_by(is_offensive=False)

        if v and not v.show_nsfl:
            posts = posts.filter_by(is_nsfl=False)

        if self.is_private:
            if v and (self.can_view(v) or v.admin_level >= 4):
                pass
            elif v:
                # logged-in non-members see public posts plus their own
                posts = posts.filter(
                    or_(Submission.post_public == True,
                        Submission.author_id == v.id))
            else:
                posts = posts.filter_by(post_public=True)

        if v and not self.has_mod(v) and v.admin_level <= 3:
            #blocks
            blocking = g.db.query(
                UserBlock.target_id).filter_by(user_id=v.id).subquery()
            blocked = g.db.query(
                UserBlock.user_id).filter_by(target_id=v.id).subquery()

            posts = posts.filter(Submission.author_id.notin_(blocking),
                                 Submission.author_id.notin_(blocked))

        if t:
            # restrict to the requested time window
            now = int(time.time())
            if t == 'day':
                cutoff = now - 86400
            elif t == 'week':
                cutoff = now - 604800
            elif t == 'month':
                cutoff = now - 2592000
            elif t == 'year':
                cutoff = now - 31536000
            else:
                cutoff = 0
            posts = posts.filter(Submission.created_utc >= cutoff)

        if sort == "hot":
            posts = posts.order_by(Submission.score_best.desc())
        elif sort == "new":
            posts = posts.order_by(Submission.created_utc.desc())
        elif sort == "disputed":
            posts = posts.order_by(Submission.score_disputed.desc())
        elif sort == "top":
            posts = posts.order_by(Submission.score_top.desc())
        elif sort == "activity":
            posts = posts.order_by(Submission.score_activity.desc())
        else:
            abort(422)

        # fetch 26 so the caller can detect a next page beyond the 25 shown
        posts = [x[0] for x in posts.offset(25 * (page - 1)).limit(26).all()]

        return posts

    def has_mod(self, user):
        """Return the user's active mod relationship on this board, or a falsy value.

        None for anonymous users, False when banned or not a mod.
        """

        if user is None:
            return None

        if self.is_banned:
            return False

        for x in user.moderates:
            if x.board_id == self.id and x.accepted and not x.invite_rescinded:
                return x

        return False

    def can_invite_mod(self, user):
        """Whether `user` has no live mod relationship/invite here."""

        return user.id not in [
            x.user_id for x in self.moderators if not x.invite_rescinded
        ]

    def has_rescinded_invite(self, user):
        """Whether `user` has a previously rescinded mod invite."""

        return user.id in [
            x.user_id for x in self.moderators if x.invite_rescinded == True
        ]

    def has_invite(self, user):
        """Return the user's pending mod invite, or None."""

        if user is None:
            return None

        for x in [
                i for i in self.moderators
                if not i.invite_rescinded and not i.accepted
        ]:

            if x.user_id == user.id:
                return x

        return None

    def has_ban(self, user):
        """Return the user's active ban relationship on this board, or None."""

        if user is None:
            return None

        return g.db.query(BanRelationship).filter_by(board_id=self.id,
                                                     user_id=user.id,
                                                     is_active=True).first()

    def has_subscriber(self, user):
        """Whether `user` has an active subscription to this board."""

        if not user:
            return False

        return self.id in [
            x.board_id for x in user.subscriptions.all() if x.is_active
        ]

    def has_contributor(self, user):
        """Return the user's active contributor relationship, or None."""

        if user is None:
            return False

        return g.db.query(ContributorRelationship).filter_by(
            user_id=user.id, board_id=self.id, is_active=True).first()

    def can_submit(self, user):
        """Whether `user` may create posts in this board."""

        if user is None:
            return False

        if user.admin_level >= 4:
            return True

        if self.has_ban(user):
            return False

        if self.has_contributor(user) or self.has_mod(user):
            return True

        if self.is_private or self.restricted_posting:
            return False

        return True

    def can_comment(self, user):
        """Whether `user` may comment in this board."""

        if user is None:
            return False

        if user.admin_level >= 4:
            return True

        if self.has_ban(user):
            return False

        if self.has_contributor(user) or self.has_mod(user):
            return True

        if self.is_private:
            return False

        return True

    def can_view(self, user):
        """Whether `user` may view this board.

        NOTE(review): falls through and implicitly returns None (falsy) for
        non-member viewers of a public board -- callers appear to invoke
        this only for private boards; confirm before relying on it elsewhere.
        """

        if user is None:
            return False

        if user.admin_level >= 4:
            return True

        if self.has_contributor(user) or self.has_mod(user) or self.has_invite(
                user):
            return True

        if self.is_private:
            return False

    def set_profile(self, file):
        """Upload a new 100x100 profile image and mark the board as having one."""

        self.del_profile()
        # bump the nonce so CDN/browser caches see a new URL
        self.profile_nonce += 1

        aws.upload_file(
            name=f"board/{self.name.lower()}/profile-{self.profile_nonce}.png",
            file=file,
            resize=(100, 100))
        self.has_profile = True
        g.db.add(self)

    def set_banner(self, file):
        """Upload a new banner image and mark the board as having one."""

        self.del_banner()
        # bump the nonce so CDN/browser caches see a new URL
        self.banner_nonce += 1

        aws.upload_file(
            name=f"board/{self.name.lower()}/banner-{self.banner_nonce}.png",
            file=file)

        self.has_banner = True
        g.db.add(self)

    def del_profile(self):
        """Delete the current profile image from storage and clear the flag."""

        aws.delete_file(
            name=f"board/{self.name.lower()}/profile-{self.profile_nonce}.png")
        self.has_profile = False
        g.db.add(self)

    def del_banner(self):
        """Delete the current banner image from storage and clear the flag."""

        aws.delete_file(
            name=f"board/{self.name.lower()}/banner-{self.banner_nonce}.png")
        self.has_banner = False
        g.db.add(self)

    @property
    def banner_url(self):
        """CDN URL of the banner image, or the default banner."""

        if self.has_banner:
            return f"https://i.ruqqus.com/board/{self.name.lower()}/banner-{self.banner_nonce}.png"
        else:
            return "/assets/images/guilds/default-guild-banner.png"

    @property
    def profile_url(self):
        """CDN URL of the profile image, or an appropriate default icon."""

        if self.has_profile:
            return f"https://i.ruqqus.com/board/{self.name.lower()}/profile-{self.profile_nonce}.png"
        else:
            if self.over_18:
                return "/assets/images/icons/nsfw_guild_icon.png"
            else:
                return "/assets/images/guilds/default-guild-icon.png"

    @property
    def css_url(self):
        # nonce busts caches when the board color changes
        return f"/assets/{self.name}/main/{self.color_nonce}.css"

    @property
    def css_dark_url(self):
        # nonce busts caches when the board color changes
        return f"/assets/{self.name}/dark/{self.color_nonce}.css"

    def has_participant(self, user):
        """Whether `user` has ever posted or commented in this board."""
        return (g.db.query(Submission).filter_by(original_board_id=self.id,
                                                 author_id=user.id).first()
                or g.db.query(Comment).filter_by(
                    author_id=user.id, original_board_id=self.id).first())

    @property
    @lazy
    def n_pins(self):
        # number of currently pinned submissions
        return g.db.query(Submission).filter_by(board_id=self.id,
                                                is_pinned=True).count()

    @property
    def can_pin_another(self):
        # hard limit of 4 pinned posts per board

        return self.n_pins < 4

    @property
    def json(self):
        """API representation; banned boards expose only a minimal subset."""

        if self.is_banned:
            return {
                'name': self.name,
                'permalink': self.permalink,
                'is_banned': True,
                'ban_reason': self.ban_reason,
                'id': self.base36id
            }
        # NOTE(review): 'banner_url' and 'profile_url' appear twice in this
        # literal; the later duplicates silently win (same values here).
        return {
            'name': self.name,
            'profile_url': self.profile_url,
            'banner_url': self.banner_url,
            'created_utc': self.created_utc,
            'mods_count': self.mods_count,
            'subscriber_count': self.subscriber_count,
            'permalink': self.permalink,
            'description': self.description,
            'description_html': self.description_html,
            'over_18': self.over_18,
            'is_banned': False,
            'is_private': self.is_private,
            'is_restricted': self.restricted_posting,
            'id': self.base36id,
            'fullname': self.fullname,
            'banner_url': self.banner_url,
            'profile_url': self.profile_url,
            'color': "#" + self.color
        }

    @property
    def show_settings_icons(self):
        # any non-default visibility/content setting warrants an icon
        return self.is_private or self.restricted_posting or self.over_18 or self.all_opt_out

    @cache.memoize(600)
    def comment_idlist(self, page=1, v=None, nsfw=False, **kwargs):
        """Return ids of up to 26 visible comments for page `page`.

        Filters comments by the visibility of their parent submissions,
        the viewer's blocks, and moderation state.  Cached for 600s.
        """

        posts = g.db.query(Submission).options(
            lazyload('*')).filter_by(board_id=self.id)

        if not nsfw:
            posts = posts.filter_by(over_18=False)

        if v and not v.show_nsfl:
            posts = posts.filter_by(is_nsfl=False)

        if self.is_private:
            if v and (self.can_view(v) or v.admin_level >= 4):
                pass
            elif v:
                posts = posts.filter(
                    or_(Submission.post_public == True,
                        Submission.author_id == v.id))
            else:
                posts = posts.filter_by(post_public=True)

        posts = posts.subquery()

        comments = g.db.query(Comment).options(lazyload('*'))

        if v and v.hide_offensive:
            comments = comments.filter_by(is_offensive=False)

        if v and not self.has_mod(v) and v.admin_level <= 3:
            #blocks
            blocking = g.db.query(
                UserBlock.target_id).filter_by(user_id=v.id).subquery()
            blocked = g.db.query(
                UserBlock.user_id).filter_by(target_id=v.id).subquery()

            comments = comments.filter(Comment.author_id.notin_(blocking),
                                       Comment.author_id.notin_(blocked))

        if not v or not v.admin_level >= 3:
            comments = comments.filter_by(is_deleted=False, is_banned=False)

        comments = comments.join(posts,
                                 Comment.parent_submission == posts.c.id)

        # fetch 26 so the caller can detect a next page beyond the 25 shown
        comments = comments.order_by(Comment.created_utc.desc()).offset(
            25 * (page - 1)).limit(26).all()

        return [x.id for x in comments]
Example #54
0
class Invoice(Task, InvoiceCompute):
    """
        Invoice Model

        Joined-table-inheritance child of Task: one 'invoice' row per
        parent 'task' row, linked through the shared primary key.
    """
    __tablename__ = 'invoice'
    __table_args__ = default_table_args
    __mapper_args__ = {
        'polymorphic_identity': 'invoice',
    }
    # Primary key is also the foreign key to the parent Task row.
    id = Column(
        ForeignKey('task.id'),
        primary_key=True,
        info={
            'colanderalchemy': {
                'widget': deform.widget.HiddenWidget(),
                'missing': colander.drop,
            }
        },
    )
    # Payment status: 'waiting', 'paid' (partially) or 'resulted'
    # (fully paid); maintained by check_resulted().
    paid_status = Column(
        String(10),
        default='waiting',
        info={
            'colanderalchemy': {
                'widget': deform.widget.SelectWidget(values=INVOICE_STATES),
                'title': u'Statut de la facture',
                "validator": colander.OneOf(dict(INVOICE_STATES).keys()),
            }
        })

    # seems it's not used anymore
    deposit = deferred(
        Column(Integer,
               nullable=False,
               info={'colanderalchemy': {
                   'exclude': True
               }},
               default=0),
        group='edit',
    )
    # Common with only estimations
    course = deferred(Column(
        Integer,
        info={'colanderalchemy': {
            'title': u"Concerne une formation"
        }},
        nullable=False,
        default=0),
                      group='edit')
    # Common with only cancelinvoices
    financial_year = Column(Integer,
                            info={
                                'colanderalchemy': {
                                    'title':
                                    u"Année fiscale de référence",
                                    'widget':
                                    deform.widget.TextInputWidget(mask='9999'),
                                }
                            },
                            default=0)
    # Whether the invoice has already been exported (see __json__).
    exported = deferred(Column(
        Boolean(),
        info={'colanderalchemy': {
            'title': u"A déjà été exportée ?"
        }},
        default=False),
                        group="edit")

    # Estimation this invoice was generated from, if any.
    estimation_id = Column(
        ForeignKey('estimation.id'),
        info={'colanderalchemy': {
            'missing': colander.drop
        }},
    )

    estimation = relationship(
        "Estimation",
        primaryjoin="Invoice.estimation_id==Estimation.id",
        info={
            'colanderalchemy': forms.EXCLUDED,
            'export': {
                'exclude': True
            },
        },
    )
    state_manager = DEFAULT_ACTION_MANAGER['invoice']

    # Status groupings used by payment-related helpers/queries.
    paid_states = ('resulted', )
    not_paid_states = (
        'valid',
        'paid',
    )
    valid_states = paid_states + not_paid_states

    # Templates used to build official numbers and display names.
    _number_tmpl = u"{s.company.name} {s.date:%Y-%m} F{s.company_index}"

    _name_tmpl = u"Facture {0}"

    _deposit_name_tmpl = u"Facture d'acompte {0}"

    _sold_name_tmpl = u"Facture de solde {0}"

    def _get_project_index(self, project):
        """
        Return the index of the current object in the associated project
        :param obj project: A Project instance in which we will look to get the
        current doc index
        :returns: The next number
        :rtype: int
        """
        return project.get_next_invoice_index()

    def _get_company_index(self, company):
        """
        Return the index of the current object in the associated company
        :param obj company: A Company instance in which we will look to get the
        current doc index
        :returns: The next number
        :rtype: int
        """
        return company.get_next_invoice_index()

    def set_deposit_label(self):
        """Set the display name marking this invoice as a deposit invoice."""
        self.name = self._deposit_name_tmpl.format(self.project_index)

    def set_sold_label(self):
        """Set the display name marking this invoice as a sold invoice."""
        self.name = self._sold_name_tmpl.format(self.project_index)

    def set_project(self, project):
        """Attach this invoice to the given project."""
        self.project = project

    def gen_cancelinvoice(self, user):
        """
            Return a cancel invoice with self's informations

            :param user: the user generating the cancel invoice
            :returns: a new (unsaved) CancelInvoice mirroring this invoice
        """
        cancelinvoice = CancelInvoice(
            company=self.company,
            project=self.project,
            customer=self.customer,
            phase=self.phase,
            user=user,
        )
        cancelinvoice.address = self.address
        cancelinvoice.workplace = self.workplace
        cancelinvoice.description = self.description

        cancelinvoice.invoice = self
        # Expenses are negated so the cancel invoice offsets the original.
        cancelinvoice.expenses_ht = -1 * self.expenses_ht
        cancelinvoice.financial_year = self.financial_year
        cancelinvoice.prefix = self.prefix
        cancelinvoice.display_units = self.display_units

        cancelinvoice.line_groups = []
        for group in self.line_groups:
            cancelinvoice.line_groups.append(group.gen_cancelinvoice_group())
        order = self.get_next_row_index()

        # Each discount of the original invoice is carried over as a
        # plain task line.
        for discount in self.discounts:
            discount_line = TaskLine(
                cost=discount.amount,
                tva=discount.tva,
                quantity=1,
                description=discount.description,
                order=order,
                unity='',
            )
            discount_line.product_id = Product.first_by_tva_value(discount.tva)
            order += 1
            cancelinvoice.default_line_group.lines.append(discount_line)

        # Each recorded payment is added back as a line, the amount being
        # converted back to a pre-tax cost via reverse_tva.
        for index, payment in enumerate(self.payments):
            paid_line = TaskLine(
                cost=math_utils.reverse_tva(
                    payment.amount,
                    payment.tva.value,
                    False,
                ),
                tva=payment.tva.value,
                quantity=1,
                description=u"Paiement {0}".format(index + 1),
                order=order,
                unity='NONE',
            )
            paid_line.product_id = Product.first_by_tva_value(
                payment.tva.value)
            order += 1
            cancelinvoice.default_line_group.lines.append(paid_line)
        cancelinvoice.mentions = self.mentions
        cancelinvoice.payment_conditions = u"Réglé"
        return cancelinvoice

    def get_next_row_index(self):
        """Return the next 1-based order index for a line in the default group."""
        return len(self.default_line_group.lines) + 1

    def record_payment(self, **kw):
        """
        Record a payment for the current invoice

        :param resulted: optional; force the 'resulted' status (popped
            from kw before the Payment is built).
        :param amount: payment amount; a Payment row is only created when
            it is strictly positive.
        :param user_id: id of the user recording the payment (required).

        All remaining keyword arguments are copied onto the new Payment.
        :returns: self (via check_resulted)
        """
        resulted = kw.pop('resulted', False)
        if kw['amount'] > 0:
            payment = Payment()
            for key, value in kw.iteritems():
                setattr(payment, key, value)
            log.info(u"Amount : {0}".format(payment.amount))
            self.payments.append(payment)

        return self.check_resulted(
            force_resulted=resulted,
            user_id=kw['user_id'],
        )

    def check_resulted(self, force_resulted=False, user_id=None):
        """
        Check if the invoice is resulted or not and set the appropriate status

        :param force_resulted: force the 'resulted' status regardless of
            the amount still to pay.
        :param user_id: when provided, a TaskStatus history entry
            recording the new status is appended.
        :returns: self
        """
        log.debug(u"-> There still to pay : %s" % self.topay())
        if self.topay() <= 0 or force_resulted:
            self.paid_status = 'resulted'

        # Partially paid: some payments or cancel-invoice amounts exist.
        elif len(self.payments) > 0 or self.cancelinvoice_amount() > 0:
            self.paid_status = 'paid'

        else:
            self.paid_status = 'waiting'

        if user_id is not None:
            status_record = TaskStatus(status_code=self.paid_status,
                                       status_person_id=user_id,
                                       status_comment='')
            self.statuses.append(status_record)
        return self

    def duplicate(self, user, project, phase, customer):
        """
            Duplicate the current invoice

            The copy keeps this invoice's address only when the customer is
            unchanged, and its financial_year is reset to the current year.
        """
        invoice = Invoice(
            self.company,
            customer,
            project,
            phase,
            user,
        )

        if customer.id == self.customer_id:
            invoice.address = self.address
        else:
            invoice.address = customer.full_address

        invoice.workplace = self.workplace

        invoice.description = self.description

        invoice.payment_conditions = self.payment_conditions
        invoice.deposit = self.deposit
        invoice.course = self.course
        invoice.display_units = self.display_units
        invoice.expenses_ht = self.expenses_ht
        invoice.financial_year = datetime.date.today().year

        invoice.line_groups = []
        for group in self.line_groups:
            invoice.line_groups.append(group.duplicate())

        for line in self.discounts:
            invoice.discounts.append(line.duplicate())

        invoice.mentions = self.mentions
        return invoice

    def __repr__(self):
        return u"<Invoice id:{s.id}>".format(s=self)

    def __json__(self, request):
        """Return a JSON-serializable dict extending Task.__json__."""
        datas = Task.__json__(self, request)

        datas.update(
            dict(
                deposit=self.deposit,
                course=self.course,
                financial_year=self.financial_year,
                exported=self.exported,
                estimation_id=self.estimation_id,
            ))
        return datas

    def is_tolate(self):
        """
            Return True if a payment is expected since more than
            45 days

            Only 'waiting' and (partially) 'paid' invoices can be late;
            'resulted' invoices never are.
        """
        res = False
        if self.paid_status in ('waiting', 'paid'):
            today = datetime.date.today()
            elapsed = today - self.date
            if elapsed > datetime.timedelta(days=45):
                res = True
            else:
                res = False
        return res
Example #55
0
class Localization(Base):
    """Localization information, including the localization ID, event ID, right
    ascension, declination, error radius (if applicable), and the healpix
    map."""

    # Only the user who created the row may update or delete it.
    update = delete = AccessibleIfUserMatches('sent_by')

    sent_by_id = sa.Column(
        sa.ForeignKey('users.id', ondelete='CASCADE'),
        nullable=False,
        index=True,
        doc="The ID of the User who created this Localization.",
    )

    sent_by = relationship(
        "User",
        foreign_keys=sent_by_id,
        back_populates="localizations",
        doc="The user that saved this Localization",
    )

    nside = 512
    # HEALPix resolution used for flat (non-multiresolution) operations.

    dateobs = sa.Column(
        sa.ForeignKey('gcnevents.dateobs', ondelete="CASCADE"),
        nullable=False,
        index=True,
        doc='UTC event timestamp',
    )

    localization_name = sa.Column(sa.String, doc='Localization name', index=True)

    # The HEALPix arrays below are deferred: they are only loaded from
    # the database when first accessed.
    uniq = deferred(
        sa.Column(
            sa.ARRAY(sa.BigInteger),
            nullable=False,
            doc='Multiresolution HEALPix UNIQ pixel index array',
        )
    )

    probdensity = deferred(
        sa.Column(
            sa.ARRAY(sa.Float),
            nullable=False,
            doc='Multiresolution HEALPix probability density array',
        )
    )

    distmu = deferred(
        sa.Column(sa.ARRAY(sa.Float), doc='Multiresolution HEALPix distance mu array')
    )

    distsigma = deferred(
        sa.Column(
            sa.ARRAY(sa.Float), doc='Multiresolution HEALPix distance sigma array'
        )
    )

    distnorm = deferred(
        sa.Column(
            sa.ARRAY(sa.Float),
            doc='Multiresolution HEALPix distance normalization array',
        )
    )

    contour = deferred(sa.Column(JSONB, doc='GeoJSON contours'))

    @hybrid_property
    def is_3d(self):
        """True when all three distance arrays are present (Python side)."""
        return (
            self.distmu is not None
            and self.distsigma is not None
            and self.distnorm is not None
        )

    @is_3d.expression
    def is_3d(cls):
        """SQL expression equivalent of is_3d, usable in queries."""
        return sa.and_(
            cls.distmu.isnot(None),
            cls.distsigma.isnot(None),
            cls.distnorm.isnot(None),
        )

    @property
    def table_2d(self):
        """Get multiresolution HEALPix dataset, probability density only."""
        return Table(
            [np.asarray(self.uniq, dtype=np.int64), self.probdensity],
            names=['UNIQ', 'PROBDENSITY'],
        )

    @property
    def table(self):
        """Get multiresolution HEALPix dataset, probability density and
        distance."""
        if self.is_3d:
            return Table(
                [
                    np.asarray(self.uniq, dtype=np.int64),
                    self.probdensity,
                    self.distmu,
                    self.distsigma,
                    self.distnorm,
                ],
                names=['UNIQ', 'PROBDENSITY', 'DISTMU', 'DISTSIGMA', 'DISTNORM'],
            )
        else:
            return self.table_2d

    @property
    def flat_2d(self):
        """Get flat resolution HEALPix dataset, probability density only."""
        # Rasterize to the fixed nside, then convert NESTED -> RING ordering.
        order = healpy.nside2order(Localization.nside)
        result = ligo_bayestar.rasterize(self.table_2d, order)['PROB']
        return healpy.reorder(result, 'NESTED', 'RING')

    @property
    def flat(self):
        """Get flat resolution HEALPix dataset, probability density and
        distance.

        Returns a 4-tuple (prob, distmu, distsigma, distnorm) in the 3-D
        case, otherwise a 1-tuple containing only the probability map.
        """
        if self.is_3d:
            order = healpy.nside2order(Localization.nside)
            t = ligo_bayestar.rasterize(self.table, order)
            # NOTE(review): healpy.reorder is given the whole tuple of
            # maps at once -- confirm it handles a sequence of maps.
            result = t['PROB'], t['DISTMU'], t['DISTSIGMA'], t['DISTNORM']
            return healpy.reorder(result, 'NESTED', 'RING')
        else:
            return (self.flat_2d,)
Example #56
0
class LinterInstance(Base):
    """Describes the connection between a :class:`assignment.AssignmentLinter`
    and a :class:`work_models.Work`.
    """
    if t.TYPE_CHECKING:  # pragma: no cover
        query = Base.query  # type: t.ClassVar[_MyQuery['LinterInstance']]
    __tablename__ = 'LinterInstance'
    # Primary key is a UUID string generated in __init__ (no autoincrement).
    id: str = db.Column(
        'id', db.String(UUID_LENGTH), nullable=False, primary_key=True
    )
    state: LinterState = db.Column(
        'state',
        db.Enum(LinterState),
        default=LinterState.running,
        nullable=False
    )
    work_id: int = db.Column(
        'Work_id', db.Integer, db.ForeignKey('Work.id', ondelete='CASCADE')
    )
    tester_id: str = db.Column(
        'tester_id', db.Unicode, db.ForeignKey('AssignmentLinter.id')
    )
    # Linter output columns are deferred: only fetched on first access.
    stdout: t.Optional[str] = orm.deferred(
        db.Column('stdout', db.Unicode, nullable=True)
    )
    stderr: t.Optional[str] = orm.deferred(
        db.Column('stderr', db.Unicode, nullable=True)
    )
    # Raw stored summary; exposed through the error_summary property below.
    _error_summary: t.Optional[str] = db.Column(
        'error_summary', db.Unicode, nullable=True
    )

    tester: 'assignment.AssignmentLinter' = db.relationship(
        "AssignmentLinter", back_populates="tests"
    )
    work: 'work_models.Work' = db.relationship('Work', foreign_keys=work_id)

    comments: LinterComment = db.relationship(
        "LinterComment", back_populates="linter", cascade='all,delete'
    )

    @property
    def error_summary(self) -> str:
        """The summary of the error the linter encountered.

        :returns: A summary of the error the linter encountered. This will
            probably be empty when the state is not ``crashed``.
        """
        if self._error_summary:
            return self._error_summary
        # Crashed without a stored summary: provide a generic message.
        elif self.state == LinterState.crashed:
            return 'The linter crashed for some unknown reason.'
        else:
            return ''

    @error_summary.setter
    def error_summary(self, new_value: str) -> None:
        """Store the given error summary verbatim."""
        self._error_summary = new_value

    def __init__(
        self, work: 'work_models.Work', tester: 'assignment.AssignmentLinter'
    ) -> None:
        """Create a linter instance linking ``work`` and ``tester``."""
        super().__init__(work=work, tester=tester)

        # Find a unique id
        new_id = str(uuid.uuid4())
        while db.session.query(
            LinterInstance.query.filter(LinterInstance.id == new_id).exists()
        ).scalar():  # pragma: no cover
            new_id = str(uuid.uuid4())

        self.id = new_id

    def __extended_to_json__(self) -> t.Mapping[str, object]:
        """Creates an extended JSON serializable representation of this linter
        instance.

        This object will look like this:

        .. code:: python

            {
                'stdout': str, # The stdout produced by the linter.
                'stderr': str, # The stderr produced by the linter.
                **self.__to_json__(), # Other keys are the same as the normal
                                      # json serialization.
            }

        :returns: An object as described above.
        """
        return {
            **self.__to_json__(),
            'stdout': self.stdout,
            'stderr': self.stderr,
        }

    def __to_json__(self) -> t.Mapping[str, object]:
        """Creates a JSON serializable representation of this linter instance.

        This object will look like this:

        .. code:: python

            {
                'id': str, # The id of this linter instance.
                'state': str, # The state of this linter instance.
                'work': Work, # The submission this instance has linted.
                'error_summary': str, # The summary of the error this linter
                                      # has encountered. This will be an empty
                                      # string if no error has occurred.
            }

        :returns: An object as described above.
        """
        return {
            'id': self.id,
            'state': self.state.name,
            'work': self.work,
            'error_summary': self.error_summary,
        }

    def add_comments(
        self,
        feedbacks: t.Mapping[int, t.Mapping[int, t.Sequence[t.
                                                            Tuple[str, str]]]],
    ) -> t.Iterable[LinterComment]:
        """Add comments written by this instance.

        :param feedbacks: The feedback to add, it should be in form as
            described below.
        :returns: A iterable with comments that have not been added or commited
            to the database yet.

        .. code:: python

            {
                file_id: {
                    line_number: [(linter_code, msg), ...]
                }
            }
        """
        # Lazily yield one LinterComment per (file, line, message) triple.
        for file_id, feedback in feedbacks.items():
            for line_number, msgs in feedback.items():
                for linter_code, msg in msgs:
                    yield LinterComment(
                        file_id=file_id,
                        line=line_number,
                        linter_code=linter_code,
                        linter_id=self.id,
                        comment=msg,
                    )
Example #57
0
class Device(Object):
    """Network device model: connection details, credentials, stored
    configuration/operational data, and relationships to services, runs,
    tasks, users, pools and sessions."""

    __tablename__ = class_type = "device"
    __mapper_args__ = {"polymorphic_identity": "device"}
    parent_type = "object"
    id = db.Column(Integer, ForeignKey(Object.id), primary_key=True)
    name = db.Column(db.SmallString, unique=True)
    icon = db.Column(db.SmallString, default="router")
    operating_system = db.Column(db.SmallString)
    os_version = db.Column(db.SmallString)
    ip_address = db.Column(db.SmallString)
    longitude = db.Column(db.SmallString, default="0.0")
    latitude = db.Column(db.SmallString, default="0.0")
    port = db.Column(Integer, default=22)
    username = db.Column(db.SmallString)
    password = db.Column(db.SmallString)
    enable_password = db.Column(db.SmallString)
    netmiko_driver = db.Column(db.SmallString, default="cisco_ios")
    napalm_driver = db.Column(db.SmallString, default="ios")
    # Large text blobs are deferred (loaded only on access) and excluded
    # from change tracking.
    configuration = deferred(
        db.Column(db.LargeString, info={"dont_track_changes": True}))
    operational_data = deferred(
        db.Column(db.LargeString, info={"dont_track_changes": True}))
    last_failure = db.Column(db.SmallString, default="Never")
    last_status = db.Column(db.SmallString, default="Never")
    last_update = db.Column(db.SmallString, default="Never")
    last_runtime = db.Column(db.SmallString)
    last_duration = db.Column(db.SmallString)
    services = relationship("Service",
                            secondary=db.service_device_table,
                            back_populates="devices")
    runs = relationship(
        "Run",
        secondary=db.run_device_table,
        back_populates="devices",
        cascade="all,delete",
    )
    tasks = relationship("Task",
                         secondary=db.task_device_table,
                         back_populates="devices")
    users = relationship("User",
                         secondary=db.user_device_table,
                         back_populates="devices")
    pools = relationship("Pool",
                         secondary=db.pool_device_table,
                         back_populates="devices")
    sessions = relationship("Session",
                            back_populates="device",
                            cascade="all, delete-orphan")

    def table_properties(self, **kwargs):
        """Return this device's properties for a table or REST listing.

        For the ``configuration`` and ``operational_data`` text blobs,
        only the lines matching the search string supplied in the form
        (plain substring or regex) are returned, plus ``context`` lines
        on each side of every match.  REST API requests get plain
        ``"L<n>: ..."`` strings; UI requests get HTML with matches
        wrapped in ``<mark>`` tags and overlapping context windows merged
        into a single block.
        """
        columns = [c["data"] for c in kwargs["columns"]]
        rest_api_request = kwargs.get("rest_api_request")
        include_properties = columns if rest_api_request else None
        properties = super().get_properties(include=include_properties)
        # Number of surrounding lines to show around each matching line.
        context = int(kwargs["form"].get("context-lines", 0))
        for property in ("configuration", "operational_data"):
            if rest_api_request:
                if property in columns:
                    properties[property] = getattr(self, property)
                # Skip the expensive matching below unless requested.
                if f"{property}_matches" not in columns:
                    continue
            data = kwargs["form"].get(property)
            regex_match = kwargs["form"].get(f"{property}_filter") == "regex"
            if not data:
                properties[property] = ""
            else:
                result = []
                # 'visited' tracks line indices already emitted so context
                # windows never repeat lines.
                content, visited = getattr(self, property).splitlines(), set()
                for (index, line) in enumerate(content):
                    # merge: the line just above this window was already
                    # emitted, so append to the previous block instead of
                    # starting a new one.
                    match_lines, merge = [], index - context - 1 in visited
                    # Skip lines that do not match (regex or substring).
                    if (not search(data, line) if regex_match else
                            data.lower() not in line.lower()):
                        continue
                    for i in range(-context, context + 1):
                        if index + i < 0 or index + i > len(content) - 1:
                            continue
                        if index + i in visited:
                            merge = True
                            continue
                        visited.add(index + i)
                        line = content[index + i].strip()
                        if rest_api_request:
                            match_lines.append(f"L{index + i + 1}: {line}")
                            continue
                        # Highlight the match (case-insensitively) for the UI.
                        line = sub(f"(?i){data}", r"<mark>\g<0></mark>", line)
                        match_lines.append(f"<b>L{index + i + 1}:</b> {line}")
                    if rest_api_request:
                        result.extend(match_lines)
                    else:
                        if merge:
                            result[-1] += f"<br>{'<br>'.join(match_lines)}"
                        else:
                            result.append("<br>".join(match_lines))
                if rest_api_request:
                    properties[f"{property}_matches"] = result
                else:
                    properties[property] = "".join(
                        f"<pre style='text-align: left'>{match}</pre>"
                        for match in result)
        return properties

    @property
    def view_properties(self):
        """Subset of properties used by the map/topology views."""
        return {
            property: getattr(self, property)
            for property in (
                "id",
                "type",
                "name",
                "icon",
                "latitude",
                "longitude",
                "last_runtime",
            )
        }

    @property
    def ui_name(self):
        """Display name: 'name (model)' when a model is set, else name."""
        return f"{self.name} ({self.model})" if self.model else self.name

    def __repr__(self):
        return f"{self.name} ({self.model})" if self.model else self.name
Example #58
0
class Person(Base):
    """Person model with multilingual names, a 'YYYYMMDD'-style birthday
    string, and deferred profile/contact columns."""

    __tablename__ = 'person'

    id = Column(Integer, primary_key=True)

    ### Fields ###
    name = Column(Unicode(20), nullable=False, index=True)
    name_en = Column(String(80), index=True)
    name_cn = Column(Unicode(20), index=True)

    gender = Column(Enum('m', 'f', name='enum_gender'), index=True)

    # 8-char string; chars [0:4] = year, [4:6] = month, [6:8] = day
    # (see the accessors below).
    birthday = Column(CHAR(8), index=True)

    # 'profile' deferred group: loaded together on first access.
    education = deferred(Column(ARRAY(Unicode(60))), group='profile')
    education_id = deferred(Column(ARRAY(String(20))), group='profile')

    address = deferred(Column(ARRAY(Unicode(20))), group='profile')
    address_id = deferred(Column(ARRAY(String(16))), group='profile')

    image = Column(String(1024))
    # 'extra' deferred group: contact/social columns, loaded together.
    email = deferred(Column(Text), group='extra')
    twitter = deferred(Column(String(20)), group='extra')
    facebook = deferred(Column(String(80)), group='extra')
    blog = deferred(Column(String(255)), group='extra')
    homepage = deferred(Column(String(255)), group='extra')
    wiki = deferred(Column(Text), group='extra')
    extra_vars = deferred(Column(Text), group='extra')

    ### Relations ###
    candidacies = relationship('Candidacy',
                               order_by='desc(Candidacy.assembly_id)',
                               backref='person')
    bills_ = relationship('Bill',
                          secondary=cosponsorship,
                          order_by='desc(Bill.proposed_date)',
                          backref='cosponsors')
    withdrawed_bills = relationship('Bill',
                                    secondary=bill_withdrawal,
                                    backref='withdrawers')
    parties = relationship('Party',
                           secondary=PartyAffiliation.__table__,
                           order_by='desc(PartyAffiliation.date)',
                           backref=backref('members', lazy='dynamic'),
                           lazy='dynamic')

    @hybrid_property
    def birthday_year(self):
        """Year part of the birthday as an int (Python side)."""
        return int(self.birthday[:4])

    @birthday_year.expression
    def birthday_year(cls):
        """SQL-side year extraction, usable in queries."""
        return func.substr(cls.birthday, 1, 4)

    @property
    def birthday_month(self):
        # A '00' month falls back to 1 (year-only birthdays).
        return int(self.birthday[4:6]) or 1

    @property
    def birthday_day(self):
        # A '00' day falls back to 1, mirroring birthday_month.
        return int(self.birthday[6:8]) or 1

    @property
    def birthday_date(self):
        """Birthday as a datetime.date, with missing parts defaulting to 1."""
        return date(self.birthday_year, self.birthday_month, self.birthday_day)

    @property
    def birthday_formatted(self):
        """Human-readable birthday string."""
        return format_date(self.birthday_date)

    @property
    def cur_party(self):
        # parties is ordered by descending affiliation date, so first()
        # is the most recent party.
        return self.parties.first()
Example #59
0
class Group(DatabaseModel):
    """Model for a group on the site.

    Trigger behavior:
      Incoming:
        - num_subscriptions will be incremented and decremented by insertions and
          deletions in group_subscriptions.
    """

    schema_class = GroupSchema

    __tablename__ = "groups"

    group_id: int = Column(Integer, primary_key=True)
    # Hierarchical group path (Postgres ltree); see is_subgroup_of().
    path: Ltree = Column(LtreeType, nullable=False, index=True, unique=True)
    created_time: datetime = Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        index=True,
        server_default=text("NOW()"),
    )
    short_description: Optional[str] = Column(
        Text,
        CheckConstraint(
            f"LENGTH(short_description) <= {SHORT_DESCRIPTION_MAX_LENGTH}",
            name="short_description_length",
        ),
    )
    # Raw sidebar markdown; always set through the sidebar_markdown
    # property below so the rendered HTML stays in sync.
    _sidebar_markdown: str = deferred(Column("sidebar_markdown", Text))
    sidebar_rendered_html: str = deferred(Column(Text))
    num_subscriptions: int = Column(Integer,
                                    nullable=False,
                                    server_default="0")
    is_admin_posting_only: bool = Column(Boolean,
                                         nullable=False,
                                         server_default="false")
    is_user_treated_as_topic_source: bool = Column(Boolean,
                                                   nullable=False,
                                                   server_default="false")
    common_topic_tags: List[str] = Column(TagList,
                                          nullable=False,
                                          server_default="{}")
    important_topic_tags: List[str] = Column(TagList,
                                             nullable=False,
                                             server_default="{}")

    # Create a GiST index on path as well as the btree one that will be created by the
    # index=True/unique=True keyword args to Column above. The GiST index supports
    # additional operators for ltree queries: @>, <@, @, ~, ?
    __table_args__ = (Index("ix_groups_path_gist",
                            path,
                            postgresql_using="gist"), )

    @hybrid_property
    def sidebar_markdown(self) -> str:
        """Return the sidebar's markdown."""
        return self._sidebar_markdown

    @sidebar_markdown.setter  # type: ignore
    def sidebar_markdown(self, new_markdown: str) -> None:
        """Set the sidebar's markdown and render its HTML."""
        # No-op when unchanged, avoiding a pointless re-render.
        if new_markdown == self.sidebar_markdown:
            return

        self._sidebar_markdown = new_markdown

        if self._sidebar_markdown is not None:
            self.sidebar_rendered_html = convert_markdown_to_safe_html(
                new_markdown)
        else:
            self.sidebar_rendered_html = None

    def __repr__(self) -> str:
        """Display the group's path and ID as its repr format."""
        return f"<Group {self.path} ({self.group_id})>"

    def __str__(self) -> str:
        """Use the group path for the string representation."""
        return str(self.path)

    def __lt__(self, other: "Group") -> bool:
        """Order groups by their string representation."""
        return str(self) < str(other)

    def __init__(self, path: str, short_desc: Optional[str] = None):
        """Create a new group."""
        self.path = path
        self.short_description = short_desc

    def __acl__(self) -> Sequence[Tuple[str, Any, str]]:
        """Pyramid security ACL."""
        acl = []

        # view:
        #  - all groups can be viewed by everyone
        acl.append((Allow, Everyone, "view"))

        # subscribe:
        #  - all groups can be subscribed to by logged-in users
        acl.append((Allow, Authenticated, "subscribe"))

        # post_topic:
        #  - only admins can post in admin-posting-only groups
        #  - otherwise, all logged-in users can post
        if self.is_admin_posting_only:
            acl.append((Allow, "admin", "post_topic"))
            acl.append((Deny, Everyone, "post_topic"))

        acl.append((Allow, Authenticated, "post_topic"))

        # wiki_page_create
        #  - permission must be granted specifically
        acl.append((Allow, "admin", "wiki_page_create"))
        acl.append((Allow, "wiki", "wiki_page_create"))

        acl.append(DENY_ALL)

        return acl

    def is_subgroup_of(self, other: "Group") -> bool:
        """Return whether this group is a sub-group of the other one."""
        # descendant_of() returns True if the ltrees are equal, so avoid that
        if self.path == other.path:
            return False

        return self.path.descendant_of(other.path)
Example #60
0
class JobPost(BaseMixin, db.Model):
    """A job post, with company, compensation, payment and review metadata."""

    __tablename__ = 'jobpost'

    # Metadata
    user_id = db.Column(None,
                        db.ForeignKey('user.id'),
                        nullable=True,
                        index=True)
    user = db.relationship(User,
                           primaryjoin=user_id == User.id,
                           backref=db.backref('jobposts', lazy='dynamic'))

    hashid = db.Column(db.String(5), nullable=False, unique=True)
    datetime = db.Column(db.DateTime,
                         default=db.func.utcnow(),
                         nullable=False,
                         index=True)  # Published
    closed_datetime = db.Column(db.DateTime,
                                nullable=True)  # If withdrawn or rejected
    # Pinned on the home page. Boards use the BoardJobPost.pinned column
    sticky = db.Column(db.Boolean, nullable=False, default=False)
    pinned = db.synonym('sticky')

    # Job description
    headline = db.Column(db.Unicode(100), nullable=False)
    # headlineb: alternate headline, presumably for A/B testing — TODO confirm
    headlineb = db.Column(db.Unicode(100), nullable=True)
    type_id = db.Column(None, db.ForeignKey('jobtype.id'), nullable=False)
    type = db.relation(JobType, primaryjoin=type_id == JobType.id)
    category_id = db.Column(None,
                            db.ForeignKey('jobcategory.id'),
                            nullable=False)
    category = db.relation(JobCategory,
                           primaryjoin=category_id == JobCategory.id)
    location = db.Column(db.Unicode(80), nullable=False)
    parsed_location = db.Column(JsonDict)
    # remote_location tracks whether the job is work-from-home/work-from-anywhere
    remote_location = db.Column(db.Boolean, default=False, nullable=False)
    relocation_assist = db.Column(db.Boolean, default=False, nullable=False)
    description = db.Column(db.UnicodeText, nullable=False)
    perks = db.Column(db.UnicodeText, nullable=False)
    how_to_apply = db.Column(db.UnicodeText, nullable=False)
    hr_contact = db.Column(db.Boolean, nullable=True)

    # Compensation details
    pay_type = db.Column(db.SmallInteger,
                         nullable=True)  # Value in models.PAY_TYPE
    pay_currency = db.Column(db.CHAR(3), nullable=True)
    pay_cash_min = db.Column(db.Integer, nullable=True)
    pay_cash_max = db.Column(db.Integer, nullable=True)
    pay_equity_min = db.Column(db.Numeric, nullable=True)
    pay_equity_max = db.Column(db.Numeric, nullable=True)

    # Company details
    company_name = db.Column(db.Unicode(80), nullable=False)
    company_logo = db.Column(db.Unicode(255), nullable=True)
    company_url = db.Column(db.Unicode(255), nullable=False, default=u'')
    twitter = db.Column(db.Unicode(15), nullable=True)
    fullname = db.Column(
        db.Unicode(80),
        nullable=True)  # Deprecated field, used before user_id was introduced
    email = db.Column(db.Unicode(80), nullable=False)
    email_domain = db.Column(db.Unicode(80), nullable=False, index=True)
    domain_id = db.Column(None, db.ForeignKey('domain.id'), nullable=False)
    # md5sum: hash of the poster's email; used for webmail-domain browsing
    md5sum = db.Column(db.String(32), nullable=False, index=True)

    # Payment, audit and workflow fields
    words = db.Column(
        db.UnicodeText,
        nullable=True)  # All words in description, perks and how_to_apply
    promocode = db.Column(db.String(40), nullable=True)
    # Post state; values are from models.POSTSTATUS (default: draft)
    status = db.Column(db.Integer, nullable=False, default=POSTSTATUS.DRAFT)
    ipaddr = db.Column(db.String(45), nullable=False)
    useragent = db.Column(db.Unicode(250), nullable=True)
    edit_key = db.Column(db.String(40),
                         nullable=False,
                         default=random_long_key)
    email_verify_key = db.Column(db.String(40),
                                 nullable=False,
                                 default=random_long_key)
    email_sent = db.Column(db.Boolean, nullable=False, default=False)
    email_verified = db.Column(db.Boolean, nullable=False, default=False)
    payment_value = db.Column(db.Integer, nullable=False, default=0)
    payment_received = db.Column(db.Boolean, nullable=False, default=False)
    reviewer_id = db.Column(None,
                            db.ForeignKey('user.id'),
                            nullable=True,
                            index=True)
    reviewer = db.relationship(User,
                               primaryjoin=reviewer_id == User.id,
                               backref="reviewed_posts")
    review_datetime = db.Column(db.DateTime, nullable=True)
    review_comments = db.Column(db.Unicode(250), nullable=True)

    # Full-text search vector; deferred so it stays out of routine loads
    search_vector = deferred(db.Column(TSVECTOR, nullable=True))

    # Metadata for classification
    language = db.Column(db.CHAR(2), nullable=True)
    language_confidence = db.Column(db.Float, nullable=True)

    admins = db.relationship(User,
                             lazy='dynamic',
                             secondary=lambda: jobpost_admin_table,
                             backref=db.backref('admin_of', lazy='dynamic'))
    starred_by = db.relationship(User,
                                 lazy='dynamic',
                                 secondary=starred_job_table,
                                 backref=db.backref('starred_jobs',
                                                    lazy='dynamic'))
    #: Quick lookup locations this post is referring to
    geonameids = association_proxy('locations',
                                   'geonameid',
                                   creator=lambda l: JobLocation(geonameid=l))
    # Loader options deferring columns not needed for listing views;
    # applied by callers when building queries
    _defercols = [
        defer('user_id'),
        defer('closed_datetime'),
        defer('parsed_location'),
        defer('relocation_assist'),
        defer('description'),
        defer('perks'),
        defer('how_to_apply'),
        defer('hr_contact'),
        defer('company_logo'),
        defer('company_url'),
        defer('fullname'),
        defer('email'),
        defer('words'),
        defer('promocode'),
        defer('ipaddr'),
        defer('useragent'),
        defer('edit_key'),
        defer('email_verify_key'),
        defer('email_sent'),
        defer('email_verified'),
        defer('payment_value'),
        defer('payment_received'),
        defer('reviewer_id'),
        defer('review_datetime'),
        defer('review_comments'),
        defer('language'),
        defer('language_confidence'),

        # These are defined below JobApplication
        defer('new_applications'),
        defer('replied_applications'),
        defer('viewcounts_impressions'),
        defer('viewcounts_viewed'),
        defer('viewcounts_opened'),
        defer('viewcounts_applied'),

        # pay columns are NOT deferred: needed by pay_label() on listings —
        # TODO confirm; the original left these commented out
        # defer('pay_type'),
        # defer('pay_currency'),
        # defer('pay_cash_min'),
        # defer('pay_cash_max'),
        # defer('pay_equity_min'),
        # defer('pay_equity_max'),
    ]

    @classmethod
    def get(cls, hashid):
        """Return the JobPost with this hashid, or None if absent."""
        query = cls.query.filter_by(hashid=hashid)
        return query.one_or_none()

    @classmethod
    def fetch(cls, hashid):
        """Return a JobPost query loading only listing/header columns."""
        columns = ("id", "headline", "headlineb", "hashid", "datetime",
                   "status", "email_domain", "review_comments", "company_url")
        return cls.query.filter_by(hashid=hashid).options(load_only(*columns))

    def __repr__(self):
        """Debugging representation showing hashid and headline."""
        return u'<JobPost %s "%s">' % (self.hashid, self.headline)

    def admin_is(self, user):
        """Check whether `user` is the post's owner or one of its admins."""
        if user is None:
            return False
        if user == self.user:
            return True
        # Count query avoids loading full User rows for the membership test
        matches = self.admins.options(db.load_only('id')).filter_by(
            id=user.id).count()
        return bool(matches)

    @property
    def expiry_date(self):
        """Datetime after which this post falls off listings."""
        published = self.datetime
        return published + agelimit

    @property
    def after_expiry_date(self):
        """One full day past the expiry date."""
        one_day = timedelta(days=1)
        return self.expiry_date + one_day

    def status_label(self):
        """Short label for draft/pending/new posts; None for other states."""
        if self.status == POSTSTATUS.DRAFT:
            return _("Draft")
        if self.status == POSTSTATUS.PENDING:
            return _("Pending")
        if self.is_new():
            return _("New!")
        return None

    def is_draft(self):
        """Whether the post is an unsubmitted draft."""
        return POSTSTATUS.DRAFT == self.status

    def is_pending(self):
        """Whether the post is awaiting confirmation."""
        return POSTSTATUS.PENDING == self.status

    def is_unpublished(self):
        """Whether the post's status is one of the unpublished states."""
        status = self.status
        return status in POSTSTATUS.UNPUBLISHED

    def is_listed(self):
        """Whether the post is in a listed state and within the age limit."""
        cutoff = datetime.utcnow() - agelimit
        return self.status in POSTSTATUS.LISTED and self.datetime > cutoff

    def is_public(self):
        """Whether the post's status is publicly listable (ignores age)."""
        status = self.status
        return status in POSTSTATUS.LISTED

    def is_flagged(self):
        """Whether the post has been flagged."""
        return POSTSTATUS.FLAGGED == self.status

    def is_moderated(self):
        """Whether the post has been moderated."""
        return POSTSTATUS.MODERATED == self.status

    def is_announcement(self):
        """Whether the post is an announcement."""
        return POSTSTATUS.ANNOUNCEMENT == self.status

    def is_new(self):
        """Whether the post was published within the new-post window."""
        cutoff = datetime.utcnow() - newlimit
        return self.datetime >= cutoff

    def is_closed(self):
        """Whether the post has been closed."""
        return POSTSTATUS.CLOSED == self.status

    def is_unacceptable(self):
        """Whether the post was rejected or marked as spam."""
        return self.status in (POSTSTATUS.REJECTED, POSTSTATUS.SPAM)

    def is_old(self):
        """Whether the post has passed the listing age limit."""
        cutoff = datetime.utcnow() - agelimit
        return self.datetime <= cutoff

    def pay_type_label(self):
        """Label for the post's pay type, or None when unset/unknown."""
        pay_type = self.pay_type
        return PAY_TYPE.get(pay_type)

    def withdraw(self):
        """Mark the post as withdrawn by the poster."""
        self.status = POSTSTATUS.WITHDRAWN

    def close(self):
        """Mark the post as closed."""
        self.status = POSTSTATUS.CLOSED

    def confirm(self):
        """Mark the post as confirmed."""
        self.status = POSTSTATUS.CONFIRMED

    def url_for(self, action='view', _external=False, **kwargs):
        """Return the URL for the given action on this post.

        Most actions share the same (hashid, domain) URL pattern and are
        dispatched via a table; 'confirm', 'confirm-link' and 'browse' have
        special parameters. Unknown actions return None, matching the
        behavior of the original elif chain.
        """
        # Unpublished posts are not addressed under their email domain for
        # view/edit URLs
        if self.status in POSTSTATUS.UNPUBLISHED and action in ('view',
                                                                'edit'):
            domain = None
        else:
            domain = self.email_domain

        # A/B test flag for permalinks
        if 'b' in kwargs:
            if kwargs['b'] is not None:
                kwargs['b'] = unicode(int(kwargs['b']))
            else:
                kwargs.pop('b')

        # Actions that follow the standard hashid + domain URL pattern.
        # This table replaces thirteen byte-identical elif branches.
        endpoints = {
            'view': 'jobdetail',
            'reveal': 'revealjob',
            'apply': 'applyjob',
            'edit': 'editjob',
            'withdraw': 'withdraw',
            'close': 'close',
            'reopen': 'reopen',
            'moderate': 'moderatejob',
            'pin': 'pinnedjob',
            'reject': 'rejectjob',
            'logo': 'logoimage',
            'star': 'starjob',
            'manage': 'managejob',
        }
        if action in endpoints:
            return url_for(endpoints[action],
                           hashid=self.hashid,
                           domain=domain,
                           _external=_external,
                           **kwargs)
        elif action == 'confirm':
            # Confirmation URLs are not domain-scoped
            return url_for('confirm',
                           hashid=self.hashid,
                           _external=_external,
                           **kwargs)
        elif action == 'confirm-link':
            # Email verification links carry the key and are always absolute
            return url_for('confirm_email',
                           hashid=self.hashid,
                           domain=domain,
                           key=self.email_verify_key,
                           _external=True,
                           **kwargs)
        elif action == 'browse':
            # Webmail posters are browsed by email hash, others by domain
            if self.email_domain in webmail_domains:
                return url_for('browse_by_email',
                               md5sum=self.md5sum,
                               _external=_external,
                               **kwargs)
            else:
                return url_for('browse_by_domain',
                               domain=self.email_domain,
                               _external=_external,
                               **kwargs)
        # Unknown action: fall through to None

    def permissions(self, user, inherited=None):
        """Return the set of permissions `user` holds on this post."""
        perms = super(JobPost, self).permissions(user, inherited)
        # Listed posts are viewable by anyone
        if self.status in POSTSTATUS.LISTED:
            perms.add('view')
        # Owners/admins can also view unpublished posts, and manage them
        if self.admin_is(user):
            if self.status in POSTSTATUS.UNPUBLISHED:
                perms.add('view')
            for perm in ('edit', 'manage', 'withdraw'):
                perms.add(perm)
        return perms

    @property
    def from_webmail_domain(self):
        """Whether the poster used a webmail address (gmail etc.)."""
        domain = self.email_domain
        return domain in webmail_domains

    @property
    def company_url_domain_zone(self):
        """Registered domain + TLD of the company URL, or an empty string."""
        if not self.company_url:
            return u''
        parts = tldextract.extract(self.company_url)
        return u'{0}.{1}'.format(parts.domain, parts.suffix)

    @property
    def pays_cash(self):
        """Whether the post offers cash pay with a declared range."""
        if self.pay_type is None:  # Legacy record from before `pay_type` was mandatory
            return True
        if self.pay_type == PAY_TYPE.NOCASH:
            return False
        return self.pay_cash_min is not None and self.pay_cash_max is not None

    @property
    def pays_equity(self):
        """Whether the post declares a full equity range."""
        return not (self.pay_equity_min is None or self.pay_equity_max is None)

    def pay_label(self):
        """Human-readable pay summary combining cash and equity components.

        Returns u"NA" for legacy posts without a pay type, and "No pay"
        when neither cash nor equity applies.
        """
        if self.pay_type is None:
            return u"NA"

        if self.pay_type == PAY_TYPE.NOCASH:
            cash = None
        else:
            # "pa" (per annum) suffix only for recurring pay
            suffix = "pa" if self.pay_type == PAY_TYPE.RECURRING else ""
            # Indian notation (lakh/crore groupings) applies to INR only
            indian = self.pay_currency == "INR"
            symbol = {
                "INR": u"\u20b9",
                "USD": u"$",
                "EUR": u"\u20ac",
                "GBP": u"\u00a3",
            }.get(self.pay_currency, u"\u00a4")

            if self.pay_cash_min == self.pay_cash_max:
                cash = symbol + number_abbreviate(self.pay_cash_min, indian)
            else:
                cash = (symbol + number_abbreviate(self.pay_cash_min, indian) +
                        "-" + number_abbreviate(self.pay_cash_max, indian))

            if suffix:
                cash = cash + " " + suffix

        if self.pays_equity:
            if self.pay_equity_min == self.pay_equity_max:
                equity = str(self.pay_equity_min) + "%"
            else:
                equity = (str(self.pay_equity_min) + "-" +
                          str(self.pay_equity_max) + "%")
        else:
            equity = None

        if cash and equity:
            return ", ".join([cash, equity])
        if cash:
            return cash
        if equity:
            return equity
        return "No pay"

    def tag_content(self):
        """Combined markup of headline, description and perks.

        The headline is escaped; description and perks are trusted HTML.
        """
        sections = (
            Markup('<div>') + Markup(escape(self.headline)) +
            Markup('</div>'),
            Markup('<div>') + Markup(self.description) + Markup('</div>'),
            Markup('<div>') + Markup(self.perks) + Markup('</div>'),
        )
        return Markup('\n').join(sections)

    @property
    def viewcounts_key(self):
        """Redis hash key for this post's cached view counts."""
        # Also see views.helper.update_impression_counts for a copy of this key
        return 'hasjob/viewcounts/%d' % (self.id,)

    @cached_property  # For multiple accesses in a single request
    def viewcounts(self):
        """Return impression/view/open/apply counts and the pay label.

        Values come from the per-request `g.viewcounts` prefetch when
        available, else from the Redis hash; any missing field is computed
        from the deferred column properties and written back to Redis with
        a 24-hour expiry. NOTE(review): redis_store returns strings, hence
        the int()/unicode() conversions on cache hits.
        """
        cache_key = self.viewcounts_key
        # Per-request prefetch (views layer fills g.viewcounts) — fall back
        # to a direct Redis read when absent
        values = g.viewcounts.get(cache_key) if g else None
        if values is None:
            values = redis_store.hgetall(cache_key)
        if 'impressions' not in values:
            # Also see views.helper.update_impression_counts for a copy of this query
            # values['impressions'] = db.session.query(db.func.count(
            #     db.func.distinct(EventSession.user_id)).label('count')).join(
            #     JobImpression).filter(JobImpression.jobpost == self).first().count
            values['impressions'] = self.viewcounts_impressions
            redis_store.hset(cache_key, 'impressions', values['impressions'])
            redis_store.expire(cache_key, 86400)
        else:
            values['impressions'] = int(values['impressions'])
        if 'viewed' not in values:
            # values['viewed'] = UserJobView.query.filter_by(jobpost=self).count()
            values['viewed'] = self.viewcounts_viewed
            redis_store.hset(cache_key, 'viewed', values['viewed'])
            redis_store.expire(cache_key, 86400)
        else:
            values['viewed'] = int(values['viewed'])
        if 'opened' not in values:
            # values['opened'] = UserJobView.query.filter_by(jobpost=self, applied=True).count()
            values['opened'] = self.viewcounts_opened
            redis_store.hset(cache_key, 'opened', values['opened'])
            redis_store.expire(cache_key, 86400)
        else:
            values['opened'] = int(values['opened'])
        if 'applied' not in values:
            # values['applied'] = JobApplication.query.filter_by(jobpost=self).count()
            values['applied'] = self.viewcounts_applied
            redis_store.hset(cache_key, 'applied', values['applied'])
            redis_store.expire(cache_key, 86400)
        else:
            values['applied'] = int(values['applied'])
        # pay_label rendering is extraordinarily slow. We don't know why yet, but it's static data, so cache it
        if 'pay_label' not in values:
            values['pay_label'] = self.pay_label()
            redis_store.hset(cache_key, 'pay_label',
                             values['pay_label'].encode('utf-8'))
            redis_store.expire(cache_key, 86400)
        elif isinstance(
                values['pay_label'],
                str):  # Redis appears to return bytestrings, not Unicode
            values['pay_label'] = unicode(values['pay_label'], 'utf-8')
        return values

    def uncache_viewcounts(self, key=None):
        """Invalidate cached view counts: one hash field, or all of them."""
        cache_key = self.viewcounts_key
        if key:
            redis_store.hdel(cache_key, key)
        else:
            redis_store.delete(cache_key)

    @cached_property
    def ab_impressions(self):
        """Impression counts per A/B group ('NA' when no group recorded)."""
        results = {'NA': 0, 'A': 0, 'B': 0}
        grouped = db.session.query(
            JobImpression.bgroup.label('bgroup'),
            db.func.count('*').label('count')).filter(
                JobImpression.jobpost == self).group_by(JobImpression.bgroup)
        for row in grouped:
            # bgroup is False for A, True for B, None (or other) for NA
            if row.bgroup is False:
                results['A'] = row.count
            elif row.bgroup is True:
                results['B'] = row.count
            else:
                results['NA'] = row.count
        return results

    @cached_property
    def ab_views(self):
        """View-session counts bucketed by toss type and A/B group.

        Buckets: C = conversions (cointoss=True, crosstoss=False),
        E = external (cointoss=False, any crosstoss), X = cross toss
        (cointoss=True, crosstoss=True); each split into _A/_B/_NA by bgroup.
        """
        results = {
            'C_NA': 0,
            'C_A': 0,
            'C_B': 0,  # Conversions (cointoss=True, crosstoss=False)
            'E_NA': 0,
            'E_A': 0,
            'E_B':
            0,  # External (cointoss=False, crosstoss=True OR False [do sum])
            'X_NA': 0,
            'X_A': 0,
            'X_B': 0,  # Cross toss (cointoss=True, crosstoss=True)
        }
        counts = db.session.query(JobViewSession.bgroup.label('bgroup'),
                                  JobViewSession.cointoss.label('cointoss'),
                                  JobViewSession.crosstoss.label('crosstoss'),
                                  db.func.count('*').label('count')).filter(
                                      JobViewSession.jobpost == self).group_by(
                                          JobViewSession.bgroup,
                                          JobViewSession.cointoss,
                                          JobViewSession.crosstoss)

        for row in counts:
            if row.cointoss is True and row.crosstoss is False:
                prefix = 'C'
            elif row.cointoss is False:
                prefix = 'E'
            elif row.cointoss is True and row.crosstoss is True:
                prefix = 'X'
            else:
                # FIX: previously an unmatched combination (e.g. cointoss or
                # crosstoss being None) left `prefix` unbound on the first
                # row (NameError) or stale from the previous row, silently
                # miscounting. Skip rows that fit no bucket.
                continue
            if row.bgroup is False:
                results[prefix + '_A'] += row.count
            elif row.bgroup is True:
                results[prefix + '_B'] += row.count
            else:
                results[prefix + '_NA'] += row.count
        return results

    @property
    def sort_score(self):
        """
        Sort with a gravity of 1.8 using the HackerNews algorithm

        Applications are weighted 3x relative to mere opens; the score
        decays with the post's age in hours.
        NOTE(review): under Python 2, `age.seconds / 3600` is integer
        division, so `hours` is an int — presumably intentional; confirm
        before porting to Python 3.
        """
        viewcounts = self.viewcounts
        opened = int(viewcounts['opened'])
        applied = int(viewcounts['applied'])
        age = datetime.utcnow() - self.datetime
        hours = age.days * 24 + age.seconds / 3600

        return ((applied * 3) + (opened - applied)) / pow((hours + 2), 1.8)

    @cached_property  # For multiple accesses in a single request
    def viewstats(self):
        """Return (resolution, stats): finer-grained buckets for newer posts.

        'q' = quarter-hour, 'h' = hourly, 'd' = daily.
        """
        age = datetime.utcnow() - self.datetime
        if age.days >= 2:
            return 'd', viewstats_by_id_day(self.id)
        # NB: timedelta.seconds is the sub-day remainder, not total seconds
        if age.seconds < 21600:  # Less than 6 hours into the current day
            return 'q', viewstats_by_id_qhour(self.id)
        return 'h', viewstats_by_id_hour(self.id)

    def reports(self):
        """Summarize flags against this post as dicts of count and title,
        ordered by the report code's sequence number."""
        if not self.flags:
            return []
        tally = {}
        for flag in self.flags:
            code = flag.reportcode
            tally[code] = tally.get(code, 0) + 1
        ordered = sorted((code.seq, code.title, num)
                         for code, num in tally.items())
        return [{'count': num, 'title': title} for seq, title, num in ordered]