Example #1
class UserRole(Base):
    __tablename__ = 'users_roles'
    user_id = Column(Integer(), ForeignKey('users.id'), primary_key=True)
    role_id = Column(Integer(), ForeignKey('roles.id'), primary_key=True)
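A minimal, self-contained sketch of how such a composite-primary-key association model is typically declared and populated. The User, Role, and Base definitions below are assumptions for illustration, not part of the example above.

from sqlalchemy import Column, ForeignKey, Integer, Unicode, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(255))

class Role(Base):
    __tablename__ = 'roles'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode(255))

class UserRole(Base):
    __tablename__ = 'users_roles'
    # The (user_id, role_id) pair is the identity of the row, so the same
    # assignment cannot be inserted twice.
    user_id = Column(Integer(), ForeignKey('users.id'), primary_key=True)
    role_id = Column(Integer(), ForeignKey('roles.id'), primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([User(id=1, name='alice'), Role(id=1, name='admin')])
    session.add(UserRole(user_id=1, role_id=1))
    session.commit()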
Example #2
class TVDBEpisode(Base):
    __tablename__ = 'tvdb_episodes'

    id = Column(Integer, primary_key=True, autoincrement=False)
    expired = Column(Boolean)
    last_updated = Column(Integer)
    season_number = Column(Integer)
    episode_number = Column(Integer)
    absolute_number = Column(Integer)
    name = Column(Unicode)
    overview = Column(Unicode)
    rating = Column(Float)
    director = Column(Unicode)
    _image = Column(Unicode)
    _first_aired = Column('firstaired', DateTime)
    first_aired = text_date_synonym('_first_aired')

    series_id = Column(Integer, ForeignKey('tvdb_series.id'), nullable=False)

    def __init__(self, series_id, ep_id):
        """
        Looks up the episode on tvdb and creates a new database model for it.
        These instances should only be added to a session via `session.merge`.
        """
        self.series_id = series_id
        self.id = ep_id
        self.expired = False
        try:
            episode = TVDBRequest().get('episodes/%s' % self.id)
        except requests.RequestException as e:
            raise LookupError('Error updating data from tvdb: %s' % e)

        self.id = episode['id']
        self.last_updated = episode['lastUpdated']
        self.season_number = episode['airedSeason']
        self.episode_number = episode['airedEpisodeNumber']
        self.absolute_number = episode['absoluteNumber']
        self.name = episode['episodeName']
        self.overview = episode['overview']
        self.director = episode['director']
        self._image = episode['filename']
        self.rating = episode['siteRating']
        self.first_aired = episode['firstAired']

    def __repr__(self):
        return '<TVDBEpisode series=%s,season=%s,episode=%s>' % \
               (self.series.name, self.season_number, self.episode_number)

    def to_dict(self):
        return {
            'id': self.id,
            'expired': self.expired,
            'last_update': self.last_updated,
            'season_number': self.season_number,
            'episode_number': self.episode_number,
            'absolute_number': self.absolute_number,
            'episode_name': self.name,
            'overview': self.overview,
            'director': self.director,
            'rating': self.rating,
            'image': self.image,
            'first_aired': self.first_aired,
            'series_id': self.series_id
        }

    @property
    def image(self):
        if self._image:
            return TVDBRequest.BANNER_URL + self._image
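The docstring's note about `session.merge` matters because the primary key comes from the remote API rather than autoincrement, so `merge` acts as an insert-or-update keyed on that id. A self-contained toy sketch of the idiom; the `Episode` model below is illustrative, not the class above.

from sqlalchemy import Column, Integer, Unicode, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Episode(Base):
    __tablename__ = 'episodes'
    id = Column(Integer, primary_key=True, autoincrement=False)  # id supplied by the API
    name = Column(Unicode)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.merge(Episode(id=42, name='Pilot'))       # inserts the row
    session.merge(Episode(id=42, name='Pilot (v2)'))  # updates the same row
    session.commit()
    assert session.get(Episode, 42).name == 'Pilot (v2)'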
Example #3
class Participation(Base):
    """Class to store a single participation of a user in a contest.

    """
    __tablename__ = 'participations'

    # Auto increment primary key.
    id = Column(
        Integer,
        primary_key=True)

    # If the IP lock is enabled the user can log into CWS only if their
    # requests come from an IP address that belongs to any of these
    # subnetworks. An empty list prevents the user from logging in,
    # None disables the IP lock for the user.
    ip = Column(
        CastingArray(CIDR),
        nullable=True)

    # Starting time: for contests where every user has at most x hours
    # of the y > x hours totally available, this is the time the user
    # decided to start their time-frame.
    starting_time = Column(
        DateTime,
        nullable=True)

    # A shift in the time interval during which the user is allowed to
    # submit.
    delay_time = Column(
        Interval,
        CheckConstraint("delay_time >= '0 seconds'"),
        nullable=False,
        default=timedelta())

    # An extra amount of time allocated for this user.
    extra_time = Column(
        Interval,
        CheckConstraint("extra_time >= '0 seconds'"),
        nullable=False,
        default=timedelta())

    # Contest-specific password. If this password is not null then the
    # traditional user.password field will be "replaced" by this field's
    # value (only for this participation).
    password = Column(
        Unicode,
        nullable=True)

    # A hidden participation (e.g. does not appear in public rankings), can
    # also be used for debugging purposes.
    hidden = Column(
        Boolean,
        nullable=False,
        default=False)

    # An unrestricted participation: the usual limits (contest time,
    # maximum number of submissions, minimum interval between submissions,
    # maximum number of user tests, minimum interval between user tests)
    # do not apply; can also be used for debugging purposes.
    unrestricted = Column(
        Boolean,
        nullable=False,
        default=False)

    # Contest (id and object) to which the user is participating.
    contest_id = Column(
        Integer,
        ForeignKey(Contest.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    contest = relationship(
        Contest,
        back_populates="participations")

    # User (id and object) which is participating.
    user_id = Column(
        Integer,
        ForeignKey(User.id,
                   onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    user = relationship(
        User,
        back_populates="participations")
    __table_args__ = (UniqueConstraint('contest_id', 'user_id'),)

    # Team (id and object) that the user is representing with this
    # participation.
    team_id = Column(
        Integer,
        ForeignKey(Team.id,
                   onupdate="CASCADE", ondelete="RESTRICT"),
        nullable=True)
    team = relationship(
        Team,
        back_populates="participations")

    # These one-to-many relationships are the reversed directions of
    # the ones defined in the "child" classes using foreign keys.

    messages = relationship(
        "Message",
        order_by="[Message.timestamp]",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="participation")

    questions = relationship(
        "Question",
        order_by="[Question.question_timestamp, Question.reply_timestamp]",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="participation")

    submissions = relationship(
        "Submission",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="participation")

    user_tests = relationship(
        "UserTest",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="participation")

    printjobs = relationship(
        "PrintJob",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="participation")
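The `back_populates` relationships above require a matching declaration on the other class. A small runnable sketch of how the two sides pair up, using simplified models assumed for illustration only.

from sqlalchemy import Column, ForeignKey, Integer, Unicode, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Contest(Base):
    __tablename__ = 'contests'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)
    # reverse side of Participation.contest
    participations = relationship(
        "Participation",
        cascade="all, delete-orphan",
        passive_deletes=True,
        back_populates="contest")

class Participation(Base):
    __tablename__ = 'participations'
    id = Column(Integer, primary_key=True)
    contest_id = Column(
        Integer,
        ForeignKey('contests.id', onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
        index=True)
    contest = relationship("Contest", back_populates="participations")

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    contest = Contest(name='demo')
    contest.participations.append(Participation())
    session.add(contest)
    session.commit()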
Example #4
class FuncKeyDestForward(Base):

    DESTINATION_TYPE_ID = 6

    __tablename__ = 'func_key_dest_forward'
    __table_args__ = (
        PrimaryKeyConstraint('func_key_id', 'destination_type_id',
                             'extension_id'),
        ForeignKeyConstraint(['func_key_id', 'destination_type_id'],
                             ['func_key.id', 'func_key.destination_type_id']),
        CheckConstraint(
            'destination_type_id = {}'.format(DESTINATION_TYPE_ID)),
    )

    func_key_id = Column(Integer)
    destination_type_id = Column(
        Integer, server_default="{}".format(DESTINATION_TYPE_ID))
    extension_id = Column(Integer, ForeignKey('extensions.id'))
    number = Column(String(40))

    type = 'forward'

    func_key = relationship(FuncKey,
                            cascade='all,delete-orphan',
                            single_parent=True)

    extension = relationship(Extension)
    extension_typeval = association_proxy(
        'extension',
        'typeval',
        # Only to keep value persistent in the instance
        creator=lambda _typeval: Extension(type='extenfeatures',
                                           typeval=_typeval))

    def to_tuple(self):
        return (
            ('exten', self.exten),
            ('forward', self.forward),
        )

    @hybrid_property
    def exten(self):
        return self.number

    @exten.setter
    def exten(self, value):
        self.number = value

    @hybrid_property
    def forward(self):
        FORWARDS = {
            'fwdbusy': 'busy',
            'fwdrna': 'noanswer',
            'fwdunc': 'unconditional'
        }
        return FORWARDS.get(self.extension_typeval, self.extension_typeval)

    @forward.expression
    def forward(cls):
        return cls.extension_typeval  # only used to pass test

    @forward.setter
    def forward(self, value):
        TYPEVALS = {
            'busy': 'fwdbusy',
            'noanswer': 'fwdrna',
            'unconditional': 'fwdunc'
        }
        self.extension_typeval = TYPEVALS.get(value, value)
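The `forward` hybrid property maps storage values ('fwdbusy', 'fwdrna', 'fwdunc') to API values ('busy', 'noanswer', 'unconditional') and back. A self-contained toy of that getter/setter mapping pattern; the model and table names are illustrative only.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Forward(Base):
    __tablename__ = 'forwards'
    id = Column(Integer, primary_key=True)
    typeval = Column(String(40))

    _TO_API = {'fwdbusy': 'busy', 'fwdrna': 'noanswer', 'fwdunc': 'unconditional'}
    _TO_DB = {v: k for k, v in _TO_API.items()}

    @hybrid_property
    def forward(self):
        return self._TO_API.get(self.typeval, self.typeval)

    @forward.setter
    def forward(self, value):
        self.typeval = self._TO_DB.get(value, value)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    f = Forward()
    f.forward = 'busy'          # stored as 'fwdbusy'
    session.add(f)
    session.commit()
    assert f.typeval == 'fwdbusy' and f.forward == 'busy'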
Example #5
        tables = [table_schema(name, session) for name in table_names]
        for table in tables:
            session.execute(table.delete())
        table_add_column('rottentomatoes_actors', 'rt_id', String, session)
        ver = 1
    if ver == 1:
        table = table_schema('rottentomatoes_search_results', session)
        session.execute(sql.delete(table, table.c.movie_id == None))
        ver = 2
    return ver


# association tables
genres_table = Table(
    'rottentomatoes_movie_genres', Base.metadata,
    Column('movie_id', Integer, ForeignKey('rottentomatoes_movies.id')),
    Column('genre_id', Integer, ForeignKey('rottentomatoes_genres.id')),
    Index('ix_rottentomatoes_movie_genres', 'movie_id', 'genre_id'))
Base.register_table(genres_table)

actors_table = Table(
    'rottentomatoes_movie_actors', Base.metadata,
    Column('movie_id', Integer, ForeignKey('rottentomatoes_movies.id')),
    Column('actor_id', Integer, ForeignKey('rottentomatoes_actors.id')),
    Index('ix_rottentomatoes_movie_actors', 'movie_id', 'actor_id'))
Base.register_table(actors_table)

directors_table = Table(
    'rottentomatoes_movie_directors', Base.metadata,
    Column('movie_id', Integer, ForeignKey('rottentomatoes_movies.id')),
    Column('director_id', Integer, ForeignKey('rottentomatoes_directors.id')),
    Index('ix_rottentomatoes_movie_directors', 'movie_id', 'director_id'))
Base.register_table(directors_table)
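Association `Table` objects like these are usually consumed through `secondary=` on a relationship. A minimal standalone sketch of that pattern with simplified, assumed models (not the plugin's real ones).

from sqlalchemy import Column, ForeignKey, Integer, Table, Unicode, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

movie_genres = Table(
    'movie_genres', Base.metadata,
    Column('movie_id', Integer, ForeignKey('movies.id')),
    Column('genre_id', Integer, ForeignKey('genres.id')))

class Movie(Base):
    __tablename__ = 'movies'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    genres = relationship('Genre', secondary=movie_genres, backref='movies')

class Genre(Base):
    __tablename__ = 'genres'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Movie(title='Example', genres=[Genre(name='Drama')]))
    session.commit()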
Example #6
class ItemCandidate(Base):
    __tablename__ = 'item_candidate'

    item_id = Column(Integer, ForeignKey('item.item_id'), primary_key=True)
    osm_id = Column(BigInteger, primary_key=True)
    osm_type = Column(osm_type_enum, primary_key=True)
    name = Column(String)
    dist = Column(Float)
    tags = Column(postgresql.JSON)
    planet_table = Column(String)
    src_id = Column(BigInteger)
    geom = Column(Geography(srid=4326, spatial_index=True))
    geojson = column_property(func.ST_AsGeoJSON(geom), deferred=True)
    identifier_match = Column(Boolean)
    address_match = Column(Boolean)
    name_match = Column(postgresql.JSON)

    #    __table_args__ = (
    #        ForeignKeyConstraint(
    #            ['osm_type', 'osm_id'],
    #            ['osm_candidate.osm_type', 'osm_candidate.osm_id']
    #        ),
    #    )

    item = relationship(
        'Item',
        backref=backref('candidates',
                        lazy='dynamic',
                        cascade='save-update, merge, delete, delete-orphan'))
    # candidate = relationship(OsmCandidate)

    #     @property
    #     def name(self):
    #         return self.candidate.name
    #
    #     @property
    #     def tags(self):
    #         return self.candidate.tags
    #
    @property
    def key(self):
        return f'Q{self.item_id}-{self.osm_type:s}-{self.osm_id:d}'

    def get_match(self):
        endings = matcher.get_ending_from_criteria(self.tags)
        wikidata_names = self.item.names()
        return match.check_for_match(self.tags, wikidata_names, endings)

    def get_all_matches(self):
        endings = matcher.get_ending_from_criteria(self.item.tags)
        wikidata_names = self.item.names()
        m = match.get_all_matches(self.tags, wikidata_names, endings)
        return m

    def languages(self):
        return {key[5:] for key in self.tags.keys() if key.startswith('name:')}

    def matching_tags(self):
        tags = []

        for tag_or_key in self.item.tags:
            if '=' not in tag_or_key and tag_or_key in self.tags:
                tags.append(tag_or_key)
                continue
            key, _, value = tag_or_key.partition('=')
            if self.tags.get(key) == value:
                tags.append(tag_or_key)
                continue

        return tags

    def update(self, candidate):
        for k, v in candidate.items():
            if k in {'osm_id', 'osm_type'}:
                continue
            setattr(self, k, v)

    @property
    def wikidata_tag(self):
        return self.tags.get('wikidata') or None

    def label_best_language(self, languages):
        if not languages:
            return self.label

        for key in 'bridge:name', 'tunnel:name', 'lock_name':
            if key in self.tags:
                return self.tags[key]

        names = {
            k[5:]: v
            for k, v in self.tags.items() if k.startswith('name:')
        }
        if 'name' in self.tags:
            top_lang = g.default_languages[0]['code']
            if top_lang not in names:
                names[top_lang] = self.tags['name']

        for lang in languages:
            key = lang if isinstance(lang, str) else lang.iso_639_1
            if key in names:
                return names[key]

        return self.label

    @property
    def label(self):
        for key in 'bridge:name', 'tunnel:name', 'lock_name':
            if key in self.tags:
                return self.tags[key]

        if 'name' in self.tags:
            name = self.tags['name']
            if 'addr:housename' in self.tags:
                return f'{name} (house name: {self.tags["addr:housename"]})'
            else:
                return name

        if 'name:en' in self.tags:
            return self.tags['name:en']
        for k, v in self.tags.items():
            if k.startswith('name:'):
                return v
        for k, v in self.tags.items():
            if 'name' in k:
                return v

        if all(tag in self.tags
               for tag in ('addr:housenumber', 'addr:street')):
            housenumber = self.tags['addr:housenumber']
            street = self.tags['addr:street']
            return f'{housenumber} {street}'

        return f'{self.osm_type}/{self.osm_id}'

    @property
    def url(self):
        return f'{osm_api_base}/{self.osm_type}/{self.osm_id}'

    def name_match_count(self, osm_key):
        if not self.name_match:
            return

        match_count = 0
        for match_type, wikidata_name, source in self.name_match[osm_key]:
            match_count += len(source)
        return match_count

    def set_match_detail(self):
        keys = ['identifier', 'address', 'name']
        if any(getattr(self, key + '_match') is not None for key in keys):
            return False  # no need

        endings = matcher.get_ending_from_criteria(self.tags)
        endings |= self.item.more_endings_from_isa()

        names = self.item.names()
        identifiers = self.item.get_item_identifiers()
        self.address_match = match.check_name_matches_address(self.tags, names)
        self.name_match = match.check_for_match(self.tags, names, endings)
        self.identifier_match = match.check_identifier(self.tags, identifiers)
        return True

    def display_distance(self):
        if has_app_context() and g.user.is_authenticated and g.user.units:
            units = g.user.units
        else:
            units = 'local'  # default

        if units == 'local':
            country_code = (getattr(g, 'country_code', None)
                            if has_app_context() else None)
            units = country_units.get(country_code, 'km_and_metres')

        return utils.display_distance(units, self.dist)

    def checkbox_ticked(self):
        max_dist = 500
        if any(tag == 'place' or (
                tag != 'place=farm' and tag.startswith('place='))
               for tag in self.matching_tags()):
            max_dist = 2000
        elif self.item.is_nhle:
            max_dist = 100
        return ((not self.dist or self.dist < max_dist
                 and 'designation=civil_parish' not in self.matching_tags())
                or self.item.candidates.count() > 1)

    def new_wikipedia_tag(self, languages):
        sitelinks = {
            code[:-4]: link['title']
            for code, link in self.item.sitelinks().items()
            if code.endswith('wiki')
        }

        for lang in languages:
            code = lang if isinstance(lang,
                                      str) else lang.wikimedia_language_code
            if code in sitelinks:
                return (code, sitelinks[code])
        return (None, None)
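`matching_tags` accepts either bare keys ('amenity') or key=value pairs ('building=church'). The same rule as a small standalone function with a quick check; this is an illustration, not the ORM method itself.

def matching_tags(item_tags, osm_tags):
    """Return the item tags/keys that the OSM element's tags satisfy."""
    matched = []
    for tag_or_key in item_tags:
        # bare key: matched if present at all
        if '=' not in tag_or_key and tag_or_key in osm_tags:
            matched.append(tag_or_key)
            continue
        # key=value: matched only with exactly that value
        key, _, value = tag_or_key.partition('=')
        if osm_tags.get(key) == value:
            matched.append(tag_or_key)
    return matched

assert matching_tags(['amenity', 'building=church'],
                     {'amenity': 'place_of_worship', 'building': 'church'}) \
    == ['amenity', 'building=church']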
Example #7
class PanelApplication(MagModel):
    event_id = Column(UUID,
                      ForeignKey('event.id', ondelete='SET NULL'),
                      nullable=True)
    poc_id = Column(UUID,
                    ForeignKey('attendee.id', ondelete='SET NULL'),
                    nullable=True)
    name = Column(UnicodeText)
    length = Column(Choice(c.PANEL_LENGTH_OPTS), default=c.SIXTY_MIN)
    length_text = Column(UnicodeText)
    length_reason = Column(UnicodeText)
    description = Column(UnicodeText)
    unavailable = Column(UnicodeText)
    available = Column(UnicodeText)
    affiliations = Column(UnicodeText)
    past_attendance = Column(UnicodeText)
    presentation = Column(Choice(c.PRESENTATION_OPTS))
    other_presentation = Column(UnicodeText)
    tech_needs = Column(MultiChoice(c.TECH_NEED_OPTS))
    other_tech_needs = Column(UnicodeText)
    need_tables = Column(Boolean, default=False)
    tables_desc = Column(UnicodeText)
    has_cost = Column(Boolean, default=False)
    is_loud = Column(Boolean, default=False)
    tabletop = Column(Boolean, default=False)
    cost_desc = Column(UnicodeText)
    livestream = Column(Choice(c.LIVESTREAM_OPTS), default=c.OPT_IN)
    panelist_bringing = Column(UnicodeText)
    extra_info = Column(UnicodeText)
    applied = Column(UTCDateTime, server_default=utcnow())
    status = Column(Choice(c.PANEL_APP_STATUS_OPTS),
                    default=c.PENDING,
                    admin_only=True)
    comments = Column(UnicodeText, admin_only=True)

    applicants = relationship('PanelApplicant', backref='application')

    email_model_name = 'app'

    @property
    def email(self):
        return self.submitter and self.submitter.email

    @property
    def submitter(self):
        for a in self.applicants:
            if a.submitter:
                return a
        return None

    @property
    def other_panelists(self):
        return [a for a in self.applicants if not a.submitter]

    @property
    def matched_attendees(self):
        return [a.attendee for a in self.applicants if a.attendee_id]

    @property
    def unmatched_applicants(self):
        return [a for a in self.applicants if not a.attendee_id]

    @hybrid_property
    def has_been_accepted(self):
        return self.status == c.ACCEPTED
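Because `has_been_accepted` is a hybrid property, the same expression works on instances and inside queries. A self-contained toy of that dual use; the model and the status constant are assumptions, not the MagModel machinery above.

from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()
ACCEPTED = 'accepted'

class Application(Base):
    __tablename__ = 'applications'
    id = Column(Integer, primary_key=True)
    status = Column(String, default='pending')

    @hybrid_property
    def has_been_accepted(self):
        return self.status == ACCEPTED

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Application(status=ACCEPTED), Application()])
    session.commit()
    # instance access gives a plain boolean; class access gives a SQL expression
    accepted = session.scalars(
        select(Application).where(Application.has_been_accepted)).all()
    assert len(accepted) == 1 and accepted[0].has_been_accepted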
Example #8
class Executable(Base):
    """Class to store information about one file generated by the
    compilation of a submission. Not to be used directly (import it
    from SQLAlchemyAll).

    """
    __tablename__ = 'executables'
    __table_args__ = (
        ForeignKeyConstraint(
            ('submission_id', 'dataset_id'),
            (SubmissionResult.submission_id, SubmissionResult.dataset_id),
            onupdate="CASCADE",
            ondelete="CASCADE"),
        UniqueConstraint('submission_id',
                         'dataset_id',
                         'filename',
                         name='cst_executables_submission_id_filename'),
    )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Filename and digest of the file.
    filename = Column(String, nullable=False)
    digest = Column(String, nullable=False)

    # Submission id and object of the submission.
    submission_id = Column(Integer,
                           ForeignKey(Submission.id,
                                      onupdate="CASCADE",
                                      ondelete="CASCADE"),
                           nullable=False,
                           index=True)

    submission = relationship(Submission,
                              backref=backref("executables",
                                              collection_class=list,
                                              cascade="all, delete-orphan",
                                              passive_deletes=True))

    # Dataset id and object that this executable belongs to.
    dataset_id = Column(Integer,
                        ForeignKey(Dataset.id,
                                   onupdate="CASCADE",
                                   ondelete="CASCADE"),
                        nullable=False,
                        index=True)
    dataset = relationship(Dataset)

    # Submission result owning this executable.
    submission_result = relationship(
        SubmissionResult,
        backref=backref('executables',
                        collection_class=smart_mapped_collection('filename'),
                        cascade="all, delete-orphan",
                        passive_deletes=True))

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {'filename': self.filename, 'digest': self.digest}
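The table-level `ForeignKeyConstraint` ties the (submission_id, dataset_id) pair to the composite primary key of submission_results. A minimal standalone sketch of a composite foreign key declared the same way; the table and column names are illustrative.

from sqlalchemy import (Column, ForeignKeyConstraint, Integer, String,
                        create_engine)
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Parent(Base):
    __tablename__ = 'parents'
    a = Column(Integer, primary_key=True)
    b = Column(Integer, primary_key=True)

class Child(Base):
    __tablename__ = 'children'
    __table_args__ = (
        ForeignKeyConstraint(('a', 'b'), ('parents.a', 'parents.b'),
                             onupdate="CASCADE", ondelete="CASCADE"),
    )
    id = Column(Integer, primary_key=True)
    a = Column(Integer, nullable=False)
    b = Column(Integer, nullable=False)
    filename = Column(String, nullable=False)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Parent(a=1, b=2))
    session.add(Child(a=1, b=2, filename='a.out'))
    session.commit()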
Example #9
class Evaluation(Base):
    """Class to store information about the outcome of the evaluation
    of a submission against one testcase. Not to be used directly
    (import it from SQLAlchemyAll).

    """
    __tablename__ = 'evaluations'
    __table_args__ = (
        ForeignKeyConstraint(
            ('submission_id', 'dataset_id'),
            (SubmissionResult.submission_id, SubmissionResult.dataset_id),
            onupdate="CASCADE",
            ondelete="CASCADE"),
        UniqueConstraint('submission_id',
                         'dataset_id',
                         'num',
                         name='cst_evaluations_submission_id_num'),
    )

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # Number of the testcase
    num = Column(Integer, nullable=False)

    # Submission id of the submission.
    submission_id = Column(Integer,
                           ForeignKey(Submission.id,
                                      onupdate="CASCADE",
                                      ondelete="CASCADE"),
                           nullable=False,
                           index=True)

    # Dataset id and object that this evaluation belongs to.
    dataset_id = Column(Integer,
                        ForeignKey(Dataset.id,
                                   onupdate="CASCADE",
                                   ondelete="CASCADE"),
                        nullable=False,
                        index=True)
    dataset = relationship(Dataset)

    # Submission result owning this evaluation.
    submission_result = relationship(SubmissionResult,
                                     backref=backref(
                                         'evaluations',
                                         collection_class=ordering_list('num'),
                                         order_by=[num],
                                         cascade="all, delete-orphan",
                                         passive_deletes=True))

    # String containing output from the grader (usually "Correct",
    # "Time limit", ...).
    text = Column(String, nullable=True)

    # String containing the outcome of the evaluation (usually 1.0,
    # ...) not necessarily the points awarded, which will be computed by
    # the score type.
    outcome = Column(String, nullable=True)

    # Memory used by the evaluation, in bytes.
    memory_used = Column(Integer, nullable=True)

    # Evaluation's time and wall-clock time, in s.
    execution_time = Column(Float, nullable=True)
    execution_wall_clock_time = Column(Float, nullable=True)

    # Worker shard and sandbox where the evaluation was performed
    evaluation_shard = Column(Integer, nullable=True)
    evaluation_sandbox = Column(String, nullable=True)

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        return {
            'text': self.text,
            'outcome': self.outcome,
            'num': self.num,
            'memory_used': self.memory_used,
            'execution_time': self.execution_time,
            'execution_wall_clock_time': self.execution_wall_clock_time,
            'evaluation_shard': self.evaluation_shard,
            'evaluation_sandbox': self.evaluation_sandbox
        }
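The `ordering_list('num')` collection keeps the `num` column in sync with list position as evaluations are appended. A self-contained toy of that behaviour with simplified, assumed models.

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Result(Base):
    __tablename__ = 'results'
    id = Column(Integer, primary_key=True)
    evaluations = relationship('Eval',
                               order_by='Eval.num',
                               collection_class=ordering_list('num'),
                               cascade='all, delete-orphan')

class Eval(Base):
    __tablename__ = 'evals'
    id = Column(Integer, primary_key=True)
    result_id = Column(Integer, ForeignKey('results.id'), nullable=False)
    num = Column(Integer, nullable=False)
    outcome = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    r = Result()
    r.evaluations.append(Eval(outcome='1.0'))   # num is set to 0 automatically
    r.evaluations.append(Eval(outcome='0.5'))   # num is set to 1 automatically
    session.add(r)
    session.commit()
    assert [e.num for e in r.evaluations] == [0, 1]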
Example #10
class SubmissionResult(Base):
    """Class to store the evaluation results of a submission. Not to
    be used directly (import it from SQLAlchemyAll).

    """
    __tablename__ = 'submission_results'
    __table_args__ = (UniqueConstraint(
        'submission_id',
        'dataset_id',
        name='cst_submission_results_submission_id_dataset_id'), )

    # Primary key is submission_id, dataset_id.
    # Yes, task_id is redundant, as we can get it from the
    # submission, but we need it in order to be a sane foreign key
    # into datasets.
    # Note that there is no constraint to enforce that task_id ==
    # submission.task_id. If you can figure this out, you will win a
    # pony.
    submission_id = Column(Integer,
                           ForeignKey(Submission.id,
                                      onupdate="CASCADE",
                                      ondelete="CASCADE"),
                           primary_key=True)

    dataset_id = Column(Integer,
                        ForeignKey(Dataset.id,
                                   onupdate="CASCADE",
                                   ondelete="CASCADE"),
                        primary_key=True)
    dataset = relationship(Dataset)

    submission = relationship(Submission,
                              backref=backref("results",
                                              collection_class=list,
                                              cascade="all, delete-orphan",
                                              passive_deletes=True))

    # Now below follow the actual result fields.

    # Compilation outcome (can be None = yet to compile, "ok" =
    # compilation successful and we can evaluate, "fail" =
    # compilation unsuccessful, throw it away).
    compilation_outcome = Column(String, nullable=True)

    # String containing output from the sandbox, and the compiler
    # stdout and stderr.
    compilation_text = Column(String, nullable=True)

    # Number of attempts of compilation.
    compilation_tries = Column(Integer, nullable=False, default=0)

    # Worker shard and sandbox where the compilation was performed
    compilation_shard = Column(Integer, nullable=True)
    compilation_sandbox = Column(String, nullable=True)

    # Evaluation outcome (can be None = yet to evaluate, "ok" =
    # evaluation successful). At any time, this should be equal to
    # evaluations != [].
    evaluation_outcome = Column(String, nullable=True)

    # Number of attempts of evaluation.
    evaluation_tries = Column(Integer, nullable=False, default=0)

    # Score as computed by ScoreService. Null means not yet scored.
    score = Column(Float, nullable=True)

    # Score details. It is a string containing *simple* HTML code that
    # AWS (and CWS if the user used a token) uses to display the
    # details of the submission. For example, results for each
    # testcase, subtask, etc.
    score_details = Column(String, nullable=True)

    # The same as the last two fields, but from the point of view of
    # the user (when he/she did not play a token).
    public_score = Column(Float, nullable=True)
    public_score_details = Column(String, nullable=True)

    # Ranking score details. It is a list of strings that are going to
    # be shown in a single row in the table of submission in RWS. JSON
    # encoded.
    ranking_score_details = Column(String, nullable=True)

    # Follows the description of the fields automatically added by
    # SQLAlchemy.
    # executables (dict of Executable objects indexed by filename)
    # evaluations (list of Evaluation objects, one per testcase)

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        res = {
            'dataset_id': self.dataset.id,
            'compilation_outcome': self.compilation_outcome,
            'compilation_tries': self.compilation_tries,
            'compilation_text': self.compilation_text,
            'compilation_shard': self.compilation_shard,
            'compilation_sandbox': self.compilation_sandbox,
            'evaluation_outcome': self.evaluation_outcome,
            'evaluation_tries': self.evaluation_tries,
            'score': self.score,
            'score_details': self.score_details,
            'public_score': self.public_score,
            'public_score_details': self.public_score_details,
            'ranking_score_details': self.ranking_score_details,
            'evaluations': [evaluation.export_to_dict()
                            for evaluation in self.evaluations],
            'executables': [executable.export_to_dict()
                            for executable in self.executables.itervalues()],
        }
        return res

    @classmethod
    def import_from_dict(cls, data, task):
        """Build the object using data from a dictionary.

        """
        data['task'] = task
        data['executables'] = [
            Executable.import_from_dict(executable_data)
            for executable_data in data['executables']
        ]
        data['executables'] = dict([(executable.filename, executable)
                                    for executable in data['executables']])
        data['evaluations'] = [
            Evaluation.import_from_dict(eval_data)
            for eval_data in data['evaluations']
        ]
        return cls(**data)

    @classmethod
    def get_from_id_or_create(cls, submission_id, dataset_id, session):
        # Find an existing submission result.
        submission_result = SubmissionResult.get_from_id(
            (submission_id, dataset_id), session)

        # Create one if it doesn't exist.
        if submission_result is None:
            submission = Submission.get_from_id(submission_id, session)
            dataset = Dataset.get_from_id(dataset_id, session)
            if submission is None or dataset is None:
                return None

            submission_result = SubmissionResult(submission=submission,
                                                 dataset=dataset)

            session.add(submission_result)

        return submission_result

    def compiled(self):
        """Return if the submission has been compiled.

        return (bool): True if compiled, False otherwise.

        """
        return self.compilation_outcome is not None

    def evaluated(self):
        """Return if the submission has been evaluated.

        return (bool): True if evaluated, False otherwise.

        """
        return self.evaluation_outcome is not None

    def scored(self):
        """Return if the submission has been scored.

        return (bool): True if scored, False otherwise.

        """
        return self.score is not None

    def invalidate_compilation(self):
        """Blank all compilation and evaluation outcomes, and the score.

        """
        self.invalidate_evaluation()
        self.compilation_outcome = None
        self.compilation_text = None
        self.compilation_tries = 0
        self.executables = {}

    def invalidate_evaluation(self):
        """Blank the evaluation outcomes and the score.

        """
        self.invalidate_score()
        self.evaluation_outcome = None
        self.evaluations = []
        self.evaluation_tries = 0

    def invalidate_score(self):
        """Blank the score.

        """
        self.score = None
        self.score_details = None
        self.public_score = None
        self.public_score_details = None
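With a composite primary key of (submission_id, dataset_id), primary-key lookups take a tuple, which is what `get_from_id((submission_id, dataset_id), session)` relies on. A standalone sketch with plain SQLAlchemy; the model below is illustrative only.

from sqlalchemy import Column, Float, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class ResultRow(Base):
    __tablename__ = 'result_rows'
    submission_id = Column(Integer, primary_key=True)
    dataset_id = Column(Integer, primary_key=True)
    score = Column(Float, nullable=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(ResultRow(submission_id=1, dataset_id=7, score=100.0))
    session.commit()
    # composite keys are fetched with a tuple, in primary-key order
    row = session.get(ResultRow, (1, 7))
    assert row is not None and row.score == 100.0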
Example #11
class Submission(Base):
    """Class to store a submission. Not to be used directly (import it
    from SQLAlchemyAll).

    """
    __tablename__ = 'submissions'

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # User (id and object) that did the submission.
    user_id = Column(Integer,
                     ForeignKey(User.id,
                                onupdate="CASCADE",
                                ondelete="CASCADE"),
                     nullable=False,
                     index=True)
    user = relationship(User,
                        backref=backref("submissions",
                                        cascade="all, delete-orphan",
                                        passive_deletes=True))

    # Task (id and object) of the submission.
    task_id = Column(Integer,
                     ForeignKey(Task.id,
                                onupdate="CASCADE",
                                ondelete="CASCADE"),
                     nullable=False,
                     index=True)
    task = relationship(Task,
                        backref=backref("submissions",
                                        cascade="all, delete-orphan",
                                        passive_deletes=True))

    # Time of the submission.
    timestamp = Column(DateTime, nullable=False)

    # Language of submission, or None if not applicable.
    language = Column(String, nullable=True)

    # Follows the description of the fields automatically added by
    # SQLAlchemy.
    # files (dict of File objects indexed by filename)
    # token (Token object or None)
    # results (dict of SubmissionResult indexed by dataset_id)

    LANGUAGES = ["c", "cpp", "pas"]
    LANGUAGES_MAP = {
        ".c": "c",
        ".cpp": "cpp",
        ".cc": "cpp",
        ".pas": "pas",
    }

    def export_to_dict(self):
        """Return object data as a dictionary.

        """
        res = {
            'task': self.task.name,
            'timestamp': make_timestamp(self.timestamp),
            'files': [_file.export_to_dict()
                      for _file in self.files.itervalues()],
            'language': self.language,
            'token': self.token,
            'results': [_sr.export_to_dict()
                        for _, _sr in sorted(self.results)],
        }
        if self.token is not None:
            res['token'] = self.token.export_to_dict()
        return res

    @classmethod
    def import_from_dict(cls, data, tasks_by_name):
        """Build the object using data from a dictionary.

        """
        data['files'] = [
            File.import_from_dict(file_data) for file_data in data['files']
        ]
        data['files'] = dict([(_file.filename, _file)
                              for _file in data['files']])
        if data['token'] is not None:
            data['token'] = Token.import_from_dict(data['token'])
        data['task'] = tasks_by_name[data['task']]
        data['user'] = None
        data['timestamp'] = make_datetime(data['timestamp'])
        data['results'] = [
            SubmissionResult.import_from_dict(_r, data['task'])
            for _r in data['results']
        ]
        data['results'] = dict([(_r.dataset_id, _r) for _r in data['results']])
        return cls(**data)

    def get_result(self, dataset_id):
        return SubmissionResult.get_from_id((self.id, dataset_id),
                                            self.sa_session)

    def tokened(self):
        """Return if the user played a token against the submission.

        return (bool): True if tokened, False otherwise.

        """
        return self.token is not None

    def play_token(self, timestamp=None):
        """Tell the submission that a token has been used.

        timestamp (int): the time the token has been played.

        """
        self.token = Token(timestamp=timestamp)
Example #12
class Record(BaseORMModel, RecordDetail, table=True):  # type: ignore[call-arg]
    __tablename__ = "records"
    # __table_args__ = (UniqueConstraint("domain_id", "url"),)

    domain_id: UUID = Field(sa_column=Column(
        GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False))
    # domain: "Domain" = Relationship(back_populates="records")

    problem_set_id: Optional[UUID] = Field(
        sa_column=Column(GUID,
                         ForeignKey("problem_sets.id", ondelete="SET NULL"),
                         nullable=True))
    problem_set: Optional["ProblemSet"] = Relationship(
        back_populates="records")

    problem_id: Optional[UUID] = Field(sa_column=Column(
        GUID, ForeignKey("problems.id", ondelete="SET NULL"), nullable=True))
    problem: Optional["Problem"] = Relationship(back_populates="records")

    problem_config_id: Optional[UUID] = Field(
        sa_column=Column(GUID,
                         ForeignKey("problem_configs.id", ondelete="SET NULL"),
                         nullable=True))
    problem_config: Optional["ProblemConfig"] = Relationship(
        back_populates="records")

    committer_id: Optional[UUID] = Field(sa_column=Column(
        GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True))
    committer: Optional["User"] = Relationship(
        back_populates="committed_records",
        sa_relationship_kwargs={"foreign_keys": "[Record.committer_id]"},
    )

    judger_id: Optional[UUID] = Field(sa_column=Column(
        GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True))
    judger: Optional["User"] = Relationship(
        back_populates="judged_records",
        sa_relationship_kwargs={"foreign_keys": "[Record.judger_id]"},
    )

    @classmethod
    async def submit(
        cls,
        *,
        background_tasks: BackgroundTasks,
        celery_app: Celery,
        problem_submit: ProblemSolutionSubmit,
        problem_set: Optional["ProblemSet"],
        problem: "Problem",
        user: "User",
    ) -> "Record":
        problem_config = await problem.get_latest_problem_config()
        if problem_config is None:
            raise BizError(ErrorCode.ProblemConfigNotFoundError)

        if (problem_submit.code_type == RecordCodeType.archive
                and problem_submit.file is None):
            raise BizError(ErrorCode.Error)

        problem_set_id = problem_set.id if problem_set else None
        record = cls(
            domain_id=problem.domain_id,
            problem_set_id=problem_set_id,
            problem_id=problem.id,
            problem_config_id=problem_config.id,
            committer_id=user.id,
        )

        await record.save_model(commit=False, refresh=False)
        problem.num_submit += 1
        await problem.save_model(commit=True, refresh=True)
        await record.refresh_model()

        key = cls.get_user_latest_record_key(problem_set_id, problem.id,
                                             user.id)
        value = RecordPreview(id=record.id,
                              state=record.state,
                              created_at=record.created_at)

        cache = get_redis_cache()
        await cache.set(key, value, namespace="user_latest_records")

        background_tasks.add_task(
            record.upload,
            celery_app=celery_app,
            problem_submit=problem_submit,
            problem=problem,
        )

        return record

    async def upload(
        self,
        celery_app: Celery,
        problem_submit: ProblemSolutionSubmit,
        problem: "Problem",
    ) -> None:
        def sync_func() -> None:
            lakefs_record = LakeFSRecord(problem, self)
            lakefs_record.ensure_branch()

            if problem_submit.code_type == RecordCodeType.archive:
                if problem_submit.file is None:
                    raise BizError(ErrorCode.Error)
                lakefs_record.upload_archive(problem_submit.file.filename,
                                             problem_submit.file.file)

            commit = lakefs_record.commit(f"record: {self.id}")
            logger.info(commit)
            self.state = RecordState.queueing
            self.commit_id = commit.id

        try:
            await run_in_threadpool(sync_func)
            self.task_id = uuid4()
            await self.save_model()
            await self.create_task(celery_app)
            logger.info("upload record success: {}", self)
        except Exception as e:
            logger.error("upload record failed: {}", self)
            logger.exception(e)
            self.state = RecordState.failed
            await self.save_model()

    async def create_task(self, celery_app: Celery) -> AsyncResult:
        # create a task in celery with this record
        # TODO: get queue from problem config or somewhere else
        result = celery_app.send_task(
            "joj.tiger.compile",
            args=[self.dict(), ""],
            queue="joj.tiger.official.default",
            task_id=str(self.task_id),
        )
        return result

    @classmethod
    def get_user_latest_record_key(cls, problem_set_id: Optional[UUID],
                                   problem_id: UUID, user_id: UUID) -> str:
        if problem_set_id is None:
            return "problem:{}:user:{}".format(problem_id, user_id)
        return "problem_set:{}:problem:{}:user:{}".format(
            problem_set_id, problem_id, user_id)

    @classmethod
    async def get_user_latest_record(
        cls,
        problem_set_id: Optional[UUID],
        problem_id: UUID,
        user_id: UUID,
        use_cache: bool = True,
    ) -> Optional[RecordPreview]:
        cache = get_redis_cache()
        key = cls.get_user_latest_record_key(problem_set_id, problem_id,
                                             user_id)
        if use_cache:
            value = await cache.get(key, namespace="user_latest_records")
            try:
                data = value["record"]
                if data is None:
                    return None
                return RecordPreview(**data)
            except (TypeError, ValueError, KeyError):
                pass
            except Exception as e:
                logger.error("error when loading record from cache:")
                logger.exception(e)

        statement = (
            cls.sql_select().where(cls.problem_id == problem_id).where(
                cls.committer_id == user_id).order_by(
                    cls.created_at.desc())  # type: ignore
            .limit(1))
        if problem_set_id is None:
            statement = statement.where(
                cls.problem_set_id.is_(None))  # type: ignore
        else:
            statement = statement.where(cls.problem_set_id == problem_set_id)
        result = await cls.session_exec(statement)
        record_model: "Record" = result.one_or_none()
        if record_model is None:
            record = None
        else:
            record = RecordPreview(**record_model.dict())
        if use_cache:
            await cache.set(
                key,
                {"record": record.dict() if record else None},
                namespace="user_latest_records",
            )
        return record

    @classmethod
    async def get_user_latest_records(
            cls, problem_set_id: Optional[UUID], problem_ids: List[UUID],
            user_id: UUID) -> List[Optional[RecordPreview]]:
        cache = get_redis_cache()
        keys = [
            cls.get_user_latest_record_key(problem_set_id, problem_id, user_id)
            for problem_id in problem_ids
        ]
        values = []
        if keys:
            values = await cache.multi_get(keys,
                                           namespace="user_latest_records")
        records = []
        updated_cache_pairs = []
        for i, value in enumerate(values):
            record = None
            try:
                data = value["record"]
                if data is not None:
                    record = RecordPreview(**data)
                use_cache = True
            except (TypeError, ValueError, KeyError):
                use_cache = False
            except Exception as e:
                use_cache = False
                logger.error("error when loading records from cache:")
                logger.exception(e)
            if not use_cache:
                record = await cls.get_user_latest_record(
                    problem_set_id=problem_set_id,
                    problem_id=problem_ids[i],
                    user_id=user_id,
                    use_cache=False,
                )
                updated_cache_pairs.append(
                    (keys[i], record.dict() if record else None))
            records.append(record)
        if updated_cache_pairs:
            await cache.multi_set(updated_cache_pairs,
                                  namespace="user_latest_records")
        logger.info(
            "cache: get {} keys, set {} keys",
            len(problem_ids) - len(updated_cache_pairs),
            len(updated_cache_pairs),
        )
        return records
Example #13
class User(Base):
  name = Column('name', Unicode(255, collation='utf8_unicode_ci'), nullable=False)
  sirname = Column('sirname', Unicode(255, collation='utf8_unicode_ci'), nullable=False)
  username = Column('username', Unicode(255, collation='utf8_unicode_ci'), nullable=False, unique=True)
  password = Column('password', Unicode(255, collation='utf8_unicode_ci'), nullable=False)
  last_login = Column('last_login', DateTime)
  email = Column('email', Unicode(255, collation='utf8_unicode_ci'), unique=True)
  api_key = Column('apikey', Unicode(255, collation='utf8_unicode_ci'), index=True)
  gpg_key = Column('gpg_key', UnicodeText(collation='utf8_unicode_ci'))
  activated = Column('activated', DateTime)
  activation_sent = Column('activation_sent', DateTime)
  activation_str = Column('activation_str', Unicode(255, collation='utf8_unicode_ci'))
  dbcode = Column('code', Integer, default=0, nullable=False)
  __bit_code = None

  group_id = Column('group_id', BigInteger, ForeignKey('groups.group_id', onupdate='restrict', ondelete='restrict'), index=True)
  group = relationship(Group, lazy='joined')
  plain_password = None
  notifications = Column('notifications', Boolean, default=True, nullable=False)

  @property
  def can_access(self):
    return (not self.permissions.disabled) and self.is_activated

  @property
  def is_activated(self):
    return self.activated

  @property
  def permissions(self):
    """
    Property for the bit_value
    """
    if self.__bit_code is None:
      if self.dbcode is None:
        self.__bit_code = UserRights('0', self)
      else:
        self.__bit_code = UserRights(self.dbcode, self)
    return self.__bit_code

  @property
  def display_name(self):
    return '{0} {1}'.format(self.sirname, self.name)

  def validate(self):
    """
    Checks if the attributes of the class are valid

    :returns: Boolean
    """
    # TODO: Edit cannot change username!
    # TODO: Verify validation of User Object
    if not (self.password or self.username):
      return False
    ObjectValidator.validateAlNum(self,
                                  'username',
                                  minLength=3,
                                  maxLength=254)
    # Don't update if the password is already a hash
    if not (self.password == 'EXTERNALAUTH') and re.match('^[0-9a-f]{40}$',
                                                          self.password) is None:
      ObjectValidator.validateRegex(self,
                                    'password',
                                    r'(?=^.{8,}$)(?=.*[a-z])(?=.*[A-Z])(?=.*[0-9])(?=.*[\W_])(?=^.*[^\s].*$).*$',
                                    'Password has to be set and contain Upper and Lower cases, symbols and numbers and have at least a length of 8'
                                    )

    ObjectValidator.validateEmailAddress(self, 'email')
    ObjectValidator.validateAlNum(self, 'name', minLength=3, withSymbols=True)
    ObjectValidator.validateAlNum(self, 'sirname', minLength=3, withSymbols=True)

    # if self.gpg_key:
    #  ObjectValidator.validateRegex(self,
    #                                'gpg_key',
    #                                '-----BEGIN PGP PUBLIC KEY BLOCK-----(.*?)-----END PGP PUBLIC KEY BLOCK-----',
    #                                'GPG Key not under the right format')
    if self.last_login is not None:
      ObjectValidator.validateDateTime(self, 'last_login')
    return ObjectValidator.isObjectValid(self)

  def to_dict(self, complete=True, inflated=False):
    if self.group:
      group_id = self.group.uuid
    else:
      group_id = None
    if complete:
      return {'identifier': self.convert_value(self.uuid),
              'name': self.convert_value(self.name),
              'activated': self.convert_value(self.activated),
              'activation_send': self.convert_value(self.activation_sent),
              'activation_str': self.convert_value(self.activation_str),
              'api_key': self.convert_value(self.api_key),
              'dbcode': self.convert_value(self.dbcode),
              'email': self.convert_value(self.email),
              'gpg_key': self.convert_value(self.gpg_key),
              'group_id': self.convert_value(group_id),
              'permissions': self.permissions.to_dict(),
              # TODO: add group to dict fct
              'group': None,
              'last_login': self.convert_value(self.last_login),
              'password': self.convert_value(self.password),
              'sirname': self.convert_value(self.sirname),
              'username': self.convert_value(self.username),
              'notifications': self.convert_value(self.notifications)
              }
    else:
      return {'identifier': self.uuid,
              'username': self.username
              }

  def populate(self, json):
    self.name = json.get('name', None)
    self.email = json.get('email', None)
    self.gpg_key = json.get('gpg_key', None)
    self.api_key = json.get('api_key', None)
    self.sirname = json.get('sirname', None)
    self.username = json.get('username', None)
    self.plain_password = json.get('password', None)
    self.notifications = json.get('notifications', None)
    # permissions setting
    self.permissions.populate(json.get('permissions', {}))
Example #14
class Order(Base):
    __tablename__ = 'orders'
    id = Column(Integer(), primary_key=True)
    product_id = Column(Integer(), ForeignKey('products.id'))
    user_id = Column(Integer(), ForeignKey('users.id'))
Example #15
class Participation(Base):
    """Class to store a single participation of a user in a contest.

    """
    __tablename__ = 'participations'

    # Auto increment primary key.
    id = Column(Integer, primary_key=True)

    # The user can log into CWS only from this IP address or subnet.
    ip = Column(CastingArray(CIDR), nullable=True)

    # Starting time: for contests where every user has at most x hours
    # of the y > x hours totally available, this is the time the user
    # decided to start their time-frame.
    starting_time = Column(DateTime, nullable=True)

    # A shift in the time interval during which the user is allowed to
    # submit.
    delay_time = Column(Interval,
                        CheckConstraint("delay_time >= '0 seconds'"),
                        nullable=False,
                        default=timedelta())

    # An extra amount of time allocated for this user.
    extra_time = Column(Interval,
                        CheckConstraint("extra_time >= '0 seconds'"),
                        nullable=False,
                        default=timedelta())

    # Contest-specific password. If this password is not null then the
    # traditional user.password field will be "replaced" by this field's
    # value (only for this participation).
    password = Column(Unicode, nullable=True)

    # A hidden participation (e.g. does not appear in public rankings), can
    # also be used for debugging purposes.
    hidden = Column(Boolean, nullable=False, default=False)

    # An unrestricted participation: the usual limits (contest time,
    # maximum number of submissions, minimum interval between submissions,
    # maximum number of user tests, minimum interval between user tests)
    # do not apply; can also be used for debugging purposes.
    unrestricted = Column(Boolean, nullable=False, default=False)

    # Contest (id and object) to which the user is participating.
    contest_id = Column(Integer,
                        ForeignKey(Contest.id,
                                   onupdate="CASCADE",
                                   ondelete="CASCADE"),
                        nullable=False,
                        index=True)
    contest = relationship(Contest,
                           backref=backref("participations",
                                           cascade="all, delete-orphan",
                                           passive_deletes=True))

    # User (id and object) which is participating.
    user_id = Column(Integer,
                     ForeignKey(User.id,
                                onupdate="CASCADE",
                                ondelete="CASCADE"),
                     nullable=False,
                     index=True)
    user = relationship(User,
                        backref=backref("participations",
                                        cascade="all, delete-orphan",
                                        passive_deletes=True))
    __table_args__ = (UniqueConstraint('contest_id', 'user_id'), )

    # Team (id and object) that the user is representing with this
    # participation.
    team_id = Column(Integer,
                     ForeignKey(Team.id,
                                onupdate="CASCADE",
                                ondelete="RESTRICT"),
                     nullable=True)
    team = relationship(Team,
                        backref=backref("participations",
                                        cascade="all, delete-orphan",
                                        passive_deletes=True))
Example #16
class Task(Base):
    """
    A job that gets executed.  Has a unique set of params within its Stage.
    """
    __tablename__ = "task"
    # FIXME causes a problem with mysql?
    __table_args__ = (UniqueConstraint("stage_id", "uid", name="_uc1"), )
    drm_options = {}

    id = Column(Integer, primary_key=True)
    uid = Column(String(255), index=True)

    mem_req = Column(Integer)
    core_req = Column(Integer)
    cpu_req = synonym("core_req")
    time_req = Column(Integer)
    gpu_req = Column(Integer)
    NOOP = Column(Boolean, nullable=False)
    params = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False)
    stage_id = Column(ForeignKey("stage.id", ondelete="CASCADE"),
                      nullable=False,
                      index=True)
    log_dir = Column(String(255))
    # output_dir = Column(String(255))
    _status = Column(
        Enum_ColumnType(TaskStatus, length=255),
        default=TaskStatus.no_attempt,
        nullable=False,
    )
    status_reason = Column(String(255), nullable=True)
    successful = Column(Boolean, nullable=False)
    started_on = Column(
        DateTime
    )  # FIXME this should probably be deleted.  Too hard to determine.
    submitted_on = Column(DateTime)
    finished_on = Column(DateTime)
    attempt = Column(Integer, nullable=False)
    must_succeed = Column(Boolean, nullable=False)
    drm = Column(String(255))
    # FIXME consider making job_class a proper field next time the schema changes
    # job_class = Column(String(255))
    queue = Column(String(255))
    max_attempts = Column(Integer)
    parents = relationship(
        "Task",
        secondary=TaskEdge.__table__,
        primaryjoin=id == TaskEdge.parent_id,
        secondaryjoin=id == TaskEdge.child_id,
        backref="children",
        passive_deletes=True,
        cascade="save-update, merge, delete",
    )

    # input_map = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False)
    # output_map = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False)

    @property
    def input_map(self):
        d = dict()
        for key, val in list(self.params.items()):
            if key.startswith("in_"):
                d[key] = val
        return d

    @property
    def output_map(self):
        d = dict()
        for key, val in list(self.params.items()):
            if key.startswith("out_"):
                d[key] = val
        return d

    @property
    def input_files(self):
        return list(self.input_map.values())

    @property
    def output_files(self):
        return list(self.output_map.values())

    # command = Column(Text)

    drm_native_specification = Column(String(255))
    drm_jobID = Column(String(255))

    profile_fields = [
        "wall_time",
        "cpu_time",
        "percent_cpu",
        "user_time",
        "system_time",
        "io_read_count",
        "io_write_count",
        "io_read_kb",
        "io_write_kb",
        "ctx_switch_voluntary",
        "ctx_switch_involuntary",
        "avg_rss_mem_kb",
        "max_rss_mem_kb",
        "avg_vms_mem_kb",
        "max_vms_mem_kb",
        "avg_num_threads",
        "max_num_threads",
        "avg_num_fds",
        "max_num_fds",
        "exit_status",
    ]
    exclude_from_dict = profile_fields + [
        "command",
        "info",
        "input_files",
        "output_files",
    ]

    exit_status = Column(Integer)

    percent_cpu = Column(Integer)
    # time in seconds
    wall_time = Column(Integer)

    cpu_time = Column(Integer)
    user_time = Column(Integer)
    system_time = Column(Integer)

    avg_rss_mem_kb = Column(Integer)
    max_rss_mem_kb = Column(Integer)
    avg_vms_mem_kb = Column(Integer)
    max_vms_mem_kb = Column(Integer)

    io_read_count = Column(Integer)
    io_write_count = Column(Integer)
    io_wait = Column(Integer)
    io_read_kb = Column(Integer)
    io_write_kb = Column(Integer)

    ctx_switch_voluntary = Column(Integer)
    ctx_switch_involuntary = Column(Integer)

    avg_num_threads = Column(Integer)
    max_num_threads = Column(Integer)

    avg_num_fds = Column(Integer)
    max_num_fds = Column(Integer)

    extra = Column(MutableDict.as_mutable(JSONEncodedDict), nullable=False)

    @declared_attr
    def status(cls):
        def get_status(self):
            return self._status

        def set_status(self, value):
            if self._status != value:
                self._status = value
                signal_task_status_change.send(self)

        return synonym("_status", descriptor=property(get_status, set_status))

    @property
    def workflow(self):
        return self.stage.workflow

    @property
    def log(self):
        return self.workflow.log

    @property
    def finished(self):
        return self.status in {
            TaskStatus.successful,
            TaskStatus.killed,
            TaskStatus.failed,
        }

    _cache_profile = None

    output_profile_path = logplus("profile.json")
    output_command_script_path = logplus("command.bash")
    output_stderr_path = logplus("stderr.txt")
    output_stdout_path = logplus("stdout.txt")

    @property
    def stdout_text(self):
        return readfile(self.output_stdout_path)

    @property
    def stdout_text_brief(self):
        lines = self.stdout_text.split("\n")
        if len(lines) <= 50:
            return "\n".join(lines)
        else:
            return "*** TRUNCATED (showing last 50 lines)... \n" + "\n".join(
                lines[-50:])

    @property
    def stderr_text(self):
        r = readfile(self.output_stderr_path)
        if r == "file does not exist":
            if self.drm == "lsf" and self.drm_jobID:
                r += "\n\nbpeek %s output:\n\n" % self.drm_jobID
                try:
                    r += codecs.decode(
                        sp.check_output("bpeek %s" % self.drm_jobID,
                                        shell=True),
                        "utf-8",
                    )
                except Exception as e:
                    r += str(e)
        return r

    @property
    def stderr_text_brief(self):
        lines = self.stderr_text.split("\n")
        if len(lines) <= 50:
            return "\n".join(lines)
        else:
            return "*** TRUNCATED (showing last 50 lines)... \n" + "\n".join(
                lines[-50:])

    @property
    def command_script_text(self):
        # return self.command
        return readfile(
            self.output_command_script_path).strip() or self.command

    def descendants(self, include_self=False):
        """
        :return: (list) all tasks that descend from this task in the task_graph
        """
        x = nx.descendants(self.workflow.task_graph(), self)
        if include_self:
            return sorted({self}.union(x), key=lambda task: task.stage.number)
        else:
            return x

    def ancestors(self, include_self=False):
        x = nx.ancestors(self.workflow.task_graph(), self)
        if include_self:
            return sorted({self}.union(x), key=lambda task: task.stage.number)
        else:
            return x

    @property
    def label(self):
        """Label used for the taskgraph image"""
        params = ("" if len(self.params) == 0 else "\\n {0}".format("\\n".join(
            ["{0}: {1}".format(k, v) for k, v in list(self.params.items())])))

        return "[%s] %s%s" % (self.id, self.stage.name, params)

    def args_as_query_string(self):
        import urllib.request, urllib.parse, urllib.error

        return urllib.parse.urlencode(self.params)

    def delete(self, descendants=False):
        if descendants:
            tasks_to_delete = self.descendants(include_self=True)
            self.log.debug("Deleting %s and %s of its descendants" %
                           (self, len(tasks_to_delete) - 1))
            for t in tasks_to_delete:
                self.session.delete(t)
        else:
            self.log.debug("Deleting %s" % self)
            self.session.delete(self)

        self.session.commit()

    @property
    def url(self):
        return url_for(
            "cosmos.task",
            ex_name=self.workflow.name,
            stage_name=self.stage.name,
            task_id=self.id,
        )

    @property
    def params_pretty(self):
        return "%s" % ", ".join("%s=%s" %
                                (k, "'%s'" % v if isinstance(v, str) else v)
                                for k, v in list(self.params.items()))

    @property
    def params_pformat(self):
        return pprint.pformat(self.params, indent=2, width=1)

    def __repr__(self):
        return "<Task[%s] %s(uid='%s')>" % (
            self.id or "id_%s" % id(self),
            self.stage.name if self.stage else "",
            self.uid,
        )

    def __str__(self):
        return self.__repr__()

    # FIXME consider making job_class a proper field next time the schema changes
    def __init__(self, **kwargs):
        self.job_class = kwargs.pop("job_class", None)
        _declarative_constructor(self, **kwargs)

    @reconstructor
    def init_on_load(self):
        self.job_class = None
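
The `status` attribute above pairs SQLAlchemy's `synonym` with a custom property so that `signal_task_status_change` fires only when the value actually changes. A minimal, self-contained sketch of that pattern (the model, the blinker signal, and all names here are illustrative, not taken from the project):

from blinker import Signal
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, synonym

signal_status_change = Signal()
SketchBase = declarative_base()


class Item(SketchBase):
    __tablename__ = 'item'

    id = Column(Integer, primary_key=True)
    _status = Column('status', String(32), default='new')

    def _get_status(self):
        return self._status

    def _set_status(self, value):
        # Fire the signal only when the value actually changes.
        if self._status != value:
            self._status = value
            signal_status_change.send(self)

    # Expose `_status` as `status`, routing writes through the setter above.
    status = synonym('_status', descriptor=property(_get_status, _set_status))


def on_status_change(sender):
    print('status ->', sender.status)


signal_status_change.connect(on_status_change)

engine = create_engine('sqlite://')
SketchBase.metadata.create_all(engine)
with Session(engine) as session:
    item = Item()
    item.status = 'running'  # prints: status -> running
    session.add(item)
    session.commit()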
Example #17
0
class MITSTimes(MagModel):
    team_id = Column(ForeignKey('mits_team.id'))
    availability = Column(MultiChoice(c.MITS_SCHEDULE_OPTS))
    multiple_tables = Column(MultiChoice(c.MITS_SCHEDULE_OPTS))
Example #18
0
import logging
from datetime import datetime

from sqlalchemy import Table, Column, Integer, ForeignKey
from sqlalchemy.orm import relation
from dateutil.parser import parse as dateutil_parse

from flexget import db_schema, plugin
from flexget.utils.soup import get_soup
from flexget.event import event
from flexget.utils import requests
from flexget.utils.database import year_property

log = logging.getLogger('api_bluray')
Base = db_schema.versioned_base('api_bluray', 0)

# association tables
genres_table = Table(
    'bluray_movie_genres', Base.metadata,
    Column('movie_id', Integer, ForeignKey('bluray_movies.id')),
    Column('genre_name', Integer, ForeignKey('bluray_genres.name')))
Base.register_table(genres_table)

BASE_URL = 'http://m.blu-ray.com/'


def bluray_request(endpoint, **params):
    full_url = BASE_URL + endpoint
    return requests.get(full_url, params=params).json()


def extract_release_date(release_date):
    if not release_date or release_date.lower() == 'no release date':
        release_date = 'Dec 31, %s' % datetime.now().year
    return dateutil_parse(release_date).date()
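
A quick usage sketch of the fallback behaviour above (illustrative; it assumes the module imports shown):

from datetime import date, datetime

assert extract_release_date('Oct 07, 2016') == date(2016, 10, 7)
# Missing dates fall back to Dec 31 of the current year.
assert extract_release_date('No release date') == date(datetime.now().year, 12, 31)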
Example #19
0
class Run(db.Model):

    """ A run is a generic grouping object for background operations
    that perform logging to the frontend. """

    __tablename__ = 'run'

    # Status values
    STATUS_RUNNING = 'running'
    STATUS_COMPLETE = 'complete'
    STATUS_FAILED = 'failed'
    STATUS_REMOVED = 'removed'

    # Operation values for database, two operations possible
    OPERATION_SAMPLE = 'sample'
    OPERATION_IMPORT = 'import'

    id = Column(Integer, primary_key=True)
    operation = Column(Unicode(2000))
    status = Column(Unicode(2000))
    time_start = Column(DateTime, default=datetime.utcnow)
    time_end = Column(DateTime)
    dataset_id = Column(Integer, ForeignKey('dataset.id'), nullable=True)
    source_id = Column(Integer, ForeignKey('source.id'), nullable=True)

    dataset = relationship(Dataset,
                           backref=backref('runs',
                                           order_by='Run.time_start.desc()',
                                           lazy='dynamic'))
    source = relationship(Source,
                          backref=backref('runs',
                                          order_by='Run.time_start.desc()',
                                          lazy='dynamic'))

    def __init__(self, operation, dataset, source, status=None):
        self.operation = operation
        if not status:
            self.status = self.STATUS_RUNNING
        else:
            self.status = status
        self.dataset = dataset
        self.source = source


    def records_as_json(self):
        returnset = []
        for record in self.records:
            returnset.append(record.as_json())
        return returnset


    @property
    def successful_sample(self):
        """
        Returns True if the run was a sample operation (not full import)
        and ran without failures.
        """
        return self.operation == self.OPERATION_SAMPLE and \
            self.status == self.STATUS_COMPLETE

    @property
    def successful_load(self):
        """
        Returns True if the run was an import operation (not a sample)
        and ran without failures.
        """
        return self.operation == self.OPERATION_IMPORT and \
            self.status == self.STATUS_COMPLETE

    @property
    def is_running(self):
        """
        Returns True if the run is currently running
        """
        return self.status == self.STATUS_RUNNING

    @classmethod
    def by_id(cls, id):
        return db.session.query(cls).filter_by(id=id).first()

    def __repr__(self):
        if not self.source:
            return "<OldRun(%r)>" % (self.id)
        else:
            return "<Run(%r, %r)>" % (self.source.id, self.id)
Example #20
0
from uber.models.attendee import Attendee
from uber.models.types import default_relationship as relationship, Choice, DefaultColumn as Column


__all__ = [
    'dept_membership_dept_role', 'job_required_role', 'Department',
    'DeptChecklistItem', 'DeptMembership', 'DeptMembershipRequest',
    'DeptRole', 'Job', 'Shift']


# Many to many association table to represent the DeptRoles fulfilled
# by a DeptMembership
dept_membership_dept_role = Table(
    'dept_membership_dept_role',
    MagModel.metadata,
    Column('dept_membership_id', UUID, ForeignKey('dept_membership.id')),
    Column('dept_role_id', UUID, ForeignKey('dept_role.id')),
    UniqueConstraint('dept_membership_id', 'dept_role_id'),
    Index('ix_dept_membership_dept_role_dept_role_id', 'dept_role_id'),
    Index('ix_dept_membership_dept_role_dept_membership_id', 'dept_membership_id'),
)


# Many to many association table to represent the DeptRoles required
# to fulfill a Job
job_required_role = Table(
    'job_required_role',
    MagModel.metadata,
    Column('dept_role_id', UUID, ForeignKey('dept_role.id')),
    Column('job_id', UUID, ForeignKey('job.id')),
    UniqueConstraint('dept_role_id', 'job_id'),
Example #21
0
File: app.py Project: rajzone/pybossa
class App(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'app'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    long_tasks = Column(Integer, default=0)
    #: If the project is hidden
    hidden = Column(Integer, default=0)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    time_estimate = Column(Integer, default=0)
    time_limit = Column(Integer, default=0)
    calibration_frac = Column(Float, default=0)
    bolt_course_id = Column(Integer, default=0)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSON
    info = Column(JSONEncodedDict, default=dict)

    tasks = relationship(Task,
                         cascade='all, delete, delete-orphan',
                         backref='app')
    task_runs = relationship(TaskRun,
                             backref='app',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost,
                             cascade='all, delete-orphan',
                             backref='app')

    def needs_password(self):
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        return self.info.get('passwd_hash')

    def get_passwd(self):
        if self.needs_password():
            return signer.loads(self.get_passwd_hash())
        return None

    def set_password(self, password):
        if len(password) > 1:
            self.info['passwd_hash'] = signer.dumps(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        if self.needs_password():
            return self.get_passwd() == password
        return False
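
An illustrative round trip of the password helpers above, assuming the default declarative constructor and an itsdangerous-style `signer` (i.e. `signer.loads(signer.dumps(x)) == x`), which is what the methods imply:

app = App(name='Birds', short_name='birds', description='Classify birds')
app.info = {}                      # the dict default is only applied at flush time
assert app.set_password('s3cret')  # stores signer.dumps('s3cret') in info
assert app.check_password('s3cret')
assert not app.check_password('wrong')
assert not app.set_password('x')   # passwords of length <= 1 are rejected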
Example #22
0
class Department(MagModel):
    name = Column(UnicodeText, unique=True)
    description = Column(UnicodeText)
    solicits_volunteers = Column(Boolean, default=True)
    is_shiftless = Column(Boolean, default=False)
    parent_id = Column(UUID, ForeignKey('department.id'), nullable=True)
    is_setup_approval_exempt = Column(Boolean, default=False)
    is_teardown_approval_exempt = Column(Boolean, default=False)
    max_consecutive_hours = Column(Integer, default=0)

    jobs = relationship('Job', backref='department')

    dept_checklist_items = relationship('DeptChecklistItem', backref='department')
    dept_roles = relationship('DeptRole', backref='department')
    dept_heads = relationship(
        'Attendee',
        backref=backref('headed_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'Department.id == DeptMembership.department_id, '
                    'DeptMembership.is_dept_head == True)',
        secondary='dept_membership',
        order_by='Attendee.full_name',
        viewonly=True)
    checklist_admins = relationship(
        'Attendee',
        backref=backref('checklist_admin_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'Department.id == DeptMembership.department_id, '
                    'DeptMembership.is_checklist_admin == True)',
        secondary='dept_membership',
        order_by='Attendee.full_name',
        viewonly=True)
    members_with_inherent_role = relationship(
        'Attendee',
        backref=backref('depts_with_inherent_role', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'Department.id == DeptMembership.department_id, '
                    'DeptMembership.has_inherent_role)',
        secondary='dept_membership',
        order_by='Attendee.full_name',
        viewonly=True)
    members_who_can_admin_checklist = relationship(
        'Attendee',
        backref=backref('can_admin_checklist_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'Department.id == DeptMembership.department_id, '
                    'or_('
                    'DeptMembership.is_checklist_admin == True, '
                    'DeptMembership.is_dept_head == True))',
        secondary='dept_membership',
        order_by='Attendee.full_name',
        viewonly=True)
    pocs = relationship(
        'Attendee',
        backref=backref('poc_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'Department.id == DeptMembership.department_id, '
                    'DeptMembership.is_poc == True)',
        secondary='dept_membership',
        order_by='Attendee.full_name',
        viewonly=True)
    members = relationship(
        'Attendee',
        backref=backref('assigned_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        order_by='Attendee.full_name',
        secondary='dept_membership')
    memberships = relationship('DeptMembership', backref='department')
    membership_requests = relationship('DeptMembershipRequest', backref='department')
    explicitly_requesting_attendees = relationship(
        'Attendee',
        backref=backref('explicitly_requested_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        secondary='dept_membership_request',
        order_by='Attendee.full_name')
    requesting_attendees = relationship(
        'Attendee',
        backref=backref('requested_depts', order_by='Department.name'),
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='or_('
                    'DeptMembershipRequest.department_id == Department.id, '
                    'DeptMembershipRequest.department_id == None)',
        secondary='dept_membership_request',
        order_by='Attendee.full_name',
        viewonly=True)
    unassigned_requesting_attendees = relationship(
        'Attendee',
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_(or_('
                    'DeptMembershipRequest.department_id == Department.id, '
                    'DeptMembershipRequest.department_id == None), '
                    'not_(exists().where(and_('
                    'DeptMembership.department_id == Department.id, '
                    'DeptMembership.attendee_id == DeptMembershipRequest.attendee_id))))',
        secondary='dept_membership_request',
        order_by='Attendee.full_name',
        viewonly=True)
    unassigned_explicitly_requesting_attendees = relationship(
        'Attendee',
        cascade='save-update,merge,refresh-expire,expunge',
        primaryjoin='and_('
                    'DeptMembershipRequest.department_id == Department.id, '
                    'not_(exists().where(and_('
                    'DeptMembership.department_id == Department.id, '
                    'DeptMembership.attendee_id == DeptMembershipRequest.attendee_id))))',
        secondary='dept_membership_request',
        order_by='Attendee.full_name',
        viewonly=True)
    parent = relationship(
        'Department',
        backref=backref('sub_depts', order_by='Department.name', cascade='all,delete-orphan'),
        cascade='save-update,merge,refresh-expire,expunge',
        remote_side='Department.id',
        single_parent=True)

    @hybrid_property
    def member_count(self):
        return len(self.memberships)

    @member_count.expression
    def member_count(cls):
        return func.count(cls.memberships)

    @property
    def member_emails(self):
        return [a.email for a in self.members if a.email]

    @property
    def members_with_shifts_emails(self):
        return [a.email for a in self.members if a.weighted_hours_in(self) > 0]

    @classmethod
    def to_id(cls, department):
        if not department:
            return None

        if isinstance(department, six.string_types):
            try:
                department = int(department)
            except ValueError:
                return department

        if isinstance(department, int):
            # This is the same algorithm used by the migration script to
            # convert c.JOB_LOCATIONS into department ids in the database.
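            # For example, to_id(5) yields '0000005' plus the last 29
            # characters of uuid5(cls.NAMESPACE, '5') (illustrative values).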
            prefix = '{:07x}'.format(department)
            return prefix + str(uuid.uuid5(cls.NAMESPACE, str(department)))[7:]

        return department.id

    def checklist_item_for_slug(self, slug):
        for item in self.dept_checklist_items:
            if item.slug == slug:
                return item
        return None

    @hybrid_property
    def normalized_name(self):
        return self.normalize_name(self.name)

    @normalized_name.expression
    def normalized_name(cls):
        return func.replace(func.replace(func.lower(cls.name), '_', ''), ' ', '')

    @classmethod
    def normalize_name(cls, name):
        return name.lower().replace('_', '').replace(' ', '')

    @property
    def dept_roles_by_id(self):
        return groupify(self.dept_roles, 'id')

    @property
    def dept_roles_by_name(self):
        return groupify(self.dept_roles, 'name')
Example #23
0
#-*- coding: utf-8 -*-
#
# Created on Jan 30, 2013
#
# @author: Younes JAAIDI
#
# $Id: dbd99bd7ae1a783e4e1a4a74b0d4960bca5516a9 $
#

from modsecurity_exception_factory.modsecurity_audit_data_source.sql_base import \
    SQLBase
from modsecurity_exception_factory.modsecurity_audit_data_source.sql_filter_condition import \
    SQLFilterCondition
from sqlalchemy import Column, Integer
from sqlalchemy.orm import relationship
from sqlalchemy.schema import Table, ForeignKey

SQLFilterFilterVariableAssociationTable = Table(
    'filter_filter_condition_association', SQLBase.metadata,
    Column('filter_id', Integer, ForeignKey('filter.id')),
    Column('filter_condition_id', Integer, ForeignKey('filter_condition.id')))


class SQLFilter(SQLBase):
    __tablename__ = 'filter'

    id = Column(Integer, primary_key=True)
    conditionList = relationship(
        SQLFilterCondition, secondary=SQLFilterFilterVariableAssociationTable)
Example #24
0
class Job(MagModel):
    _ONLY_MEMBERS = 0
    _ALL_VOLUNTEERS = 2
    _VISIBILITY_OPTS = [
        (_ONLY_MEMBERS, 'Members of this department'),
        (_ALL_VOLUNTEERS, 'All volunteers')]

    type = Column(Choice(c.JOB_TYPE_OPTS), default=c.REGULAR)
    name = Column(UnicodeText)
    description = Column(UnicodeText)
    start_time = Column(UTCDateTime)
    duration = Column(Integer)
    weight = Column(Float, default=1)
    slots = Column(Integer)
    extra15 = Column(Boolean, default=False)
    department_id = Column(UUID, ForeignKey('department.id'))
    visibility = Column(Choice(_VISIBILITY_OPTS), default=_ONLY_MEMBERS)

    required_roles = relationship(
        'DeptRole', backref='jobs', cascade='save-update,merge,refresh-expire,expunge', secondary='job_required_role')
    shifts = relationship('Shift', backref='job')

    __table_args__ = (
        Index('ix_job_department_id', department_id),
    )

    _repr_attr_names = ['name']

    @classproperty
    def _extra_apply_attrs(cls):
        return set(['required_roles_ids']).union(cls._extra_apply_attrs_restricted)

    @hybrid_property
    def department_name(self):
        return self.department.name

    @department_name.expression
    def department_name(cls):
        return select([Department.name]).where(Department.id == cls.department_id).label('department_name')

    @hybrid_property
    def max_consecutive_hours(self):
        return self.department.max_consecutive_hours

    @max_consecutive_hours.expression
    def max_consecutive_hours(cls):
        return select([Department.max_consecutive_hours]) \
            .where(Department.id == cls.department_id).label('max_consecutive_hours')

    @hybrid_property
    def restricted(self):
        return bool(self.required_roles)

    @restricted.expression
    def restricted(cls):
        return exists([job_required_role.c.dept_role_id]) \
            .where(job_required_role.c.job_id == cls.id).label('restricted')

    @property
    def required_roles_labels(self):
        return readable_join([r.name for r in self.required_roles])

    @property
    def required_roles_ids(self):
        _, ids = self._get_relation_ids('required_roles')
        return [str(d.id) for d in self.required_roles] if ids is None else ids

    @required_roles_ids.setter
    def required_roles_ids(self, value):
        self._set_relation_ids('required_roles', DeptRole, value)

    @property
    def hours(self):
        hours = set()
        for i in range(self.duration):
            hours.add(self.start_time + timedelta(hours=i))
        return hours

    @property
    def end_time(self):
        return self.start_time + timedelta(hours=self.duration)

    def working_limit_ok(self, attendee):
        """
        Prevent signing up for too many shifts in a row. `hours_worked` is the
        number of hours the attendee is working immediately before and
        immediately after this job, plus this job's own hours.
        `working_hour_limit` is the *minimum* of Department.max_consecutive_hours
        across all the jobs considered (including this one). For example, with
        one-hour shifts, if dept A has a limit of 3 hours and dept B a limit of
        2 hours, then signing up for the shift order [A, A, B] is blocked
        because B's stricter limit kicks in.
        """

        attendee_hour_map = attendee.hour_map
        hours_worked = self.duration
        working_hour_limit = self.max_consecutive_hours
        if working_hour_limit == 0:
            working_hour_limit = 1000  # just default to something large

        # count the number of filled hours before this shift
        current_shift_hour = self.start_time - timedelta(hours=1)
        while current_shift_hour in attendee_hour_map:
            hours_worked += 1
            this_job_hour_limit = attendee_hour_map[current_shift_hour].max_consecutive_hours
            if this_job_hour_limit > 0:
                working_hour_limit = min(working_hour_limit, this_job_hour_limit)
            current_shift_hour = current_shift_hour - timedelta(hours=1)

        # count the number of filled hours after this shift
        current_shift_hour = self.start_time + timedelta(hours=self.duration)
        while current_shift_hour in attendee_hour_map:
            hours_worked += 1
            this_job_hour_limit = attendee_hour_map[current_shift_hour].max_consecutive_hours
            if this_job_hour_limit > 0:
                working_hour_limit = min(working_hour_limit, this_job_hour_limit)
            current_shift_hour = current_shift_hour + timedelta(hours=1)

        return hours_worked <= working_hour_limit

    def no_overlap(self, attendee):
        before = self.start_time - timedelta(hours=1)
        after = self.start_time + timedelta(hours=self.duration)
        return not self.hours.intersection(attendee.hours) and (
            before not in attendee.hour_map
            or not attendee.hour_map[before].extra15
            or self.department_id == attendee.hour_map[before].department_id
        ) and (
            after not in attendee.hour_map
            or not self.extra15
            or self.department_id == attendee.hour_map[after].department_id
        ) and self.working_limit_ok(attendee)

    @hybrid_property
    def slots_taken(self):
        return len(self.shifts)

    @slots_taken.expression
    def slots_taken(cls):
        return select([func.count(Shift.id)]).where(Shift.job_id == cls.id).label('slots_taken')

    @hybrid_property
    def is_public(self):
        return self.visibility > Job._ONLY_MEMBERS

    @is_public.expression
    def is_public(cls):
        return cls.visibility > Job._ONLY_MEMBERS

    @hybrid_property
    def is_unfilled(self):
        return self.slots_taken < self.slots

    @is_unfilled.expression
    def is_unfilled(cls):
        return cls.slots_taken < cls.slots

    @property
    def slots_untaken(self):
        return max(0, self.slots - self.slots_taken)

    @property
    def is_setup(self):
        return self.start_time < c.EPOCH

    @property
    def is_teardown(self):
        return self.start_time >= c.ESCHATON

    @property
    def real_duration(self):
        return self.duration + (0.25 if self.extra15 else 0)

    @property
    def weighted_hours(self):
        return self.weight * self.real_duration

    @property
    def total_hours(self):
        return self.weighted_hours * self.slots

    def _potential_volunteers(self, staffing_only=False, order_by=Attendee.full_name):
        """
        Return a list of attendees who:

            1. Are assigned to this job's department.
            2. Are allowed to work this job (job has no required roles
               or the attendee's department membership fulfills all the
               required roles).

        Args:
            staffing_only: Restrict result to attendees where staffing==True.
            order_by: Order by another Attendee attribute.
        """
        query = self.session.query(Attendee)

        if staffing_only:
            query = query.filter(Attendee.staffing == True)  # noqa: E712

        if self.required_roles:
            query = query.join(Attendee.dept_roles, aliased=True).filter(
                and_(*[DeptRole.id == r.id for r in self.required_roles]))
        else:
            query = query.join(Attendee.dept_memberships, aliased=True).filter(
                DeptMembership.department_id == self.department_id)

        return query.order_by(order_by).all()

    @property
    def capable_volunteers_opts(self):
        """
        Format output for use with the {{ options() }} template decorator.
        """
        return [(a.id, a.full_name) for a in self.capable_volunteers]

    @property
    def capable_volunteers(self):
        """
        Return a list of volunteers who could sign up for this job.

        Important: Just because a volunteer is capable of working
        this job doesn't mean they are actually available to work it.
        They may have other shift hours during that time period.
        """
        return self._potential_volunteers(staffing_only=True)

    @cached_property
    def available_volunteers(self):
        """
        Returns a list of volunteers who are allowed to sign up for
        this Job and have the free time to work it.
        """
        return [s for s in self._potential_volunteers(order_by=Attendee.last_first) if self.no_overlap(s)]
Example #25
0
    def delete(self, endpoint, **params):
        return self._request('delete', endpoint, **params)


@db_schema.upgrade('api_tvdb')
def upgrade(ver, session):
    if ver is None or ver <= 6:
        raise db_schema.UpgradeImpossible
    return ver


# association tables
genres_table = Table(
    'tvdb_series_genres', Base.metadata,
    Column('series_id', Integer, ForeignKey('tvdb_series.id')),
    Column('genre_id', Integer, ForeignKey('tvdb_genres.id')))
Base.register_table(genres_table)


class TVDBSeries(Base):
    __tablename__ = "tvdb_series"

    id = Column(Integer, primary_key=True, autoincrement=False)
    last_updated = Column(Integer)
    expired = Column(Boolean)
    name = Column(Unicode)
    language = Column(Unicode)
    rating = Column(Float)
    status = Column(Unicode)
    runtime = Column(Integer)
Example #26
0
from sqlalchemy import Column, Integer, String, Sequence, Table, DateTime
from sqlalchemy.orm import relationship
from sqlalchemy.schema import ForeignKey
from datetime import datetime

from webid.sniffer import Base
import logging
logger = logging.getLogger(name=__name__)

person_devices = Table('person_devices', Base.metadata,
                       Column('person_id', Integer, ForeignKey('person.id')),
                       Column('device_id', Integer, ForeignKey('device.id')))


class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, Sequence('person_id_seq'), primary_key=True)
    uri = Column(String(255), unique=True)
    devices = relationship('Device',
                           secondary=person_devices,
                           cascade="all, delete",
                           backref='owners')

    def __init__(self, URI):
        self.uri = URI


class Device(Base):
    __tablename__ = 'device'
    id = Column(Integer, Sequence('device_id_seq'), primary_key=True)
    uri = Column(String(255), unique=True)
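
A small usage sketch of the association above (illustrative; engine and session setup are assumed):

alice = Person('https://example.org/alice#me')
phone = Device(uri='urn:example:device:phone-1')
alice.devices.append(phone)
session.add(alice)
session.commit()
assert alice in phone.owners  # populated via the 'owners' backref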
Example #27
0
File: Task.py Project: Yixf-Self/COSMOS2
class Task(Base):
    """
    A job that gets executed.  Has a unique set of params within its Stage.
    """
    __tablename__ = 'task'
    # FIXME causes a problem with mysql?
    __table_args__ = (UniqueConstraint('stage_id', 'uid', name='_uc1'), )

    id = Column(Integer, primary_key=True)
    uid = Column(String(255), index=True)

    mem_req = Column(Integer, default=None)
    core_req = Column(Integer, default=1)
    cpu_req = synonym('core_req')
    time_req = Column(Integer)
    NOOP = Column(Boolean, default=False, nullable=False)
    params = Column(MutableDict.as_mutable(JSONEncodedDict),
                    nullable=False,
                    server_default='{}')
    stage_id = Column(ForeignKey('stage.id', ondelete="CASCADE"),
                      nullable=False,
                      index=True)
    log_dir = Column(String(255))
    # output_dir = Column(String(255))
    _status = Column(Enum34_ColumnType(TaskStatus),
                     default=TaskStatus.no_attempt)
    successful = Column(Boolean, default=False, nullable=False)
    started_on = Column(
        DateTime
    )  # FIXME this should probably be deleted.  Too hard to determine.
    submitted_on = Column(DateTime)
    finished_on = Column(DateTime)
    attempt = Column(Integer, default=1)
    must_succeed = Column(Boolean, default=True)
    drm = Column(String(255), nullable=False)
    parents = relationship(
        "Task",
        secondary=TaskEdge.__table__,
        primaryjoin=id == TaskEdge.parent_id,
        secondaryjoin=id == TaskEdge.child_id,
        backref="children",
        passive_deletes=True,
        cascade="save-update, merge, delete",
    )
    # input_files = Column(MutableList.as_mutable(ListOfStrings))
    # output_files = Column(MutableList.as_mutable(ListOfStrings))

    input_map = Column(MutableDict.as_mutable(JSONEncodedDict),
                       nullable=False,
                       server_default='{}')
    output_map = Column(MutableDict.as_mutable(JSONEncodedDict),
                        nullable=False,
                        server_default='{}')

    @property
    def input_files(self):
        return self.input_map.values()

    @property
    def output_files(self):
        return self.output_map.values()

    # command = Column(Text)

    drm_native_specification = Column(String(255))
    drm_jobID = Column(Integer)

    profile_fields = [
        'wall_time', 'cpu_time', 'percent_cpu', 'user_time', 'system_time',
        'io_read_count', 'io_write_count', 'io_read_kb', 'io_write_kb',
        'ctx_switch_voluntary', 'ctx_switch_involuntary', 'avg_rss_mem_kb',
        'max_rss_mem_kb', 'avg_vms_mem_kb', 'max_vms_mem_kb',
        'avg_num_threads', 'max_num_threads', 'avg_num_fds', 'max_num_fds',
        'exit_status'
    ]
    exclude_from_dict = profile_fields + [
        'command', 'info', 'input_files', 'output_files'
    ]

    exit_status = Column(Integer)

    percent_cpu = Column(Integer)
    wall_time = Column(Integer)

    cpu_time = Column(Integer)
    user_time = Column(Integer)
    system_time = Column(Integer)

    avg_rss_mem_kb = Column(Integer)
    max_rss_mem_kb = Column(Integer)
    avg_vms_mem_kb = Column(Integer)
    max_vms_mem_kb = Column(Integer)

    io_read_count = Column(Integer)
    io_write_count = Column(Integer)
    io_wait = Column(Integer)
    io_read_kb = Column(Integer)
    io_write_kb = Column(Integer)

    ctx_switch_voluntary = Column(Integer)
    ctx_switch_involuntary = Column(Integer)

    avg_num_threads = Column(Integer)
    max_num_threads = Column(Integer)

    avg_num_fds = Column(Integer)
    max_num_fds = Column(Integer)

    extra = Column(MutableDict.as_mutable(JSONEncodedDict),
                   nullable=False,
                   server_default='{}')

    @declared_attr
    def status(cls):
        def get_status(self):
            return self._status

        def set_status(self, value):
            if self._status != value:
                self._status = value
                signal_task_status_change.send(self)

        return synonym('_status', descriptor=property(get_status, set_status))

    @property
    def workflow(self):
        return self.stage.workflow

    @property
    def log(self):
        return self.workflow.log

    @property
    def finished(self):
        return self.status in [TaskStatus.successful, TaskStatus.failed]

    _cache_profile = None

    output_profile_path = logplus('profile.json')
    output_command_script_path = logplus('command.bash')
    output_stderr_path = logplus('stderr.txt')
    output_stdout_path = logplus('stdout.txt')

    @property
    def stdout_text(self):
        return readfile(self.output_stdout_path).strip()

    @property
    def stderr_text(self):
        r = readfile(self.output_stderr_path).strip()
        if r == 'file does not exist':
            if self.drm == 'lsf' and self.drm_jobID:
                r += '\n\nbpeek %s output:\n\n' % self.drm_jobID
                try:
                    r += codecs.decode(
                        sp.check_output('bpeek %s' % self.drm_jobID,
                                        shell=True), 'utf-8')
                except Exception as e:
                    r += str(e)
        return r

    @property
    def command_script_text(self):
        # return self.command
        return readfile(
            self.output_command_script_path).strip() or self.command

    def all_predecessors(self, as_dict=False):
        """
        :return: (list) all tasks that precede this task in the task_graph
        """
        d = breadth_first_search.bfs_predecessors(
            self.workflow.task_graph().reverse(copy=False), self)
        if as_dict:
            return d
        return set(d.values())

    def all_successors(self):
        """
        :return: (list) all tasks that descend from this task in the task_graph
        """
        return set(
            breadth_first_search.bfs_successors(self.workflow.task_graph(),
                                                self).values())

    @property
    def label(self):
        """Label used for the taskgraph image"""
        params = '' if len(self.params) == 0 else "\\n {0}".format("\\n".join(
            ["{0}: {1}".format(k, v) for k, v in self.params.items()]))

        return "[%s] %s%s" % (self.id, self.stage.name, params)

    def args_as_query_string(self):
        import urllib

        return urllib.urlencode(self.params)

    def delete(self, delete_files=False):
        self.log.debug('Deleting %s' % self)
        if delete_files:
            for tf in self.output_files:
                os.unlink(tf)
            if os.path.exists(self.log_dir):
                shutil.rmtree(self.log_dir)

        self.session.delete(self)
        self.session.commit()

    @property
    def url(self):
        return url_for('cosmos.task',
                       ex_name=self.workflow.name,
                       stage_name=self.stage.name,
                       task_id=self.id)

    @property
    def params_pretty(self):
        return '%s' % ', '.join(
            '%s=%s' % (k, "'%s'" % v if isinstance(v, basestring) else v)
            for k, v in self.params.items())

    @property
    def params_pformat(self):
        return pprint.pformat(self.params, indent=2, width=1)

    def __repr__(self):
        return "<Task[%s] %s(uid='%s')>" % (self.id or 'id_%s' % id(self),
                                            self.stage.name
                                            if self.stage else '', self.uid)

    def __str__(self):
        return self.__repr__()
Example #28
0
class Source(Base):
    __tablename__ = 'source'

    # Table columns
    id = Column(Integer, primary_key=True)
    x_image = Column(Float, nullable=False)
    y_image = Column(Float, nullable=False)
    x_hsc = Column(Float, nullable=False)
    y_hsc = Column(Float, nullable=False)
    ra = Column(Float, nullable=False)
    dec = Column(Float, nullable=False)

    a_image = Column(Float, nullable=True)
    b_image = Column(Float, nullable=True)
    theta_image = Column(Float, nullable=True)
    ellipticity = Column(Float, nullable=True)
    kron_radius = Column(Float, nullable=True)
    petro_radius = Column(Float, nullable=True)
    flags = Column(Integer, nullable=False)

    mag_auto_g = Column(Float, nullable=True)
    mag_auto_r = Column(Float, nullable=True)
    mag_auto_i = Column(Float, nullable=True)
    magerr_auto_g = Column(Float, nullable=True)
    magerr_auto_r = Column(Float, nullable=True)
    magerr_auto_i = Column(Float, nullable=True)

    mag_petro_g = Column(Float, nullable=True)
    mag_petro_r = Column(Float, nullable=True)
    mag_petro_i = Column(Float, nullable=True)
    magerr_petro_g = Column(Float, nullable=True)
    magerr_petro_r = Column(Float, nullable=True)
    magerr_petro_i = Column(Float, nullable=True)

    mag_ap0_g = Column(Float, nullable=True)
    mag_ap1_g = Column(Float, nullable=True)
    mag_ap2_g = Column(Float, nullable=True)
    mag_ap3_g = Column(Float, nullable=True)
    mag_ap4_g = Column(Float, nullable=True)
    mag_ap5_g = Column(Float, nullable=True)
    mag_ap6_g = Column(Float, nullable=True)
    mag_ap7_g = Column(Float, nullable=True)
    mag_ap8_g = Column(Float, nullable=True)
    mag_ap9_g = Column(Float, nullable=True)
    magerr_ap0_g = Column(Float, nullable=True)
    magerr_ap1_g = Column(Float, nullable=True)
    magerr_ap2_g = Column(Float, nullable=True)
    magerr_ap3_g = Column(Float, nullable=True)
    magerr_ap4_g = Column(Float, nullable=True)
    magerr_ap5_g = Column(Float, nullable=True)
    magerr_ap6_g = Column(Float, nullable=True)
    magerr_ap7_g = Column(Float, nullable=True)
    magerr_ap8_g = Column(Float, nullable=True)
    magerr_ap9_g = Column(Float, nullable=True)

    mag_ap0_r = Column(Float, nullable=True)
    mag_ap1_r = Column(Float, nullable=True)
    mag_ap2_r = Column(Float, nullable=True)
    mag_ap3_r = Column(Float, nullable=True)
    mag_ap4_r = Column(Float, nullable=True)
    mag_ap5_r = Column(Float, nullable=True)
    mag_ap6_r = Column(Float, nullable=True)
    mag_ap7_r = Column(Float, nullable=True)
    mag_ap8_r = Column(Float, nullable=True)
    mag_ap9_r = Column(Float, nullable=True)
    magerr_ap0_r = Column(Float, nullable=True)
    magerr_ap1_r = Column(Float, nullable=True)
    magerr_ap2_r = Column(Float, nullable=True)
    magerr_ap3_r = Column(Float, nullable=True)
    magerr_ap4_r = Column(Float, nullable=True)
    magerr_ap5_r = Column(Float, nullable=True)
    magerr_ap6_r = Column(Float, nullable=True)
    magerr_ap7_r = Column(Float, nullable=True)
    magerr_ap8_r = Column(Float, nullable=True)
    magerr_ap9_r = Column(Float, nullable=True)

    mag_ap0_i = Column(Float, nullable=True)
    mag_ap1_i = Column(Float, nullable=True)
    mag_ap2_i = Column(Float, nullable=True)
    mag_ap3_i = Column(Float, nullable=True)
    mag_ap4_i = Column(Float, nullable=True)
    mag_ap5_i = Column(Float, nullable=True)
    mag_ap6_i = Column(Float, nullable=True)
    mag_ap7_i = Column(Float, nullable=True)
    mag_ap8_i = Column(Float, nullable=True)
    mag_ap9_i = Column(Float, nullable=True)
    magerr_ap0_i = Column(Float, nullable=True)
    magerr_ap1_i = Column(Float, nullable=True)
    magerr_ap2_i = Column(Float, nullable=True)
    magerr_ap3_i = Column(Float, nullable=True)
    magerr_ap4_i = Column(Float, nullable=True)
    magerr_ap5_i = Column(Float, nullable=True)
    magerr_ap6_i = Column(Float, nullable=True)
    magerr_ap7_i = Column(Float, nullable=True)
    magerr_ap8_i = Column(Float, nullable=True)
    magerr_ap9_i = Column(Float, nullable=True)

    fwhm_g = Column(Float, nullable=True)
    fwhm_r = Column(Float, nullable=True)
    fwhm_i = Column(Float, nullable=True)
    flux_radius_g = Column(Float, nullable=True)
    flux_radius_r = Column(Float, nullable=True)
    flux_radius_i = Column(Float, nullable=True)

    ebv = Column(Float, nullable=True)
    A_g = Column(Float, nullable=True)
    A_r = Column(Float, nullable=True)
    A_i = Column(Float, nullable=True)

    # Relationships
    patch_id = Column(Integer, ForeignKey('patch.id'), nullable=False)
    patch = relationship('Patch')

    @property
    def skycoord(self):
        return SkyCoord(ra=self.ra, dec=self.dec, unit='deg')

    @property
    def hr_angle_string(self):
        return self.skycoord.to_string('hmsdms')
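        # Illustrative output: for ra=150.1, dec=2.2 this returns a string
        # like '10h00m24s +02d12m00s' (exact formatting is astropy's).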
Example #29
0
def create_translation_table(_table_name,
                             foreign_class,
                             relation_name,
                             language_class,
                             relation_lazy='select',
                             **kwargs):
    """Creates a table that represents some kind of data attached to the given
    foreign class, but translated across several languages.  Returns the new
    table's mapped class.  It won't be declarative, but it will have a
    `__table__` attribute so you can retrieve the Table object.

    `foreign_class` must have a `__singlename__`, currently only used to create
    the name of the foreign key column.

    Also supports the notion of a default language, which is attached to the
    session.  This is English by default, for historical and practical reasons.

    Usage looks like this:

        class Foo(Base): ...

        create_translation_table('foo_bars', Foo, 'bars',
            name = Column(...),
        )

        # Now you can do the following:
        foo.name
        foo.name_map['en']
        foo.foo_bars['en']

        foo.name_map['en'] = "new name"
        del foo.name_map['en']

        q.options(joinedload(Foo.bars_local))
        q.options(joinedload(Foo.bars))

    The following properties are added to the passed class:

    - `(relation_name)`, a relation to the new table.  It uses a dict-based
      collection class, where the keys are language identifiers and the values
      are rows in the created tables.
    - `(relation_name)_local`, a relation to the row in the new table that
      matches the current default language.
    - `(relation_name)_table`, the class created by this function.

    Note that these are distinct relations.  Even though the former necessarily
    includes the latter, SQLAlchemy doesn't treat them as linked; loading one
    will not load the other.  Modifying both within the same transaction has
    undefined behavior.

    For each column provided, the following additional attributes are added to
    Foo:

    - `(column)_map`, an association proxy onto `foo_bars`.
    - `(column)`, an association proxy onto `foo_bars_local`.

    Pardon the naming disparity, but the grammar suffers otherwise.

    Modifying these directly is not likely to be a good idea.

    For Markdown-formatted columns, `(column)_map` and `(column)` will give
    Markdown objects.
    """
    # n.b.: language_class only exists for the sake of tests, which sometimes
    # want to create tables entirely separate from the pokedex metadata

    foreign_key_name = foreign_class.__singlename__ + '_id'

    Translations = type(
        _table_name, (object, ), {
            '_language_identifier':
            association_proxy('local_language', 'identifier'),
            'relation_name':
            relation_name,
        })

    # Create the table object
    table = Table(
        _table_name,
        foreign_class.__table__.metadata,
        Column(foreign_key_name,
               Integer,
               ForeignKey(foreign_class.id),
               primary_key=True,
               nullable=False,
               info=dict(description="ID of the %s these texts relate to" %
                         foreign_class.__singlename__)),
        Column('local_language_id',
               Integer,
               ForeignKey(language_class.id),
               primary_key=True,
               nullable=False,
               info=dict(description="Language these texts are in")),
    )
    Translations.__table__ = table

    # Add ye columns
    # Column objects have a _creation_order attribute in ascending order; use
    # this to get the (unordered) kwargs sorted correctly
    kwitems = sorted(kwargs.items(), key=lambda kv: kv[1]._creation_order)
    for name, column in kwitems:
        column.name = name
        table.append_column(column)

    # Construct ye mapper
    mapper(Translations,
           table,
           properties={
               'foreign_id':
               synonym(foreign_key_name),
               'local_language':
               relationship(
                   language_class,
                   primaryjoin=table.c.local_language_id == language_class.id,
                   innerjoin=True),
           })

    # Add full-table relations to the original class
    # Foo.bars_table
    setattr(foreign_class, relation_name + '_table', Translations)
    # Foo.bars
    setattr(
        foreign_class, relation_name,
        relationship(
            Translations,
            primaryjoin=foreign_class.id == Translations.foreign_id,
            collection_class=attribute_mapped_collection('local_language'),
        ))
    # Foo.bars_local
    # This is a bit clever; it uses bindparam() to make the join clause
    # modifiable on the fly.  db sessions know the current language and
    # populate the bindparam.
    # The 'dummy' value is to trick SQLA; without it, SQLA thinks this
    # bindparam is just its own auto-generated clause and everything gets
    # messed up.
    local_relation_name = relation_name + '_local'
    setattr(
        foreign_class,
        local_relation_name,
        relationship(
            Translations,
            primaryjoin=and_(
                Translations.foreign_id == foreign_class.id,
                Translations.local_language_id == bindparam(
                    '_default_language_id',
                    value='dummy',
                    type_=Integer,
                    required=True),
            ),
            foreign_keys=[
                Translations.foreign_id, Translations.local_language_id
            ],
            uselist=False,
            #innerjoin=True,
            lazy=relation_lazy,
        ))

    # Add per-column proxies to the original class
    for name, column in kwitems:
        getset_factory = None
        string_getter = column.info.get('string_getter')
        if string_getter:
            getset_factory = _getset_factory_factory(column.name,
                                                     string_getter)

        # Class.(column) -- accessor for the default language's value
        setattr(
            foreign_class, name,
            LocalAssociationProxy(local_relation_name,
                                  name,
                                  getset_factory=getset_factory))

        # Class.(column)_map -- accessor for the language dict
        # Need a custom creator since Translations doesn't have an init, and
        # these are passed as *args anyway
        def creator(language, value):
            row = Translations()
            row.local_language = language
            setattr(row, name, value)
            return row

        setattr(
            foreign_class, name + '_map',
            association_proxy(relation_name,
                              name,
                              creator=creator,
                              getset_factory=getset_factory))

    # Add to the list of translation classes
    foreign_class.translation_classes.append(Translations)

    # Done
    return Translations
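
A concrete call sketch (assuming `Foo` is a mapped class with `__singlename__ = 'foo'` and a `translation_classes` list, and `Language` is the mapped language table; column names are illustrative):

FooText = create_translation_table(
    'foo_text', Foo, 'texts', Language,
    name=Column(Unicode(79), nullable=False, index=True),
)
# Foo now exposes Foo.texts (a dict-like relation keyed by language),
# Foo.texts_local and Foo.texts_table, plus Foo.name / Foo.name_map proxies
# for the 'name' column.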
Example #30
0
class Project(db.Model, DomainObject):
    '''A microtasking Project to which Tasks are associated.
    '''

    __tablename__ = 'project'

    #: ID of the project
    id = Column(Integer, primary_key=True)
    #: UTC timestamp when the project is created
    created = Column(Text, default=make_timestamp)
    #: UTC timestamp when the project is updated (or any of its relationships)
    updated = Column(Text, default=make_timestamp, onupdate=make_timestamp)
    #: Project name
    name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project slug for the URL
    short_name = Column(Unicode(length=255), unique=True, nullable=False)
    #: Project description
    description = Column(Unicode(length=255), nullable=False)
    #: Project long description
    long_description = Column(UnicodeText)
    #: Project webhook
    webhook = Column(Text)
    #: If the project allows anonymous contributions
    allow_anonymous_contributors = Column(Boolean, default=True)
    #: If the project is published
    published = Column(Boolean, nullable=False, default=False)
    # If the project is hidden
    hidden = Column(Boolean, default=False)
    # If the project is featured
    featured = Column(Boolean, nullable=False, default=False)
    # Secret key for project
    secret_key = Column(Text, default=make_uuid)
    # If the project owner has been emailed
    contacted = Column(Boolean, nullable=False, default=False)
    #: Project owner_id
    owner_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    #: Project Category
    category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
    #: Project info field formatted as JSON
    info = Column(MutableDict.as_mutable(JSON), default=dict())
    #: If emails are sent to users about new tasks
    email_notif = Column(Boolean, default=False)

    tasks = relationship(Task,
                         cascade='all, delete, delete-orphan',
                         backref='project')
    task_runs = relationship(TaskRun,
                             backref='project',
                             cascade='all, delete-orphan',
                             order_by='TaskRun.finish_time.desc()')
    category = relationship(Category)
    blogposts = relationship(Blogpost,
                             cascade='all, delete-orphan',
                             backref='project')
    coowners = relationship("User",
                            lazy='subquery',
                            single_parent=True,
                            secondary="project_coowner")

    def needs_password(self):
        return self.get_passwd_hash() is not None

    def get_passwd_hash(self):
        return self.info.get('passwd_hash')

    def get_passwd(self):
        if self.needs_password():
            return signer.loads(self.get_passwd_hash())
        return None

    def set_password(self, password):
        if len(password) > 1:
            self.info['passwd_hash'] = signer.dumps(password)
            return True
        self.info['passwd_hash'] = None
        return False

    def check_password(self, password):
        if self.needs_password():
            return self.get_passwd() == password
        return False

    def has_autoimporter(self):
        return self.get_autoimporter() is not None

    def get_autoimporter(self):
        return self.info.get('autoimporter')

    def set_autoimporter(self, new=None):
        self.info['autoimporter'] = new

    def delete_autoimporter(self):
        del self.info['autoimporter']

    def has_presenter(self):
        return self.info.get('task_presenter') not in ('', None)

    @classmethod
    def public_attributes(self):
        """Return a list of public attributes."""
        return [
            'id', 'description', 'info', 'n_tasks', 'n_volunteers', 'name',
            'overall_progress', 'short_name', 'created', 'description',
            'last_activity', 'last_activity_raw', 'overall_progress',
            'n_task_runs', 'n_results', 'owner', 'updated', 'featured',
            'owner_id', 'n_completed_tasks', 'n_blogposts'
        ]

    @classmethod
    def public_info_keys(self):
        """Return a list of public info keys."""
        default = [
            'container', 'thumbnail', 'thumbnail_url', 'task_presenter',
            'tutorial', 'sched'
        ]
        extra = current_app.config.get('PROJECT_INFO_PUBLIC_FIELDS')
        if extra:
            return list(set(default).union(set(extra)))
        else:
            return default

    def get_presenter_headers(self):
        headers = set()
        task_presenter = self.info.get('task_presenter')

        if not task_presenter:
            return headers

        search_backward_stop = 0
        for match in re.finditer(r'\.info\.([a-zA-Z0-9_]+)', task_presenter):
            linebreak_index = task_presenter.rfind('\n', search_backward_stop,
                                                   match.start())
            if linebreak_index > -1:
                search_start = linebreak_index
            else:
                search_start = search_backward_stop
            if task_presenter.rfind('//', search_start, match.start()) > -1:
                continue

            comment_start = task_presenter.rfind('/*', search_backward_stop,
                                                 match.start())
            if comment_start > -1:
                search_backward_stop = comment_start
                comment_end = task_presenter.rfind('*/',
                                                   search_backward_stop,
                                                   match.start())
                if comment_end < 0:
                    continue
            headers.add(match.group(1))
            search_backward_stop = match.end()

        return headers
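        # For example, a presenter template containing "task.info.image_url"
        # plus a commented-out line "// task.info.ignored" would yield
        # {'image_url'} (illustrative).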