Beispiel #1
0
    def get_status(self):
        """Return a snapshot of every worker's current state.

        return (dict): maps each shard (as a string) to its connection
            flag, pending operation, start time and side data.

        """
        status = {}
        for shard in self._worker.keys():
            start = self._start_time[shard]
            if start is not None:
                start = make_timestamp(start)
            side = self._side_data[shard]
            if side is not None:
                side = (side[0], make_timestamp(side[1]))
            operation = self._operation[shard]
            if isinstance(operation, QueueItem):
                operation = operation.to_dict()

            status["%d" % shard] = {
                'connected': self._worker[shard].connected,
                'operation': operation,
                'start_time': start,
                'side_data': side}
        return status
Beispiel #2
0
    def send_token(self, submission):
        """Send the token for the given submission to all rankings.

        Enqueue the submission record and its token subchange on every
        ranking's proxy queue.

        """
        # Payloads to be delivered to the remote rankings.
        sub_id = str(submission.id)
        sub_payload = {
            "user": encode_id(submission.user.username),
            "task": encode_id(submission.task.name),
            "time": int(make_timestamp(submission.timestamp))}

        subchange_id = "%d%st" % (make_timestamp(submission.token.timestamp),
                                  sub_id)
        subchange_payload = {
            "submission": sub_id,
            "time": int(make_timestamp(submission.token.timestamp)),
            "token": True}

        # Every ranking receives both pieces of data.
        for ranking in self.rankings:
            ranking.data_queue.put(
                (ranking.SUBMISSION_TYPE, {sub_id: sub_payload}))
            ranking.data_queue.put(
                (ranking.SUBCHANGE_TYPE, {subchange_id: subchange_payload}))

        self.tokens_sent_to_rankings.add(submission.id)
Beispiel #3
0
    def operations_for_score(self, submission):
        """Build the proxy operations carrying this submission's score.

        return ([ProxyOperation]): the submission record and its score
            subchange, ready to be enqueued for the rankings.

        """
        result = submission.get_result()

        # Payloads to be delivered to the remote rankings.
        sub_id = "%d" % submission.id
        sub_payload = {
            "user": encode_id(submission.participation.user.username),
            "task": encode_id(submission.task.name),
            "time": int(make_timestamp(submission.timestamp))}

        subchange_id = "%d%ss" % (make_timestamp(submission.timestamp),
                                  sub_id)
        subchange_payload = {
            "submission": sub_id,
            "time": int(make_timestamp(submission.timestamp))}

        # This check is probably useless.
        if result is not None and result.scored():
            # The unrounded score goes to RWS.
            subchange_payload["score"] = result.score
            subchange_payload["extra"] = result.ranking_score_details

        self.scores_sent_to_rankings.add(submission.id)

        return [
            ProxyOperation(ProxyExecutor.SUBMISSION_TYPE,
                           {sub_id: sub_payload}),
            ProxyOperation(ProxyExecutor.SUBCHANGE_TYPE,
                           {subchange_id: subchange_payload})]
Beispiel #4
0
    def operations_for_token(self, submission):
        """Build the proxy operations carrying this submission's token.

        return ([ProxyOperation]): the submission record and its token
            subchange, ready to be enqueued for the rankings.

        """
        # Payloads to be delivered to the remote rankings.
        sub_id = "%d" % submission.id
        sub_payload = {
            "user": encode_id(submission.user.username),
            "task": encode_id(submission.task.name),
            "time": int(make_timestamp(submission.timestamp))}

        subchange_id = "%d%st" % (make_timestamp(submission.token.timestamp),
                                  sub_id)
        subchange_payload = {
            "submission": sub_id,
            "time": int(make_timestamp(submission.token.timestamp)),
            "token": True}

        self.tokens_sent_to_rankings.add(submission.id)

        return [
            ProxyOperation(ProxyExecutor.SUBMISSION_TYPE,
                           {sub_id: sub_payload}),
            ProxyOperation(ProxyExecutor.SUBCHANGE_TYPE,
                           {subchange_id: subchange_payload})]
Beispiel #5
0
    def send_token(self, submission):
        """Forward the given submission's token to every ranking.

        The submission record and its token subchange are placed on the
        proxy queue of each ranking.

        """
        # Messages to be delivered to the remote rankings.
        sub_id = str(submission.id)
        submission_msg = dict(
            user=encode_id(submission.user.username),
            task=encode_id(submission.task.name),
            time=int(make_timestamp(submission.timestamp)))

        subchange_id = "%d%st" % (make_timestamp(submission.token.timestamp),
                                  sub_id)
        subchange_msg = dict(
            submission=sub_id,
            time=int(make_timestamp(submission.token.timestamp)),
            token=True)

        # Every ranking receives both messages.
        for ranking in self.rankings:
            for type_, payload in (
                    (ranking.SUBMISSION_TYPE, {sub_id: submission_msg}),
                    (ranking.SUBCHANGE_TYPE, {subchange_id: subchange_msg})):
                ranking.data_queue.put((type_, payload))

        self.tokens_sent_to_rankings.add(submission.id)
Beispiel #6
0
    def operations_for_token(self, submission):
        """Build the two proxy operations announcing this token.

        return ([ProxyOperation]): the submission record and its token
            subchange, to be enqueued for delivery to the rankings.

        """
        # Payloads to be delivered to the remote rankings.
        token_time = make_timestamp(submission.token.timestamp)

        sub_id = "%d" % submission.id
        sub_payload = {
            "user": encode_id(submission.participation.user.username),
            "task": encode_id(submission.task.name),
            "time": int(make_timestamp(submission.timestamp))}

        subchange_id = "%d%st" % (token_time, sub_id)
        subchange_payload = {
            "submission": sub_id,
            "time": int(token_time),
            "token": True}

        self.tokens_sent_to_rankings.add(submission.id)

        ops = [ProxyOperation(ProxyExecutor.SUBMISSION_TYPE,
                              {sub_id: sub_payload}),
               ProxyOperation(ProxyExecutor.SUBCHANGE_TYPE,
                              {subchange_id: subchange_payload})]
        return ops
Beispiel #7
0
    def get(self):
        """Reply with pending questions and queued notifications as JSON."""
        last_notification = make_datetime(
            float(self.get_argument("last_notification", "0")))

        # Keep "== None" in filter arguments. SQLAlchemy does not
        # understand "is None".
        questions = self.sql_session.query(Question)\
            .filter(Question.reply_timestamp == None)\
            .filter(Question.question_timestamp > last_notification)\
            .all()  # noqa

        res = [{
            "type": "new_question",
            "timestamp": make_timestamp(question.question_timestamp),
            "subject": question.subject,
            "text": question.text,
            "contest_id": question.participation.contest_id
        } for question in questions]

        # Simple service-level notifications; drained once delivered.
        res.extend({"type": "notification",
                    "timestamp": make_timestamp(notification[0]),
                    "subject": notification[1],
                    "text": notification[2]}
                   for notification in self.application.service.notifications)
        self.application.service.notifications = []

        self.write(json.dumps(res))
Beispiel #8
0
    def operations_for_score(self, submission):
        """Build the proxy operations announcing this submission's score.

        return ([ProxyOperation]): the submission record and its score
            subchange, to be enqueued for delivery to the rankings.

        """
        # Payloads to be delivered to the remote rankings.
        sub_ts = make_timestamp(submission.timestamp)
        sub_id = "%d" % submission.id

        sub_payload = {
            "user": encode_id(submission.participation.user.username),
            "task": encode_id(submission.task.name),
            "time": int(sub_ts)}

        subchange_id = "%d%ss" % (sub_ts, sub_id)
        subchange_payload = {"submission": sub_id, "time": int(sub_ts)}

        result = submission.get_result()
        # This check is probably useless.
        if result is not None and result.scored():
            # RWS gets the unrounded score.
            subchange_payload["score"] = result.score
            subchange_payload["extra"] = result.ranking_score_details

        self.scores_sent_to_rankings.add(submission.id)

        return [ProxyOperation(ProxyExecutor.SUBMISSION_TYPE,
                               {sub_id: sub_payload}),
                ProxyOperation(ProxyExecutor.SUBCHANGE_TYPE,
                               {subchange_id: subchange_payload})]
Beispiel #9
0
    def get_status(self):
        """Return a snapshot of every worker's current state.

        return (dict): per-shard info: connection flag, current
            operation, start time and side data.

        """
        result = {}
        for shard, worker in self._worker.items():
            start = self._start_time[shard]
            side = self._side_data[shard]
            op = self._operation[shard]
            result["%d" % shard] = {
                'connected': worker.connected,
                'operation': (op.to_dict()
                              if isinstance(op, QueueItem) else op),
                'start_time': (make_timestamp(start)
                               if start is not None else None),
                'side_data': ((side[0], make_timestamp(side[1]))
                              if side is not None else None),
            }
        return result
Beispiel #10
0
    def initialize(self):
        """Send basic data to all the rankings.

        It's data that's supposed to be sent before the contest, that's
        needed to understand what we're talking about when we send
        submissions: contest, users, tasks.

        No support for teams, flags and faces.

        """
        logger.info("Initializing rankings.")

        with SessionGen() as session:
            contest = Contest.get_from_id(self.contest_id, session)

            if contest is None:
                logger.error(
                    "Received request for unexistent contest "
                    "id %s.", self.contest_id)
                raise KeyError("Contest not found.")

            # All payloads must be built while the session is still
            # open; the ORM objects are not usable after it closes.
            contest_id = encode_id(contest.name)
            contest_data = {
                "name": contest.description,
                "begin": int(make_timestamp(contest.start)),
                "end": int(make_timestamp(contest.stop)),
                "score_precision": contest.score_precision
            }

            users = dict()

            # Hidden users are not published to the rankings.
            for user in contest.users:
                if not user.hidden:
                    users[encode_id(user.username)] = \
                        {"f_name": user.first_name,
                         "l_name": user.last_name,
                         "team": None}  # Teams are not supported.

            tasks = dict()

            for task in contest.tasks:
                score_type = get_score_type(dataset=task.active_dataset)
                tasks[encode_id(task.name)] = \
                    {"short_name": task.name,
                     "name": task.title,
                     "contest": encode_id(contest.name),
                     "order": task.num,
                     "max_score": score_type.max_score,
                     "extra_headers": score_type.ranking_headers,
                     "score_precision": task.score_precision,
                     "score_mode": task.score_mode}

        # Only plain dicts survive past the session; enqueue them now.
        self.enqueue(
            ProxyOperation(ProxyExecutor.CONTEST_TYPE,
                           {contest_id: contest_data}))
        self.enqueue(ProxyOperation(ProxyExecutor.USER_TYPE, users))
        self.enqueue(ProxyOperation(ProxyExecutor.TASK_TYPE, tasks))
    def initialize(self):
        """Send basic data to all the rankings.

        It's data that's supposed to be sent before the contest, that's
        needed to understand what we're talking about when we send
        submissions: contest, users, tasks.

        No support for teams, flags and faces.

        """
        logger.info("Initializing rankings.")

        with SessionGen() as session:
            contest = Contest.get_from_id(self.contest_id, session)

            if contest is None:
                logger.error("Received request for unexistent contest "
                             "id %s.", self.contest_id)
                raise KeyError("Contest not found.")

            # All payloads must be built while the session is still
            # open; the ORM objects are not usable after it closes.
            contest_id = encode_id(contest.name)
            contest_data = {
                "name": contest.description,
                "begin": int(make_timestamp(contest.start)),
                "end": int(make_timestamp(contest.stop)),
                "score_precision": contest.score_precision}

            users = dict()

            # Hidden participations are not published to the rankings.
            for participation in contest.participations:
                user = participation.user
                if not participation.hidden:
                    users[encode_id(user.username)] = \
                        {"f_name": user.first_name,
                         "l_name": user.last_name,
                         "team": None}  # Teams are not supported.

            tasks = dict()

            for task in contest.tasks:
                score_type = get_score_type(dataset=task.active_dataset)
                tasks[encode_id(task.name)] = \
                    {"short_name": task.name,
                     "name": task.title,
                     "contest": encode_id(contest.name),
                     "order": task.num,
                     "max_score": score_type.max_score,
                     "extra_headers": score_type.ranking_headers,
                     "score_precision": task.score_precision,
                     "score_mode": task.score_mode}

        # Only plain dicts survive past the session; enqueue them now.
        self.enqueue(ProxyOperation(ProxyExecutor.CONTEST_TYPE,
                                    {contest_id: contest_data}))
        self.enqueue(ProxyOperation(ProxyExecutor.USER_TYPE, users))
        self.enqueue(ProxyOperation(ProxyExecutor.TASK_TYPE, tasks))
Beispiel #12
0
    def _verify_cookie(self):
        """Validate the cookie, clearing it when any check fails."""
        # An empty cookie must not be touched: clearing it would mark
        # it as modified and cause it to be sent in the response.
        if len(self._cookie) == 0:
            return

        admin_id = self._cookie.get("id", None)
        remote_addr = self._cookie.get("ip", None)
        timestamp = self._cookie.get("timestamp", None)

        # All conditions short-circuit, so the type checks and the
        # expiration arithmetic only run on well-formed data.
        valid = (
            admin_id is not None
            and remote_addr is not None
            and timestamp is not None
            and isinstance(admin_id, int)
            and isinstance(timestamp, float)
            and remote_addr == self._request.remote_addr
            and make_timestamp() - timestamp <= config.admin_cookie_duration)

        if not valid:
            self.clear()
Beispiel #13
0
    def get(self):
        """Reply with this user's communications and notifications as JSON."""
        participation = self.current_user

        last_notification = self.get_argument("last_notification", None)
        if last_notification is not None:
            last_notification = make_datetime(float(last_notification))

        res = get_communications(self.sql_session, participation,
                                 self.timestamp, after=last_notification)

        # Drain any service-level notifications queued for this user.
        pending = self.service.notifications
        username = participation.user.username
        if username in pending:
            for item in pending[username]:
                res.append({
                    "type": "notification",
                    "timestamp": make_timestamp(item[0]),
                    "subject": item[1],
                    "text": item[2],
                    "level": item[3]})
            del pending[username]

        self.write(json.dumps(res))
Beispiel #14
0
    def _authenticate(self, request):
        """Extract and validate the admin cookie from the request.

        request (werkzeug.wrappers.Request): werkzeug request object.

        return (int|None): the admin id stored in the cookie if every
            check passes, otherwise None.

        """
        cookie = JSONSecureCookie.load_cookie(
            request, AWSAuthMiddleware.COOKIE, config.secret_key)

        admin_id = cookie.get("id", None)
        remote_addr = cookie.get("ip", None)
        timestamp = cookie.get("timestamp", None)

        # Reject incomplete cookies.
        if any(v is None for v in (admin_id, remote_addr, timestamp)):
            return None

        # Reject cookies coming from a different address.
        if remote_addr != request.remote_addr:
            return None

        # Reject expired cookies.
        if make_timestamp() - timestamp > config.admin_cookie_duration:
            return None

        return int(admin_id)
Beispiel #15
0
    def _verify_cookie(self):
        """Check the stored cookie and wipe it if it fails validation."""
        # Leave an empty cookie alone: clearing it would flag it as
        # modified and force it into the response.
        if len(self._cookie) == 0:
            return

        admin_id = self._cookie.get("id", None)
        remote_addr = self._cookie.get("ip", None)
        timestamp = self._cookie.get("timestamp", None)

        # One combined guard: missing fields, wrong types, address
        # mismatch or expiration all invalidate the cookie. The "or"
        # chain short-circuits, so later checks see well-formed data.
        if admin_id is None or remote_addr is None or timestamp is None \
                or not isinstance(admin_id, int) \
                or not isinstance(timestamp, float) \
                or remote_addr != self._request.remote_addr \
                or make_timestamp() - timestamp > \
                config.admin_cookie_duration:
            self.clear()
Beispiel #16
0
    def post(self):
        """Handle a login attempt.

        Read username, password and the post-login destination from the
        request; verify that the user exists, has a participation in
        this contest, supplied the correct password, comes from an
        allowed IP and is not a blocked hidden participation. On
        success, store a secure "login" cookie and redirect to the
        requested page; on any failure, redirect to the login-error
        page.

        """
        username = self.get_argument("username", "")
        password = self.get_argument("password", "")
        next_page = self.get_argument("next", "/")
        user = self.sql_session.query(User)\
            .filter(User.username == username)\
            .first()
        participation = self.sql_session.query(Participation)\
            .filter(Participation.contest == self.contest)\
            .filter(Participation.user == user)\
            .first()

        if user is None:
            # TODO: notify the user that they don't exist
            self.redirect("/?login_error=true")
            return

        if participation is None:
            # TODO: notify the user that they're uninvited
            self.redirect("/?login_error=true")
            return

        # If a contest-specific password is defined, use that. If it's
        # not, use the user's main password.
        if participation.password is None:
            correct_password = user.password
        else:
            correct_password = participation.password

        filtered_user = filter_ascii(username)
        filtered_pass = filter_ascii(password)

        # NOTE(review): this logs the (filtered) password in clear and
        # compares passwords as plain strings -- confirm this matches
        # the project's password-storage policy.
        if password != correct_password:
            logger.info("Login error: user=%s pass=%s remote_ip=%s." %
                        (filtered_user, filtered_pass, self.request.remote_ip))
            self.redirect("/?login_error=true")
            return

        if self.contest.ip_restriction and participation.ip is not None \
                and not check_ip(self.request.remote_ip, participation.ip):
            logger.info("Unexpected IP: user=%s pass=%s remote_ip=%s.",
                        filtered_user, filtered_pass, self.request.remote_ip)
            self.redirect("/?login_error=true")
            return

        if participation.hidden and self.contest.block_hidden_participations:
            logger.info("Hidden user login attempt: "
                        "user=%s pass=%s remote_ip=%s.",
                        filtered_user, filtered_pass, self.request.remote_ip)
            self.redirect("/?login_error=true")
            return

        logger.info("User logged in: user=%s remote_ip=%s.",
                    filtered_user, self.request.remote_ip)
        # The cookie stores (username, password, issue-time); it is
        # validated and refreshed on subsequent requests.
        self.set_secure_cookie("login",
                               pickle.dumps((user.username,
                                             correct_password,
                                             make_timestamp())),
                               expires_days=None)
        self.redirect(next_page)
Beispiel #17
0
    def get_status(self):
        """Return a snapshot of every worker's current state.

        return (dict): per-shard info: connection flag, pending
            operations and start time.

        """
        result = {}
        for shard in self._worker.keys():
            start = self._start_time[shard]
            if start is not None:
                start = make_timestamp(start)

            operations = self._operations[shard]
            if isinstance(operations, list):
                operations = [op.to_dict() for op in operations]

            result["%d" % shard] = {
                'connected': self._worker[shard].connected,
                'operations': operations,
                'start_time': start}
        return result
Beispiel #18
0
def encode_value(type_, value):
    """Encode a value of a given SQLAlchemy type into JSON-compatible form.

    type_ (sqlalchemy.types.TypeEngine): the SQLAlchemy type of the
        column that held the value.
    value (object): the value to encode.

    return (object): the value as bool, int, float, string, list, dict
        or another JSON-compatible type.

    raise (RuntimeError): if the column type is not recognized.

    """
    if value is None:
        return None

    # These column types already hold JSON-compatible values.
    passthrough = (Boolean, Integer, Float, String, Unicode, Enum, JSONB,
                   Codename, Filename, FilenameSchema, Digest)
    if isinstance(type_, passthrough):
        return value
    if isinstance(type_, DateTime):
        return make_timestamp(value)
    if isinstance(type_, Interval):
        return value.total_seconds()
    if isinstance(type_, (ARRAY, FilenameSchemaArray)):
        # Encode each element recursively with the item type.
        return [encode_value(type_.item_type, item) for item in value]
    if isinstance(type_, CIDR):
        return str(value)
    raise RuntimeError("Unknown SQLAlchemy column type: %s" % type_)
Beispiel #19
0
def encode_value(type_, value):
    """Encode a value of a given SQLAlchemy type into JSON-compatible form.

    type_ (sqlalchemy.types.TypeEngine): the SQLAlchemy type of the
        column that held the value.
    value (object): the value to encode.

    return (object): the value as bool, int, float, string, list, dict
        or another JSON-compatible type.

    raise (RuntimeError): if the column type is not recognized.

    """
    if value is None:
        return None

    # These column types already hold JSON-compatible values.
    if isinstance(type_, (Boolean, Integer, Float, String, Unicode, Enum,
                          JSONB)):
        return value
    if isinstance(type_, DateTime):
        return make_timestamp(value)
    if isinstance(type_, Interval):
        return value.total_seconds()
    if isinstance(type_, ARRAY):
        # Encode each element recursively with the item type.
        return [encode_value(type_.item_type, item) for item in value]
    if isinstance(type_, CIDR):
        return str(value)
    raise RuntimeError("Unknown SQLAlchemy column type: %s" % type_)
Beispiel #20
0
    def _get_current_user_from_cookie(self):
        """Return the current participation based on the cookie.

        If a participation can be extracted, the cookie is refreshed.

        return (Participation|None): the participation extracted from
            the cookie, or None if not possible.

        """
        # Fetch the cookie once instead of twice.
        cookie_data = self.get_secure_cookie("login")
        if cookie_data is None:
            return None

        # Parse cookie: a malformed payload simply means "not logged
        # in". Catch Exception (not a bare except) so that
        # KeyboardInterrupt/SystemExit are not swallowed.
        try:
            cookie = pickle.loads(cookie_data)
            username = cookie[0]
            password = cookie[1]
            last_update = make_datetime(cookie[2])
        except Exception:
            return None

        # Check if the cookie is expired.
        if self.timestamp - last_update > \
                timedelta(seconds=config.cookie_duration):
            return None

        # Load user from DB and make sure it exists.
        user = self.sql_session.query(User)\
            .filter(User.username == username)\
            .first()
        if user is None:
            return None

        # Load participation from DB and make sure it exists.
        participation = self.sql_session.query(Participation)\
            .filter(Participation.contest == self.contest)\
            .filter(Participation.user == user)\
            .first()
        if participation is None:
            return None

        # Check that the password is correct (if a contest-specific
        # password is defined, use that instead of the user password).
        if participation.password is None:
            correct_password = user.password
        else:
            correct_password = participation.password
        if password != correct_password:
            return None

        # Re-issue the cookie so its timestamp (and thus its expiry)
        # is pushed forward.
        if self.refresh_cookie:
            self.set_secure_cookie("login",
                                   pickle.dumps((user.username,
                                                 password,
                                                 make_timestamp())),
                                   expires_days=None)

        return participation
 def get_user_info(self, user):
     """Assemble the public profile information for *user*.

     return (dict): username, access level, registration timestamp,
         md5 mail hash, post count and score.

     """
     return {
         'username': user.username,
         'access_level': user.access_level,
         'join_date': make_timestamp(user.registration_time),
         'mail_hash': self.hash(user.email, 'md5'),
         'post_count': len(user.posts),
         'score': user.score,
     }
Beispiel #22
0
 def add_announcement(self, subject, text, timestamp, contest=None):
     """Add an announcement and return its expected JSON representation."""
     where = contest if contest is not None else self.contest
     super().add_announcement(subject=subject,
                              text=text,
                              timestamp=self.at(timestamp),
                              contest=where)
     return {"type": "announcement",
             "subject": subject,
             "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #23
0
 def add_message(self, subject, text, timestamp, participation=None):
     """Add a message and return its expected JSON representation."""
     who = participation if participation is not None else self.participation
     super().add_message(subject=subject,
                         text=text,
                         timestamp=self.at(timestamp),
                         participation=who)
     return {"type": "message",
             "subject": subject,
             "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #24
0
 def set_answer(self, q, subject, text, timestamp):
     """Answer question *q* and return the expected JSON representation."""
     q.reply_subject = subject
     q.reply_text = text
     q.reply_timestamp = self.at(timestamp)
     # If subject and/or text are None, "shift the rest up".
     present = [s for s in (subject, text, "", "") if s is not None]
     subject, text = present[0], present[1]
     return {"type": "question", "subject": subject, "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #25
0
    def _get_cookie(self, admin_id, remote_addr):
        """Build the serialized secure cookie for the given admin.

        admin_id (int): id to save in the cookie.
        remote_addr (unicode): ip of the host making the request.

        return (bytes): secure cookie embedding the admin id, the
            client address and the current time.

        """
        payload = {"id": admin_id,
                   "ip": remote_addr,
                   "timestamp": make_timestamp()}
        cookie = JSONSecureCookie(payload, config.secret_key)
        return cookie.serialize()
Beispiel #26
0
 def get_user_info(self, user):
     """Assemble the public profile information for *user*.

     return (dict): profile data drawn from the user and its linked
         social_user record.

     """
     return {
         'username': user.username,
         'access_level': user.social_user.access_level,
         'join_date': make_timestamp(user.social_user.registration_time),
         'mail_hash': self.hash(user.email, 'md5'),
         # post_count intentionally omitted (was: len(user.posts)).
         'score': user.social_user.score,
         'institute': self.get_institute_info(user.social_user.institute_id),
         'first_name': user.first_name,
         'last_name': user.last_name,
         # Not computed here; callers get a sentinel.
         'tasks_solved': -1,
     }
Beispiel #27
0
 def get_user_info(self, user):
     """Assemble the public profile information for *user*.

     return (dict): username, access level, join date, mail hash, post
         count, score, institute info and names.

     """
     return {
         "username": user.username,
         "access_level": user.access_level,
         "join_date": make_timestamp(user.registration_time),
         "mail_hash": self.hash(user.email, "md5"),
         "post_count": len(user.posts),
         "score": user.score,
         "institute": self.get_institute_info(user.institute),
         "first_name": user.first_name,
         "last_name": user.last_name,
         # Not computed here; callers get a sentinel.
         "tasks_solved": -1,
     }
Beispiel #28
0
    def get_status(self):
        """Return the content of the queue. Note that the order may be not
        correct, but the first element is the one at the top.

        return ([QueueEntry]): a list of entries containing the
            representation of the item, the priority and the
            timestamp.

        """
        status = []
        for entry in self._queue:
            status.append({'item': entry.item.to_dict(),
                           'priority': entry.priority,
                           'timestamp': make_timestamp(entry.timestamp)})
        return status
Beispiel #29
0
 def add_message(self, subject, text, timestamp, participation=None):
     """Store a message and return the notification dict expected for it."""
     target = self.participation if participation is None else participation
     super().add_message(subject=subject, text=text,
                         timestamp=self.at(timestamp), participation=target)
     return {"type": "message", "subject": subject, "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #30
0
 def set_answer(self, q, subject, text, timestamp):
     """Answer question *q* and return the notification dict expected for it."""
     q.reply_subject = subject
     q.reply_text = text
     q.reply_timestamp = self.at(timestamp)
     # If subject and/or text are None, "shift the rest up".
     filled = iter(s for s in (subject, text, "", "") if s is not None)
     subject = next(filled)
     text = next(filled)
     return {"type": "question", "subject": subject, "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #31
0
 def add_announcement(self, subject, text, timestamp, contest=None):
     """Store an announcement and return the notification dict expected for it."""
     target = self.contest if contest is None else contest
     super().add_announcement(subject=subject, text=text,
                              timestamp=self.at(timestamp), contest=target)
     return {"type": "announcement", "subject": subject, "text": text,
             "timestamp": make_timestamp(self.timestamp) + timestamp}
Beispiel #32
0
    def send_score(self, submission):
        """Send the score for the given submission to all rankings.

        Put the submission and its score subchange in all the proxy
        queues for them to be sent to rankings.

        submission (Submission): the submission whose score has to be
            forwarded.

        """
        result = submission.get_result()

        ts = make_timestamp(submission.timestamp)
        submission_key = str(submission.id)

        # Payload describing the submission itself.
        submission_payload = {
            "user": encode_id(submission.user.username),
            "task": encode_id(submission.task.name),
            "time": int(ts)}

        # The subchange id embeds the timestamp, the submission id and
        # a trailing "s" (for "score").
        subchange_key = "%d%ss" % (ts, submission_key)
        subchange_payload = {
            "submission": submission_key,
            "time": int(ts)}

        # XXX This check is probably useless.
        if result is not None and result.scored():
            # We're sending the unrounded score to RWS
            subchange_payload["score"] = result.score
            subchange_payload["extra"] = \
                json.loads(result.ranking_score_details)

        # Enqueue one submission record and one subchange record for
        # every configured ranking.
        for ranking in self.rankings:
            ranking.data_queue.put((ranking.SUBMISSION_TYPE,
                                    {submission_key: submission_payload}))
            ranking.data_queue.put((ranking.SUBCHANGE_TYPE,
                                    {subchange_key: subchange_payload}))

        self.scores_sent_to_rankings.add(submission.id)
Beispiel #33
0
    def send_score(self, submission):
        """Send the score for the given submission to all rankings.

        Put the submission and its score subchange in all the proxy
        queues for them to be sent to rankings.

        submission (Submission): the submission whose score has to be
            forwarded.

        """
        submission_result = submission.get_result()

        # Data to send to remote rankings.
        submission_id = str(submission.id)
        submission_data = {
            "user": encode_id(submission.user.username),
            "task": encode_id(submission.task.name),
            "time": int(make_timestamp(submission.timestamp))}

        # The subchange id is the timestamp, the submission id and a
        # trailing "s" (for "score") concatenated.
        subchange_id = "%d%ss" % (make_timestamp(submission.timestamp),
                                  submission_id)
        subchange_data = {
            "submission": submission_id,
            "time": int(make_timestamp(submission.timestamp))}

        # XXX This check is probably useless.
        if submission_result is not None and submission_result.scored():
            # We're sending the unrounded score to RWS
            subchange_data["score"] = submission_result.score
            subchange_data["extra"] = \
                json.loads(submission_result.ranking_score_details)

        # Adding operations to the queue: one submission record and one
        # subchange record per configured ranking.
        for ranking in self.rankings:
            ranking.data_queue.put((ranking.SUBMISSION_TYPE,
                                    {submission_id: submission_data}))
            ranking.data_queue.put((ranking.SUBCHANGE_TYPE,
                                    {subchange_id: subchange_data}))

        # Remember that this submission's score has been enqueued.
        self.scores_sent_to_rankings.add(submission.id)
Beispiel #34
0
    def _get_cookie(self, admin_id, remote_addr):
        """Return the cookie for the given admin.

        admin_id (int): id to save in the cookie.
        remote_addr (unicode): ip of the host making the request.

        return (bytes): secure cookie for the given admin id and the
            current time.

        """
        payload = dict(id=admin_id,
                       ip=remote_addr,
                       timestamp=make_timestamp())
        cookie = JSONSecureCookie(payload, config.secret_key)
        return cookie.serialize()
Beispiel #35
0
 def talk_handler(self):
     """Serve the "talk" API for the logged-in user.

     Dispatches on local.data["action"]:
       - "list": fill local.resp with the user's talks (only those
         containing at least one private message), newest first, each
         with a preview of the latest message.
       - "get": look up (or lazily create) the talk between the current
         user and the user named in local.data["other"], storing its id
         in local.resp.

     return (str|None): an error string on failure, None on success.
     """
     if local.data["action"] == "list":
         # Talks where the user is either endpoint and that contain at
         # least one private message.
         query = (
             local.session.query(Talk)
             .filter(or_(Talk.sender_id == local.user.id, Talk.receiver_id == local.user.id))
             .filter(Talk.pms.any())
             .order_by(desc(Talk.timestamp))
         )
         talks, local.resp["num"] = self.sliced_query(query)
         local.resp["talks"] = list()
         for t in talks:
             talk = dict()
             talk["sender"] = self.get_user_info(t.sender)
             talk["receiver"] = self.get_user_info(t.receiver)
             talk["id"] = t.id
             talk["timestamp"] = make_timestamp(t.timestamp)
             talk["read"] = t.read
             if len(t.pms) > 0:
                 # Preview of the latest private message, truncated to
                 # at most 100 characters.
                 talk["last_pm_sender"] = t.pms[0].sender.username
                 txt = t.pms[0].text
                 if len(txt) > 100:
                     txt = txt[:97] + "..."
                 talk["last_pm_text"] = txt
             local.resp["talks"].append(talk)
     elif local.data["action"] == "get":
         if local.user is None:
             return "Unauthorized"
         # NOTE(review): `other` is None when no user has that
         # username, yet `other.id` is dereferenced below -- confirm
         # the caller validates the "other" field.
         other = local.session.query(User).filter(User.username == local.data["other"]).first()
         # The talk between the two users, regardless of direction.
         talk = (
             local.session.query(Talk)
             .filter(
                 or_(
                     and_(Talk.sender_id == local.user.id, Talk.receiver_id == other.id),
                     and_(Talk.sender_id == other.id, Talk.receiver_id == local.user.id),
                 )
             )
             .first()
         )
         if talk is None:
             # First contact between the two users: create the talk.
             talk = Talk(timestamp=make_datetime())
             talk.sender = local.user
             talk.receiver = other
             local.session.add(talk)
             local.session.commit()
         local.resp["id"] = talk.id
     else:
         return "Bad request"
Beispiel #36
0
 def talk_handler(self):
     """Serve the "talk" API for the logged-in user.

     Dispatches on local.data['action']:
       - 'list': fill local.resp with the user's talks (only those
         containing at least one private message), newest first, each
         with a preview of the latest message.
       - 'get': look up (or lazily create) the talk between the current
         user and the user named in local.data['other'], storing its id
         in local.resp.

     return (str|None): an error string on failure, None on success.
     """
     if local.data['action'] == 'list':
         # Talks where the user is either endpoint and that contain at
         # least one private message.
         query = local.session.query(Talk)\
             .filter(or_(
                 Talk.sender_id == local.user.id,
                 Talk.receiver_id == local.user.id))\
             .filter(Talk.pms.any())\
             .order_by(desc(Talk.timestamp))
         talks, local.resp['num'] = self.sliced_query(query)
         local.resp['talks'] = list()
         for t in talks:
             talk = dict()
             talk['sender'] = self.get_user_info(t.sender)
             talk['receiver'] = self.get_user_info(t.receiver)
             talk['id'] = t.id
             talk['timestamp'] = make_timestamp(t.timestamp)
             talk['read'] = t.read
             if len(t.pms) > 0:
                 # Preview of the latest private message, truncated to
                 # at most 100 characters.
                 talk['last_pm_sender'] = t.pms[0].sender.username
                 txt = t.pms[0].text
                 if len(txt) > 100:
                     txt = txt[:97] + '...'
                 talk['last_pm_text'] = txt
             local.resp['talks'].append(talk)
     elif local.data['action'] == 'get':
         if local.user is None:
             return 'Unauthorized'
         # NOTE(review): `other` is None when no user has that
         # username, yet `other.id` is dereferenced below -- confirm
         # the caller validates the 'other' field.
         other = local.session.query(User)\
             .filter(User.username == local.data['other']).first()
         # The talk between the two users, regardless of direction.
         talk = local.session.query(Talk)\
             .filter(or_(
                 and_(
                     Talk.sender_id == local.user.id,
                     Talk.receiver_id == other.id),
                 and_(
                     Talk.sender_id == other.id,
                     Talk.receiver_id == local.user.id))).first()
         if talk is None:
             # First contact between the two users: create the talk.
             talk = Talk(timestamp=make_datetime())
             talk.sender = local.user
             talk.receiver = other
             local.session.add(talk)
             local.session.commit()
         local.resp['id'] = talk.id
     else:
         return 'Bad request'
Beispiel #37
0
 def pm_handler(self):
     """Serve the private-message API for the logged-in user.

     Dispatches on local.data['action']:
       - 'list': fill local.resp with a page of the talk's private
         messages (oldest first in the response), after checking the
         user belongs to the talk; may mark the talk as read.
       - 'new': append a new private message to the talk and update the
         talk's metadata.

     return (str|None): an error string on failure, None on success.
     """
     if local.data['action'] == 'list':
         query = local.session.query(PrivateMessage)\
             .filter(PrivateMessage.talk_id == local.data['id'])\
             .order_by(desc(PrivateMessage.timestamp))
         pms, local.resp['num'] = self.sliced_query(query)
         talk = local.session.query(Talk)\
             .filter(Talk.id == local.data['id']).first()
         if talk is None:
             return 'Invalid talk'
         if local.user not in (talk.sender, talk.receiver):
             return 'Unauthorized'
         # Viewing the first page marks the talk as read, unless the
         # latest message was sent by the current user themselves.
         if local.data['first'] == 0 and len(talk.pms) and \
            local.user != talk.pms[0].sender:
             talk.read = True
             local.session.commit()
         local.resp['sender'] = talk.sender.username
         local.resp['receiver'] = talk.receiver.username
         local.resp['pms'] = list()
         # The query is newest-first; reverse to return oldest-first.
         for p in reversed(pms):
             pm = dict()
             pm['timestamp'] = make_timestamp(p.timestamp)
             pm['sender'] = self.get_user_info(p.sender)
             pm['text'] = p.text
             local.resp['pms'].append(pm)
     elif local.data['action'] == 'new':
         if local.user is None:
             return 'Unauthorized'
         talk = local.session.query(Talk)\
             .filter(Talk.id == local.data['id']).first()
         if talk is None:
             return 'Not found'
         if 'text' not in local.data or len(local.data['text']) < 1:
             return 'You must enter some text'
         pm = PrivateMessage(text=local.data['text'],
                             timestamp=make_datetime())
         pm.sender_id = local.user.id
         pm.talk = talk
         # Keep talk.sender pointing at the author of the latest
         # message by swapping the endpoints when needed.
         if talk.sender_id != pm.sender_id:
             talk.sender, talk.receiver = talk.receiver, talk.sender
         talk.timestamp = pm.timestamp
         talk.read = False
         local.session.add(pm)
         local.session.commit()
     else:
         return 'Bad request'
Beispiel #38
0
 def pm_handler(self):
     """Serve the private-message API for the logged-in user.

     Dispatches on local.data["action"]:
       - "list": fill local.resp with a page of the talk's private
         messages (oldest first in the response), after checking the
         user belongs to the talk; may mark the talk as read.
       - "new": disabled; always answers "Not anymore".

     return (str|None): an error string on failure, None on success.
     """
     if local.data["action"] == "list":
         query = (
             local.session.query(PrivateMessage)
             .filter(PrivateMessage.talk_id == local.data["id"])
             .order_by(desc(PrivateMessage.timestamp))
         )
         pms, local.resp["num"] = self.sliced_query(query)
         talk = local.session.query(Talk).filter(Talk.id == local.data["id"]).first()
         if talk is None:
             return "Invalid talk"
         if local.user not in (talk.sender, talk.receiver):
             return "Unauthorized"
         # Viewing the first page marks the talk as read, unless the
         # latest message was sent by the current user themselves.
         if local.data["first"] == 0 and len(talk.pms) and local.user != talk.pms[0].sender:
             talk.read = True
             local.session.commit()
         local.resp["sender"] = talk.sender.username
         local.resp["receiver"] = talk.receiver.username
         local.resp["pms"] = list()
         # The query is newest-first; reverse to return oldest-first.
         for p in reversed(pms):
             pm = dict()
             pm["timestamp"] = make_timestamp(p.timestamp)
             pm["sender"] = self.get_user_info(p.sender)
             pm["text"] = p.text
             local.resp["pms"].append(pm)
     elif local.data["action"] == "new":
         return "Not anymore"
         # NOTE: the unconditional return above disables this feature;
         # everything below is unreachable dead code kept for reference.
         if local.user is None:
             return "Unauthorized"
         talk = local.session.query(Talk).filter(Talk.id == local.data["id"]).first()
         if talk is None:
             return "Not found"
         if "text" not in local.data or len(local.data["text"]) < 1:
             return "You must enter some text"
         pm = PrivateMessage(text=local.data["text"], timestamp=make_datetime())
         pm.sender_id = local.user.id
         pm.talk = talk
         if talk.sender_id != pm.sender_id:
             talk.sender, talk.receiver = talk.receiver, talk.sender
         talk.timestamp = pm.timestamp
         talk.read = False
         local.session.add(pm)
         local.session.commit()
     else:
         return "Bad request"
 def forum_handler(self):
     """Serve the forum API.

     Dispatches on local.data['action']:
       - 'list': fill local.resp with every forum visible at the
         current access level, including last-post information.
       - 'new': create a new forum (requires access level <= 1).

     return (str|None): an error string on failure, None on success.
     """
     if local.data['action'] == 'list':
         # Lower access_level values are more privileged: show forums
         # whose required level is at or above the user's.
         forums = local.session.query(Forum)\
             .filter(Forum.access_level >= local.access_level)\
             .order_by(Forum.id).all()
         local.resp['forums'] = []
         for f in forums:
             forum = dict()
             forum['id'] = f.id
             forum['description'] = f.description
             forum['title'] = f.title
             forum['topics'] = f.ntopic
             forum['posts'] = f.npost
             if len(f.topics) > 0:
                 # Summary of the most recently active topic.
                 forum['lastpost'] = {
                     'username':     f.topics[0].last_writer.username,
                     'timestamp':    make_timestamp(f.topics[0].timestamp),
                     'topic_title':  f.topics[0].title,
                     'topic_id':     f.topics[0].id,
                     'num':          f.topics[0].npost
                 }
             local.resp['forums'].append(forum)
     elif local.data['action'] == 'new':
         if local.access_level > 1:
             return 'Unauthorized'
         if local.data['title'] is None or \
            len(local.data['title']) < 4:
             return "forum.title_short"
         if local.data['description'] is None or \
            len(local.data['description']) < 4:
             return "forum.description_short"
         # New forums start empty; access_level 7 is hard-coded --
         # presumably the least visible level, TODO confirm.
         forum = Forum(title=local.data['title'],
                       description=local.data['description'],
                       access_level=7,
                       ntopic=0,
                       npost=0)
         local.session.add(forum)
         local.session.commit()
     else:
         return 'Bad request'
Beispiel #40
0
    def get_status(self):
        """Return a dict with info about the current status of all
        workers.

        return (dict): maps each shard (as a string) to its connection
            state, its pending operations and its starting time.

        """
        result = dict()
        # Iterate the dict directly: a plain dict iteration yields its
        # keys, so the py2-compat iterkeys() helper is unnecessary (the
        # file already uses Python-3-only argument-less super()).
        for shard in self._worker:
            s_time = self._start_time[shard]
            s_time = make_timestamp(s_time) if s_time is not None else None

            # The per-shard operations may be a list of objects exposing
            # to_dict() (serialize each one) or a plain placeholder
            # value, which is passed through unchanged.
            operations = self._operations[shard]
            if isinstance(operations, list):
                operations = [operation.to_dict()
                              for operation in operations]

            result["%d" % shard] = {
                'connected': self._worker[shard].connected,
                'operations': operations,
                'start_time': s_time}
        return result
Beispiel #41
0
    def get(self):
        """Serve the pending communications for the current contestant.

        Writes a JSON list combining the database-backed communications
        (announcements, messages, question answers) with the service's
        in-memory notifications for this user, which are consumed.
        """
        participation = self.current_user

        raw_after = self.get_argument("last_notification", None)
        after = None
        if raw_after is not None:
            after = make_datetime(float(raw_after))

        res = get_communications(self.sql_session, participation,
                                 self.timestamp, after=after)

        # Drain the simple in-memory notifications for this user.
        pending = self.service.notifications
        username = participation.user.username
        if username in pending:
            for notification in pending[username]:
                res.append({"type": "notification",
                            "timestamp": make_timestamp(notification[0]),
                            "subject": notification[1],
                            "text": notification[2],
                            "level": notification[3]})
            del pending[username]

        self.write(json.dumps(res))
Beispiel #42
0
    def _authenticate(self, request):
        """Check if the cookie exists and is valid

        request (werkzeug.wrappers.Request): werkzeug request object.

        return (int|None): admin id in the cookie, if it is valid.

        """
        cookie = JSONSecureCookie.load_cookie(
            request, AWSAuthMiddleware.COOKIE, config.secret_key)

        admin_id = cookie.get("id", None)
        remote_addr = cookie.get("ip", None)
        timestamp = cookie.get("timestamp", None)

        # Every field must be present for the cookie to be valid.
        for value in (admin_id, remote_addr, timestamp):
            if value is None:
                return None

        # The cookie is bound to the IP address it was issued for.
        if remote_addr != request.remote_addr:
            return None

        # Reject cookies older than the configured duration.
        if make_timestamp() - timestamp > config.admin_cookie_duration:
            return None

        return int(admin_id)
Beispiel #43
0
 def forum_handler(self):
     """Serve the forum API.

     Dispatches on local.data["action"]:
       - "list": fill local.resp with every forum visible at the
         current access level, including last-post information.
       - "new": disabled; always answers "Not anymore".

     return (str|None): an error string on failure, None on success.
     """
     if local.data["action"] == "list":
         # Lower access_level values are more privileged: show forums
         # whose required level is at or above the user's.
         forums = (
             local.session.query(Forum).filter(Forum.access_level >= local.access_level).order_by(Forum.id).all()
         )
         local.resp["forums"] = []
         for f in forums:
             forum = dict()
             forum["id"] = f.id
             forum["description"] = f.description
             forum["title"] = f.title
             forum["topics"] = f.ntopic
             forum["posts"] = f.npost
             if len(f.topics) > 0:
                 # Summary of the most recently active topic.
                 forum["lastpost"] = {
                     "username": f.topics[0].last_writer.username,
                     "timestamp": make_timestamp(f.topics[0].timestamp),
                     "topic_title": f.topics[0].title,
                     "topic_id": f.topics[0].id,
                     "num": f.topics[0].npost,
                 }
             local.resp["forums"].append(forum)
     elif local.data["action"] == "new":
         return "Not anymore"
         # NOTE: the unconditional return above disables this feature;
         # everything below is unreachable dead code kept for reference.
         if local.access_level > 1:
             return "Unauthorized"
         if local.data["title"] is None or len(local.data["title"]) < 4:
             return "Title is too short"
         if local.data["description"] is None or len(local.data["description"]) < 4:
             return "Description is too short"
         forum = Forum(
             title=local.data["title"], description=local.data["description"], access_level=7, ntopic=0, npost=0
         )
         local.session.add(forum)
         local.session.commit()
     else:
         return "Bad request"
Beispiel #44
0
def get_communications(sql_session, participation, timestamp, after=None):
    """Retrieve some contestant's communications at some given time.

    Collect the admin-to-contestant communications (announcements,
    messages and answers to questions) addressed to the given
    contestant that occurred up to and including the given time,
    optionally skipping those at or before `after`. The result is
    JSON-compatible (numbers, strings, lists and dicts only).

    sql_session (Session): the SQLAlchemy database session to use.
    participation (Participation): the participation of the user whose
        communications are to be returned.
    timestamp (datetime): the snapshot moment: communications recorded
        with a later timestamp are ignored.
    after (datetime|None): if not None, also ignore communications
        received at or before this moment.

    return ([dict]): for each communication a dict with 4 fields:
        type (either "announcement", "message" or "question"), subject,
        text and timestamp (seconds since the UNIX epoch, as a float).

    """
    res = list()

    # Announcements for the contestant's contest.
    announcements = sql_session.query(Announcement) \
        .filter(Announcement.contest == participation.contest) \
        .filter(Announcement.timestamp <= timestamp)
    if after is not None:
        announcements = announcements.filter(Announcement.timestamp > after)
    res.extend({"type": "announcement",
                "timestamp": make_timestamp(a.timestamp),
                "subject": a.subject,
                "text": a.text}
               for a in announcements.all())

    # Private messages addressed to this contestant.
    messages = sql_session.query(Message) \
        .filter(Message.participation == participation) \
        .filter(Message.timestamp <= timestamp)
    if after is not None:
        messages = messages.filter(Message.timestamp > after)
    res.extend({"type": "message",
                "timestamp": make_timestamp(m.timestamp),
                "subject": m.subject,
                "text": m.text}
               for m in messages.all())

    # Answered questions, filtered on the reply's timestamp.
    questions = sql_session.query(Question) \
        .filter(Question.participation == participation) \
        .filter(Question.reply_timestamp.isnot(None)) \
        .filter(Question.reply_timestamp <= timestamp)
    if after is not None:
        questions = questions.filter(Question.reply_timestamp > after)
    for question in questions.all():
        subject = question.reply_subject
        # Normalize missing parts: a None text becomes empty, and a
        # missing subject is replaced by the text ("shift up").
        text = question.reply_text if question.reply_text is not None else ""
        if subject is None:
            subject, text = text, ""
        res.append({"type": "question",
                    "timestamp": make_timestamp(question.reply_timestamp),
                    "subject": subject,
                    "text": text})

    return res
Beispiel #45
0
    def export_object(self, obj):
        """Export the given object, returning a JSON-encodable dict.

        The returned dict will contain a "_class" item (the name of the
        class of the given object), an item for each column property
        (with a value properly translated to a JSON-compatible type)
        and an item for each relationship property (which will be an ID
        or a collection of IDs).

        The IDs used in the exported dict aren't related to the ones
        used in the DB: they are newly generated and their scope is
        limited to the exported file only. They are shared among all
        classes (that is, two objects can never share the same ID, even
        if they are of different classes).

        If, when exporting the relationship, we find an object without
        an ID we generate a new ID, assign it to the object and append
        the object to the queue of objects to export.

        The self.skip_submissions flag controls whether we export
        submissions (and all other objects that can be reached only by
        passing through a submission) or not.

        obj (object): the mapped object to export.

        return (dict): the JSON-encodable representation of obj.

        raise (RuntimeError): on an unknown column or relationship type.

        """
        cls = type(obj)

        data = {"_class": cls.__name__}

        # Serialize each column property to a JSON-compatible value.
        for prp in cls._col_props:
            col, = prp.columns
            col_type = type(col.type)

            val = getattr(obj, prp.key)
            if col_type in \
                    [Boolean, Integer, Float, Unicode, RepeatedUnicode, Enum]:
                data[prp.key] = val
            elif col_type is String:
                data[prp.key] = \
                    val.decode('latin1') if val is not None else None
            elif col_type is DateTime:
                data[prp.key] = \
                    make_timestamp(val) if val is not None else None
            elif col_type is Interval:
                data[prp.key] = \
                    val.total_seconds() if val is not None else None
            else:
                raise RuntimeError("Unknown SQLAlchemy column type: %s" %
                                   col_type)

        # Serialize each relationship property to an ID (or a list/dict
        # of IDs), skipping classes excluded by the skip_* flags.
        for prp in cls._rel_props:
            other_cls = prp.mapper.class_

            # Skip submissions if requested
            if self.skip_submissions and other_cls is Submission:
                continue

            # Skip user_tests if requested
            if self.skip_user_tests and other_cls is UserTest:
                continue

            # Skip generated data if requested
            if self.skip_generated and other_cls in (SubmissionResult,
                                                     UserTestResult):
                continue

            val = getattr(obj, prp.key)
            if val is None:
                data[prp.key] = None
            elif isinstance(val, other_cls):
                data[prp.key] = self.get_id(val)
            elif isinstance(val, list):
                data[prp.key] = [self.get_id(i) for i in val]
            elif isinstance(val, dict):
                # Bug fix: dict.iteritems() does not exist on Python 3
                # (and .items() works on Python 2 as well).
                data[prp.key] = \
                    {k: self.get_id(v) for k, v in val.items()}
            else:
                raise RuntimeError("Unknown SQLAlchemy relationship type: %s" %
                                   type(val))

        return data
Beispiel #46
0
def validate_login(sql_session, contest, timestamp, username, password,
                   ip_address):
    """Authenticate a user logging in, with username and password.

    Given the information the user provided (the username and the
    password) and some context information (contest, to determine which
    users are allowed to log in, how and with which restrictions;
    timestamp for cookie creation; IP address to check against) try to
    authenticate the user and return its participation and the cookie
    to set to help authenticate future visits.

    After finding the participation, IP login and hidden users
    restrictions are checked.

    sql_session (Session): the SQLAlchemy database session used to
        execute queries.
    contest (Contest): the contest the user is trying to access.
    timestamp (datetime): the date and the time of the request.
    username (str): the username the user provided.
    password (str): the password the user provided.
    ip_address (IPv4Address|IPv6Address): the IP address the request
        came from.

    return ((Participation, bytes)|(None, None)): if the user couldn't
        be authenticated then return None, otherwise return the
        participation that they wanted to authenticate as; if a cookie
        has to be set return it as well, otherwise return None.

    """
    def log_failed_attempt(msg, *args):
        logger.info(
            "Unsuccessful login attempt from IP address %s, as user "
            "%r, on contest %s, at %s: " + msg, ip_address, username,
            contest.name, timestamp, *args)

    if not contest.allow_password_authentication:
        log_failed_attempt("password authentication not allowed")
        return None, None

    participation = sql_session.query(Participation) \
        .join(Participation.user) \
        .options(contains_eager(Participation.user)) \
        .filter(Participation.contest == contest)\
        .filter(User.username == username)\
        .first()

    user = sql_session.query(User).filter(User.username == username).first()
    if participation is None:
        if user is None:
            # Bug fix: this previously called the misspelled name
            # `log_failed_attemp`, raising NameError on this path.
            log_failed_attempt("user not found")
            return None, None
        else:
            # The user exists but never joined this contest: create the
            # participation on the fly.
            participation = Participation(user=user,
                                          contest=contest,
                                          team=None)
            sql_session.add(participation)
            sql_session.commit()

    if not safe_validate_password(participation, password):
        log_failed_attempt("wrong password")
        return None, None

    if contest.ip_restriction and participation.ip is not None \
            and not any(ip_address in network for network in participation.ip):
        log_failed_attempt("unauthorized IP address")
        return None, None

    if contest.block_hidden_participations and participation.hidden:
        log_failed_attempt("participation is hidden and unauthorized")
        return None, None

    logger.info(
        "Successful login attempt from IP address %s, as user %r, on "
        "contest %s, at %s", ip_address, username, contest.name, timestamp)

    return (participation,
            json.dumps([username, password,
                        make_timestamp(timestamp)]).encode("utf-8"))
Beispiel #47
0
def accept_user_test(sql_session, file_cacher, participation, task, timestamp,
                     tornado_files, language_name):
    """Process a contestant's request to submit a user test.

    sql_session (Session): the DB session to use to fetch and add data.
    file_cacher (FileCacher): the file cacher to use to store the files.
    participation (Participation): the contestant who is submitting.
    task (Task): the task on which they are submitting.
    timestamp (datetime): the moment in time they submitted at.
    tornado_files ({str: [tornado.httputil.HTTPFile]}): the files they
        sent in.
    language_name (str|None): the language they declared their files are
        in (None means unknown and thus auto-detect).

    return (UserTest): the resulting user test, if all went well.

    raise (TestingNotAllowed): if the task doesn't allow for any tests.
    raise (UnacceptableUserTest): if the contestant wasn't allowed to
        hand in a user test, if the provided data was invalid, if there
        were critical failures in the process.

    """
    contest = participation.contest
    assert task.contest is contest

    # Check whether the task is testable.

    task_type = task.active_dataset.task_type_object
    if not task_type.testable:
        raise TestingNotAllowed()

    # Check whether the contestant is allowed to send a test.
    # Both per-contest and per-task limits are enforced, first on the
    # total number of tests, then on the minimum interval between them.

    if not check_max_number(sql_session,
                            contest.max_user_test_number,
                            participation,
                            contest=contest,
                            cls=UserTest):
        raise UnacceptableUserTest(
            N_("Too many tests!"),
            N_("You have reached the maximum limit of "
               "at most %d tests among all tasks.") %
            contest.max_user_test_number)

    if not check_max_number(sql_session,
                            task.max_user_test_number,
                            participation,
                            task=task,
                            cls=UserTest):
        raise UnacceptableUserTest(
            N_("Too many tests!"),
            N_("You have reached the maximum limit of "
               "at most %d tests on this task.") % task.max_user_test_number)

    if not check_min_interval(sql_session,
                              contest.min_user_test_interval,
                              timestamp,
                              participation,
                              contest=contest,
                              cls=UserTest):
        raise UnacceptableUserTest(
            N_("Tests too frequent!"),
            N_("Among all tasks, you can test again "
               "after %d seconds from last test.") %
            contest.min_user_test_interval.total_seconds())

    if not check_min_interval(sql_session,
                              task.min_user_test_interval,
                              timestamp,
                              participation,
                              task=task,
                              cls=UserTest):
        raise UnacceptableUserTest(
            N_("Tests too frequent!"),
            N_("For this task, you can test again "
               "after %d seconds from last test.") %
            task.min_user_test_interval.total_seconds())

    # Process the data we received and ensure it's valid.

    # A user test needs the task's submission files, the task type's
    # user managers, and an input file.
    required_codenames = set(task.submission_format)
    required_codenames.update(task_type.get_user_managers())
    required_codenames.add("input")

    try:
        received_files = extract_files_from_tornado(tornado_files)
    except InvalidArchive:
        raise UnacceptableUserTest(
            N_("Invalid archive format!"),
            N_("The submitted archive could not be opened."))

    try:
        files, language = match_files_and_language(received_files,
                                                   language_name,
                                                   required_codenames,
                                                   contest.languages)
    except InvalidFilesOrLanguage:
        raise UnacceptableUserTest(N_("Invalid test format!"),
                                   N_("Please select the correct files."))

    # Fill in missing files from a previous submission, but only when
    # the task type allows partial submissions; otherwise reject.
    # NOTE(review): iterkeys/iteritems look like six py2/py3 helpers
    # imported elsewhere in this file -- confirm.
    digests = dict()
    missing_codenames = required_codenames.difference(iterkeys(files))
    if len(missing_codenames) > 0:
        if task.active_dataset.task_type_object.ALLOW_PARTIAL_SUBMISSION:
            digests = fetch_file_digests_from_previous_submission(
                sql_session,
                participation,
                task,
                language,
                missing_codenames,
                cls=UserTest)
        else:
            raise UnacceptableUserTest(N_("Invalid test format!"),
                                       N_("Please select the correct files."))

    # An input file must be available, either freshly sent or recovered
    # from a previous submission.
    if "input" not in files and "input" not in digests:
        raise UnacceptableUserTest(N_("Invalid test format!"),
                                   N_("Please select the correct files."))

    # Enforce size limits: one for source files, a separate one for the
    # input file.
    if any(
            len(content) > config.max_submission_length
            for codename, content in iteritems(files) if codename != "input"):
        raise UnacceptableUserTest(
            N_("Test too big!"),
            N_("Each source file must be at most %d bytes long.") %
            config.max_submission_length)
    if "input" in files and len(files["input"]) > config.max_input_length:
        raise UnacceptableUserTest(
            N_("Input too big!"),
            N_("The input file must be at most %d bytes long.") %
            config.max_input_length)

    # All checks done, submission accepted.

    # Best-effort local backup of the files: a failure is only logged.
    if config.tests_local_copy:
        try:
            store_local_copy(config.tests_local_copy_path, participation, task,
                             timestamp, files)
        except StorageFailed:
            logger.error("Test local copy failed.", exc_info=True)

    # We now have to send all the files to the destination...
    try:
        for codename, content in iteritems(files):
            digest = file_cacher.put_file_content(
                content, "Test file %s sent by %s at %d." %
                (codename, participation.user.username,
                 make_timestamp(timestamp)))
            digests[codename] = digest

    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s", error)
        raise UnacceptableUserTest(N_("Test storage failed!"),
                                   N_("Please try again."))

    # All the files are stored, ready to submit!
    logger.info("All files stored for test sent by %s",
                participation.user.username)

    user_test = UserTest(
        timestamp=timestamp,
        language=language.name if language is not None else None,
        input=digests["input"],
        participation=participation,
        task=task)
    sql_session.add(user_test)

    # Attach every stored file to the user test, as a submission file
    # or as a user manager depending on its codename.
    for codename, digest in iteritems(digests):
        if codename == "input":
            continue
        if codename in task.submission_format:
            sql_session.add(
                UserTestFile(filename=codename,
                             digest=digest,
                             user_test=user_test))
        else:  # codename in task_type.get_user_managers()
            if language is not None:
                # Resolve the ".%l" placeholder to the language's
                # source extension.
                extension = language.source_extension
                filename = codename.replace(".%l", extension)
            else:
                filename = codename
            sql_session.add(
                UserTestManager(filename=filename,
                                digest=digest,
                                user_test=user_test))

    return user_test
Beispiel #48
0
def accept_submission(sql_session, file_cacher, participation, task, timestamp,
                      tornado_files, language_name, official):
    """Handle a contestant's request to hand in a submission.

    Validate all the data received with the request and, when every
    check passes, store the files, record the submission in the
    database and return it.

    sql_session (Session): the DB session to use to fetch and add data.
    file_cacher (FileCacher): the file cacher to use to store the files.
    participation (Participation): the contestant who is submitting.
    task (Task): the task on which they are submitting.
    timestamp (datetime): the moment in time they submitted at.
    tornado_files ({str: [tornado.httputil.HTTPFile]}): the files they
        sent in.
    language_name (str|None): the language they declared their files are
        in (None means unknown and thus auto-detect).
    official (bool): whether the submission was sent in during a regular
        contest phase (and should be counted towards the score/rank) or
        during the analysis mode.

    return (Submission): the resulting submission, if all went well.

    raise (UnacceptableSubmission): if the contestant wasn't allowed to
        hand in a submission, if the provided data was invalid, if there
        were critical failures in the process.

    """
    contest = participation.contest
    assert task.contest is contest

    # Enforce the submission count limits: contest-wide first, then
    # per-task.
    if not check_max_number(sql_session, contest.max_submission_number,
                            participation, contest=contest):
        raise UnacceptableSubmission(
            N_("Too many submissions!"),
            N_("You have reached the maximum limit of "
               "at most %d submissions among all tasks.") %
            contest.max_submission_number)

    if not check_max_number(sql_session, task.max_submission_number,
                            participation, task=task):
        raise UnacceptableSubmission(
            N_("Too many submissions!"),
            N_("You have reached the maximum limit of "
               "at most %d submissions on this task.") %
            task.max_submission_number)

    # Enforce the minimum delay between two consecutive submissions,
    # again contest-wide first and then per-task.
    if not check_min_interval(sql_session, contest.min_submission_interval,
                              timestamp, participation, contest=contest):
        raise UnacceptableSubmission(
            N_("Submissions too frequent!"),
            N_("Among all tasks, you can submit again "
               "after %d seconds from last submission.") %
            contest.min_submission_interval.total_seconds())

    if not check_min_interval(sql_session, task.min_submission_interval,
                              timestamp, participation, task=task):
        raise UnacceptableSubmission(
            N_("Submissions too frequent!"),
            N_("For this task, you can submit again "
               "after %d seconds from last submission.") %
            task.min_submission_interval.total_seconds())

    # Parse the received data and make sure it is well-formed.
    required_codenames = set(task.submission_format)

    try:
        received_files = extract_files_from_tornado(tornado_files)
    except InvalidArchive:
        raise UnacceptableSubmission(
            N_("Invalid archive format!"),
            N_("The submitted archive could not be opened."))

    try:
        files, language = match_files_and_language(
            received_files, language_name, required_codenames,
            contest.languages)
    except InvalidFilesOrLanguage:
        raise UnacceptableSubmission(N_("Invalid submission format!"),
                                     N_("Please select the correct files."))

    # Codenames the contestant did not provide: recover their digests
    # from a previous submission when the task type allows partial
    # submissions, otherwise reject the request.
    digests = {}
    missing_codenames = required_codenames.difference(iterkeys(files))
    if missing_codenames:
        if task.active_dataset.task_type_object.ALLOW_PARTIAL_SUBMISSION:
            digests = fetch_file_digests_from_previous_submission(
                sql_session, participation, task, language, missing_codenames)
        else:
            raise UnacceptableSubmission(
                N_("Invalid submission format!"),
                N_("Please select the correct files."))

    # Refuse files that exceed the configured size cap.
    if any(len(content) > config.max_submission_length
           for content in itervalues(files)):
        raise UnacceptableSubmission(
            N_("Submission too big!"),
            N_("Each source file must be at most %d bytes long.") %
            config.max_submission_length)

    # All checks done, submission accepted.

    # Best-effort local backup, useful to recover from storage failures.
    if config.submit_local_copy:
        try:
            store_local_copy(config.submit_local_copy_path, participation,
                             task, timestamp, files)
        except StorageFailed:
            logger.error("Submission local copy failed.", exc_info=True)

    # Ship every received file to the storage backend.
    try:
        for codename, content in iteritems(files):
            digests[codename] = file_cacher.put_file_content(
                content, "Submission file %s sent by %s at %d." %
                (codename, participation.user.username,
                 make_timestamp(timestamp)))

    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s", error)
        raise UnacceptableSubmission(N_("Submission storage failed!"),
                                     N_("Please try again."))

    # All the files are stored, ready to submit!
    logger.info("All files stored for submission sent by %s",
                participation.user.username)

    submission = Submission(
        timestamp=timestamp,
        language=language.name if language is not None else None,
        task=task,
        participation=participation,
        official=official)
    sql_session.add(submission)

    for codename, digest in iteritems(digests):
        sql_session.add(
            File(filename=codename, digest=digest, submission=submission))

    return submission
Beispiel #49
0
def _authenticate_request_from_cookie(sql_session, contest, timestamp, cookie):
    """Return the current participation based on the cookie.

    If a participation can be extracted, the cookie is refreshed.

    sql_session (Session): the SQLAlchemy database session used to
        execute queries.
    contest (Contest): the contest the user is trying to access.
    timestamp (datetime): the date and the time of the request.
    cookie (bytes|None): the cookie the user's browser provided in the
        request (if any).

    return ((Participation, bytes)|(None, None)): the participation
        extracted from the cookie and the cookie to set/refresh, or
        None in case of errors.

    """
    # No cookie at all: nothing to authenticate against.
    if cookie is None:
        logger.info("Unsuccessful cookie authentication: no cookie provided")
        return None, None

    # Decode and unpack the cookie payload: [username, password, ts].
    try:
        cookie = json.loads(cookie.decode("utf-8"))
        username, password = cookie[0], cookie[1]
        last_update = make_datetime(cookie[2])
    except Exception as e:
        # Cookies are stored securely and thus cannot be tampered with:
        # this is either a programming or a configuration error.
        logger.warning("Invalid cookie (%s): %s", e, cookie)
        return None, None

    def log_failed_attempt(msg, *args):
        logger.info(
            "Unsuccessful cookie authentication as %r, returning from "
            "%s, at %s: " + msg, username, last_update, timestamp, *args)

    # Reject cookies older than the configured lifetime.
    if timestamp - last_update > timedelta(seconds=config.cookie_duration):
        log_failed_attempt("cookie expired (lasts %d seconds)",
                           config.cookie_duration)
        return None, None

    # Fetch the participation (with its user eagerly loaded) and make
    # sure it actually exists in this contest.
    participation = (
        sql_session.query(Participation)
        .join(Participation.user)
        .options(contains_eager(Participation.user))
        .filter(Participation.contest == contest)
        .filter(User.username == username)
        .first())
    if participation is None:
        log_failed_attempt("user not registered to contest")
        return None, None

    correct_password = get_password(participation)

    # We compare hashed password because it would be too expensive to
    # re-hash the user-provided plaintext password at every request.
    if password != correct_password:
        log_failed_attempt("wrong password")
        return None, None

    logger.info(
        "Successful cookie authentication as user %r, on contest %s, "
        "returning from %s, at %s", username, contest.name, last_update,
        timestamp)

    # We store the hashed password (if hashing is used) so that the
    # expensive bcrypt hashing doesn't need to be done at every request.
    refreshed_cookie = json.dumps(
        [username, correct_password,
         make_timestamp(timestamp)]).encode("utf-8")
    return participation, refreshed_cookie
Beispiel #50
0
    def post(self, task_name):
        """Accept a user test ("testing") request for the given task.

        Validate the request (test-count and rate limits, file format,
        language, file sizes), store the files, create the UserTest in
        the database and hand it over to the evaluation service.

        task_name (str): the name of the task the test refers to.

        raise (HTTPError): 404 if testing is disabled, the task does
            not exist, or the task type is not testable.

        """
        participation = self.current_user

        if not self.r_params["testing_enabled"]:
            raise tornado.web.HTTPError(404)

        try:
            task = self.contest.get_task(task_name)
        except KeyError:
            raise tornado.web.HTTPError(404)

        self.fallback_page = ["testing"]
        self.fallback_args = {"task_name": task.name}

        # Check that the task is testable
        task_type = get_task_type(dataset=task.active_dataset)
        if not task_type.testable:
            logger.warning("User %s tried to make test on task %s.",
                           participation.user.username, task_name)
            raise tornado.web.HTTPError(404)

        # Alias for easy access
        contest = self.contest

        # Enforce maximum number of user_tests (contest-wide, then
        # per-task); unrestricted users are exempt.
        try:
            if contest.max_user_test_number is not None:
                user_test_c = self.sql_session.query(func.count(UserTest.id))\
                    .join(UserTest.task)\
                    .filter(Task.contest == contest)\
                    .filter(UserTest.participation == participation)\
                    .scalar()
                if user_test_c >= contest.max_user_test_number and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("You have reached the maximum limit of "
                               "at most %d tests among all tasks.") %
                        contest.max_user_test_number)
            if task.max_user_test_number is not None:
                user_test_t = self.sql_session.query(func.count(UserTest.id))\
                    .filter(UserTest.task == task)\
                    .filter(UserTest.participation == participation)\
                    .scalar()
                if user_test_t >= task.max_user_test_number and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("You have reached the maximum limit of "
                               "at most %d tests on this task.") %
                        task.max_user_test_number)
        except ValueError as error:
            self._send_error(self._("Too many tests!"), str(error))
            return

        # Enforce minimum time between user_tests
        try:
            if contest.min_user_test_interval is not None:
                last_user_test_c = self.sql_session.query(UserTest)\
                    .join(UserTest.task)\
                    .filter(Task.contest == contest)\
                    .filter(UserTest.participation == participation)\
                    .order_by(UserTest.timestamp.desc())\
                    .first()
                if last_user_test_c is not None and \
                        self.timestamp - last_user_test_c.timestamp < \
                        contest.min_user_test_interval and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("Among all tasks, you can test again "
                               "after %d seconds from last test.") %
                        contest.min_user_test_interval.total_seconds())
            # We get the last user_test even if we may not need it
            # for min_user_test_interval because we may need it later,
            # in case this is a ALLOW_PARTIAL_SUBMISSION task.
            last_user_test_t = self.sql_session.query(UserTest)\
                .filter(UserTest.participation == participation)\
                .filter(UserTest.task == task)\
                .order_by(UserTest.timestamp.desc())\
                .first()
            if task.min_user_test_interval is not None:
                if last_user_test_t is not None and \
                        self.timestamp - last_user_test_t.timestamp < \
                        task.min_user_test_interval and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("For this task, you can test again "
                               "after %d seconds from last test.") %
                        task.min_user_test_interval.total_seconds())
        except ValueError as error:
            self._send_error(self._("Tests too frequent!"), str(error))
            return

        # Required files from the user: the submission format, the
        # task-type managers, plus the test input.
        required = set([sfe.filename for sfe in task.submission_format] +
                       task_type.get_user_managers(task.submission_format) +
                       ["input"])

        # Ensure that the user did not submit multiple files with the
        # same name.
        if any(
                len(filename) != 1
                for filename in itervalues(self.request.files)):
            self._send_error(self._("Invalid test format!"),
                             self._("Please select the correct files."))
            return

        # If the user submitted an archive, extract it and use content
        # as request.files. But only valid for "output only" (i.e.,
        # not for submissions requiring a programming language
        # identification).
        if len(self.request.files) == 1 and \
                next(iterkeys(self.request.files)) == "submission":
            if any(filename.endswith(".%l") for filename in required):
                self._send_error(self._("Invalid test format!"),
                                 self._("Please select the correct files."),
                                 task)
                return
            archive_data = self.request.files["submission"][0]
            del self.request.files["submission"]

            # Create the archive.
            archive = Archive.from_raw_data(archive_data["body"])

            if archive is None:
                self._send_error(
                    self._("Invalid archive format!"),
                    self._("The submitted archive could not be opened."))
                return

            # Extract the archive.
            # NOTE(review): joining with the basename assumes a flat
            # archive; entries inside subdirectories would not be found
            # at this path — confirm against Archive.unpack's layout.
            unpacked_dir = archive.unpack()
            for name in archive.namelist():
                filename = os.path.basename(name)
                # Use a context manager so the file handle is closed
                # even if read() fails (the previous code leaked it).
                with open(os.path.join(unpacked_dir, filename), "r") as f:
                    body = f.read()
                self.request.files[filename] = [{
                    'filename': filename,
                    'body': body
                }]

            archive.cleanup()

        # This ensure that the user sent one file for every name in
        # submission format and no more. Less is acceptable if task
        # type says so.
        provided = set(iterkeys(self.request.files))
        if not (required == provided or (task_type.ALLOW_PARTIAL_SUBMISSION
                                         and required.issuperset(provided))):
            self._send_error(self._("Invalid test format!"),
                             self._("Please select the correct files."))
            return

        # Add submitted files. After this, files is a dictionary indexed
        # by *our* filenames (something like "output01.txt" or
        # "taskname.%l", and whose value is a couple
        # (user_assigned_filename, content).
        files = {}
        for uploaded, data in iteritems(self.request.files):
            files[uploaded] = (data[0]["filename"], data[0]["body"])

        # Read the submission language provided in the request; we
        # integrate it with the language fetched from the previous
        # submission (if we use it) and later make sure it is
        # recognized and allowed.
        submission_lang = self.get_argument("language", None)
        need_lang = any(
            our_filename.find(".%l") != -1 for our_filename in files)

        # If we allow partial submissions, implicitly we recover the
        # non-submitted files from the previous user test. And put them
        # in file_digests (i.e. like they have already been sent to FS).
        file_digests = {}
        if task_type.ALLOW_PARTIAL_SUBMISSION and \
                last_user_test_t is not None and \
                (submission_lang is None or
                 submission_lang == last_user_test_t.language):
            submission_lang = last_user_test_t.language
            for filename in required.difference(provided):
                if filename in last_user_test_t.files:
                    file_digests[filename] = \
                        last_user_test_t.files[filename].digest

        # Throw an error if task needs a language, but we don't have
        # it or it is not allowed / recognized.
        # BUGFIX: "error" must be initialized unconditionally, since it
        # is checked below even when need_lang is False; previously
        # language-less tasks hit an UnboundLocalError here.
        error = None
        if need_lang:
            if submission_lang is None:
                error = self._("Cannot recognize the user test language.")
            elif submission_lang not in contest.languages:
                error = self._("Language %s not allowed in this contest.") \
                    % submission_lang
        if error is not None:
            self._send_error(self._("Invalid test!"), error)
            return

        # Check if submitted files are small enough.
        if any([
                len(f[1]) > config.max_submission_length
                for n, f in iteritems(files) if n != "input"
        ]):
            self._send_error(
                self._("Test too big!"),
                self._("Each source file must be at most %d bytes long.") %
                config.max_submission_length)
            return
        if len(files["input"][1]) > config.max_input_length:
            self._send_error(
                self._("Input too big!"),
                self._("The input file must be at most %d bytes long.") %
                config.max_input_length)
            return

        # All checks done, submission accepted.

        # Attempt to store the submission locally to be able to
        # recover a failure.
        if config.tests_local_copy:
            try:
                path = os.path.join(
                    config.tests_local_copy_path.replace(
                        "%s", config.data_dir), participation.user.username)
                if not os.path.exists(path):
                    os.makedirs(path)
                # Pickle in ASCII format produces str, not unicode,
                # therefore we open the file in binary mode.
                with io.open(
                        os.path.join(path,
                                     "%d" % make_timestamp(self.timestamp)),
                        "wb") as file_:
                    pickle.dump((self.contest.id, participation.user.id,
                                 task.id, files), file_)
            except Exception:
                # Best-effort only: a failed local copy must not abort
                # the test.
                logger.error("Test local copy failed.", exc_info=True)

        # We now have to send all the files to the destination...
        try:
            for filename in files:
                digest = self.service.file_cacher.put_file_content(
                    files[filename][1], "Test file %s sent by %s at %d." %
                    (filename, participation.user.username,
                     make_timestamp(self.timestamp)))
                file_digests[filename] = digest

        # In case of error, the server aborts the submission
        except Exception as error:
            logger.error("Storage failed! %s", error)
            self._send_error(self._("Test storage failed!"),
                             self._("Please try again."))
            return

        # All the files are stored, ready to submit!
        logger.info("All files stored for test sent by %s",
                    participation.user.username)
        user_test = UserTest(self.timestamp,
                             submission_lang,
                             file_digests["input"],
                             participation=participation,
                             task=task)

        for filename in [sfe.filename for sfe in task.submission_format]:
            digest = file_digests[filename]
            self.sql_session.add(
                UserTestFile(filename, digest, user_test=user_test))
        for filename in task_type.get_user_managers(task.submission_format):
            digest = file_digests[filename]
            if submission_lang is not None:
                extension = get_language(submission_lang).source_extension
                filename = filename.replace(".%l", extension)
            self.sql_session.add(
                UserTestManager(filename, digest, user_test=user_test))

        self.sql_session.add(user_test)
        self.sql_session.commit()
        self.service.evaluation_service.new_user_test(
            user_test_id=user_test.id)
        self.service.add_notification(
            participation.user.username, self.timestamp,
            self._("Test received"),
            self._("Your test has been received "
                   "and is currently being executed."), NOTIFICATION_SUCCESS)

        # The argument (encripted user test id) is not used by CWS
        # (nor it discloses information to the user), but it is useful
        # for automatic testing to obtain the user test id).
        self.redirect(
            self.contest_url(*self.fallback_page,
                             user_test_id=encrypt_number(user_test.id),
                             **self.fallback_args))
Beispiel #51
0
def validate_login(sql_session, contest, timestamp, username, password,
                   ip_address):
    """Authenticate a user logging in, with username and password.

    Given the information the user provided (the username and the
    password) and some context information (contest, to determine which
    users are allowed to log in, how and with which restrictions;
    timestamp for cookie creation; IP address to check against) try to
    authenticate the user and return its participation and the cookie
    to set to help authenticate future visits.

    After finding the participation, IP login and hidden users
    restrictions are checked.

    sql_session (Session): the SQLAlchemy database session used to
        execute queries.
    contest (Contest): the contest the user is trying to access.
    timestamp (datetime): the date and the time of the request.
    username (str): the username the user provided.
    password (str): the password the user provided.
    ip_address (IPv4Address|IPv6Address): the IP address the request
        came from.

    return ((Participation, bytes)|(None, None)): if the user couldn't
        be authenticated then return None, otherwise return the
        participation that they wanted to authenticate as; if a cookie
        has to be set return it as well, otherwise return None.

    """
    def log_failure(msg, *args):
        logger.info(
            "Unsuccessful login attempt from IP address %s, as user "
            "%r, on contest %s, at %s: " + msg, ip_address, username,
            contest.name, timestamp, *args)

    # The contest may forbid this authentication method entirely.
    if not contest.allow_password_authentication:
        log_failure("password authentication not allowed")
        return None, None

    # Look up the participation, eagerly loading its user.
    participation = (
        sql_session.query(Participation)
        .join(Participation.user)
        .options(contains_eager(Participation.user))
        .filter(Participation.contest == contest)
        .filter(User.username == username)
        .first())

    if participation is None:
        log_failure("user not registered to contest")
        return None, None

    stored_password = get_password(participation)

    try:
        password_valid = validate_password(stored_password, password)
    except ValueError as e:
        # A malformed stored password is either a programming or a
        # configuration error.
        logger.warning(
            "Invalid password stored in database for user %s in contest %s: "
            "%s", participation.user.username, participation.contest.name, e)
        return None, None

    if not password_valid:
        log_failure("wrong password")
        return None, None

    # Enforce the optional per-participation IP restriction.
    if contest.ip_restriction and participation.ip is not None \
            and not any(ip_address in network for network in participation.ip):
        log_failure("unauthorized IP address")
        return None, None

    # Hidden participations may be barred from logging in.
    if contest.block_hidden_participations and participation.hidden:
        log_failure("participation is hidden and unauthorized")
        return None, None

    logger.info(
        "Successful login attempt from IP address %s, as user %r, on "
        "contest %s, at %s", ip_address, username, contest.name, timestamp)

    # If hashing is used, the cookie stores the hashed password so that
    # the expensive bcrypt call doesn't need to be done at every request.
    login_cookie = json.dumps(
        [username, stored_password,
         make_timestamp(timestamp)]).encode("utf-8")
    return participation, login_cookie
Beispiel #52
0
    def submission_handler(self):
        if local.data['action'] == 'list':
            task = local.session.query(Task)\
                .filter(Task.name == local.data['task_name']).first()
            if task is None:
                return 'Not found'
            if local.user is None:
                return 'Unauthorized'
            subs = local.session.query(Submission)\
                .filter(Submission.participation_id == local.participation.id)\
                .filter(Submission.task_id == task.id)\
                .order_by(desc(Submission.timestamp)).all()
            submissions = []
            for s in subs:
                submission = dict()
                submission['id'] = s.id
                submission['task_id'] = s.task_id
                submission['timestamp'] = make_timestamp(s.timestamp)
                submission['files'] = []
                for name, f in s.files.iteritems():
                    fi = dict()
                    if s.language is None:
                        fi['name'] = name
                    else:
                        fi['name'] = name.replace('%l', s.language)
                    fi['digest'] = f.digest
                    submission['files'].append(fi)
                result = s.get_result()
                for i in ['compilation_outcome', 'evaluation_outcome']:
                    submission[i] = getattr(result, i, None)
                if result is not None and result.score is not None:
                    submission['score'] = round(result.score, 2)
                submissions.append(submission)
            local.resp['submissions'] = submissions
        elif local.data['action'] == 'details':
            s = local.session.query(Submission)\
                .filter(Submission.id == local.data['id']).first()
            if s is None:
                return 'Not found'
            if local.user is None or s.participation_id != local.participation.id:
                return 'Unauthorized'
            submission = dict()
            submission['id'] = s.id
            submission['task_id'] = s.task_id
            submission['timestamp'] = make_timestamp(s.timestamp)
            submission['language'] = s.language
            submission['files'] = []
            for name, f in s.files.iteritems():
                fi = dict()
                if s.language is None:
                    fi['name'] = name
                else:
                    fi['name'] = name.replace('%l', s.language)
                fi['digest'] = f.digest
                submission['files'].append(fi)
            result = s.get_result()
            for i in ['compilation_outcome', 'evaluation_outcome',
                      'compilation_stdout', 'compilation_stderr',
                      'compilation_time', 'compilation_memory']:
                submission[i] = getattr(result, i, None)
            if result is not None and result.score is not None:
                submission['score'] = round(result.score, 2)
            if result is not None and result.score_details is not None:
                tmp = json.loads(result.score_details)
                if len(tmp) > 0 and 'text' in tmp[0]:
                    subt = dict()
                    subt['testcases'] = tmp
                    subt['score'] = submission['score']
                    subt['max_score'] = 100
                    submission['score_details'] = [subt]
                else:
                    submission['score_details'] = tmp
                for subtask in submission['score_details']:
                    for testcase in subtask['testcases']:
                        data = json.loads(testcase['text'])
                        testcase['text'] = data[0] % tuple(data[1:])
            else:
                submission['score_details'] = None
            local.resp = submission
        elif local.data['action'] == 'new':
            if local.user is None:
                return 'Unauthorized'
            lastsub = local.session.query(Submission)\
                .filter(Submission.participation_id == local.participation.id)\
                .order_by(desc(Submission.timestamp)).first()
            if lastsub is not None and \
               make_datetime() - lastsub.timestamp < timedelta(seconds=20):
                return 'Too frequent submissions!'

            try:
                task = local.session.query(Task)\
                    .join(SocialTask)\
                    .filter(Task.name == local.data['task_name'])\
                    .filter(SocialTask.access_level >= local.access_level).first()
            except KeyError:
                return 'Not found'

            def decode_file(f):
                f['data'] = f['data'].split(',')[-1]
                f['body'] = b64decode(f['data'])
                del f['data']
                return f

            if len(local.data['files']) == 1 and \
               'submission' in local.data['files']:
                archive_data = decode_file(local.data['files']['submission'])
                del local.data['files']['submission']

                # Create the archive.
                archive = Archive.from_raw_data(archive_data["body"])

                if archive is None:
                    return 'Invalid archive!'

                # Extract the archive.
                unpacked_dir = archive.unpack()
                for name in archive.namelist():
                    filename = os.path.basename(name)
                    body = open(os.path.join(unpacked_dir, filename), "r").read()
                    local.data['files'][filename] = {
                        'filename': filename,
                        'body': body
                    }

                files_sent = local.data['files']

                archive.cleanup()
            else:
                files_sent = \
                    dict([(k, decode_file(v))
                          for k, v in local.data['files'].iteritems()])

            # TODO: implement partial submissions (?)

            # Detect language
            files = []
            sub_lang = None
            for sfe in task.submission_format:
                f = files_sent.get(sfe.filename)
                if f is None:
                    return 'Some files are missing!'
                if len(f['body']) > config.get("core", "max_submission_length"):
                    return 'The files you sent are too big!'
                f['name'] = sfe.filename
                files.append(f)
                if sfe.filename.endswith('.%l'):
                    language = None
                    for ext, l in SOURCE_EXT_TO_LANGUAGE_MAP.iteritems():
                        if f['filename'].endswith(ext):
                            language = l
                    if language is None:
                        return 'The language of the files you sent is not ' + \
                               'recognized!'
                    elif sub_lang is not None and sub_lang != language:
                        return 'The files you sent are in different languages!'
                    else:
                        sub_lang = language

            # Add the submission
            timestamp = make_datetime()
            submission = Submission(timestamp,
                                    sub_lang,
                                    participation=local.participation,
                                    task=task)
            for f in files:
                digest = self.file_cacher.put_file_content(
                    f['body'],
                    'Submission file %s sent by %s at %d.' % (
                        f['name'], local.user.username,
                        make_timestamp(timestamp)))
                local.session.add(File(f['name'],
                                       digest,
                                       submission=submission))
            local.session.add(submission)
            local.session.commit()

            # Notify ES
            self.evaluation_service.new_submission(
                submission_id=submission.id
            )

            # Answer with submission data
            local.resp['id'] = submission.id
            local.resp['task_id'] = submission.task_id
            local.resp['timestamp'] = make_timestamp(submission.timestamp)
            local.resp['compilation_outcome'] = None
            local.resp['evaluation_outcome'] = None
            local.resp['score'] = None
            local.resp['files'] = []
            for name, f in submission.files.iteritems():
                fi = dict()
                if submission.language is None:
                    fi['name'] = name
                else:
                    fi['name'] = name.replace('%l', submission.language)
                fi['digest'] = f.digest
                local.resp['files'].append(fi)
        else:
            return 'Bad request'
 def post_handler(self):
     """Dispatch a forum-post action based on local.data['action'].

     Supported actions:
       - 'list':   list the posts of a topic (paginated via sliced_query);
       - 'new':    create a post in a topic (requires login);
       - 'delete': delete a post, or the whole topic if it is the
                   topic's first post (requires login);
       - 'edit':   replace the text of an existing post (requires login).

     On failure a short error string is returned; on success the
     response payload is accumulated into local.resp.
     """
     if local.data['action'] == 'list':
         topic = local.session.query(Topic)\
             .filter(Topic.id == local.data['topic']).first()
         # Topics in forums above the requester's access level are
         # reported as missing rather than forbidden.
         if topic is None or topic.forum.access_level < local.access_level:
             return 'Not found'
         # Listing the posts counts as one view of the topic.
         topic.nview += 1
         local.session.commit()
         query = local.session.query(Post)\
             .filter(Post.topic_id == topic.id)\
             .order_by(Post.timestamp)
         # sliced_query applies pagination and returns (page, total).
         posts, local.resp['num'] = self.sliced_query(query)
         local.resp['title'] = topic.title
         local.resp['forumId'] = topic.forum.id
         local.resp['forumTitle'] = topic.forum.title
         local.resp['posts'] = []
         for p in posts:
             post = dict()
             post['id'] = p.id
             post['text'] = p.text
             post['timestamp'] = make_timestamp(p.timestamp)
             post['author'] = self.get_user_info(p.author)
             local.resp['posts'].append(post)
     elif local.data['action'] == 'new':
         if local.user is None:
             return 'Unauthorized'
         topic = local.session.query(Topic)\
             .filter(Topic.id == local.data['topic']).first()
         if topic is None or topic.forum.access_level < local.access_level:
             return 'Not found'
         # Reject missing or too-short (< 4 chars) post bodies; the
         # return value is a message key for the client.
         if local.data['text'] is None or len(local.data['text']) < 4:
             return "post.text_short"
         post = Post(text=local.data['text'],
                     timestamp=make_datetime())
         post.author = local.user
         post.topic = topic
         post.forum = topic.forum
         # Bump the topic so it sorts as most recently active, and
         # record who answered last.
         topic.timestamp = post.timestamp
         topic.answered = True
         topic.last_writer = local.user
         local.session.add(post)
         # Recompute the denormalized post counters from the DB
         # instead of incrementing, so they cannot drift.
         topic.forum.npost = local.session.query(Post)\
             .filter(Post.forum_id == topic.forum.id).count()
         topic.npost = local.session.query(Post)\
             .filter(Post.topic_id == topic.id).count()
         local.session.commit()
     elif local.data['action'] == 'delete':
         if local.user is None:
             return 'Unauthorized'
         post = local.session.query(Post)\
             .filter(Post.id == local.data['id']).first()
         if post is None:
             return 'Not found'
         # Only the author, or a user with access_level <= 2
         # (presumably a moderator/admin level — verify against the
         # project's access-level scale), may delete.
         if post.author != local.user and local.user.access_level > 2:
             return 'Unauthorized'
         forum = post.topic.forum
         # Deleting the opening post removes the whole topic;
         # success == 2 signals that to the client.
         if post.topic.posts[0] == post:
             local.session.delete(post.topic)
             local.resp['success'] = 2
         else:
             local.session.delete(post)
             post.topic.npost = local.session.query(Post)\
                 .filter(Post.topic_id == post.topic.id).count()
         # Refresh the forum-wide denormalized counters.
         forum.npost = local.session.query(Post)\
             .filter(Post.forum_id == forum.id).count()
         forum.ntopic = local.session.query(Topic)\
             .filter(Topic.forum_id == forum.id).count()
         local.session.commit()
     elif local.data['action'] == 'edit':
         if local.user is None:
             return 'Unauthorized'
         post = local.session.query(Post)\
             .filter(Post.id == local.data['id']).first()
         if post is None:
             return 'Not found'
         # Same permission rule as 'delete': author or access_level <= 2.
         if post.author != local.user and local.user.access_level > 2:
             return 'Unauthorized'
         if local.data['text'] is None or len(local.data['text']) < 4:
             return 'post.text_short'
         post.text = local.data['text']
         local.session.commit()
     else:
         return 'Bad request'
Beispiel #54
0
    def export_object(self, obj):
        """Export the given object, returning a JSON-encodable dict.

        The returned dict will contain a "_class" item (the name of the
        class of the given object), an item for each column property
        (with a value properly translated to a JSON-compatible type)
        and an item for each relationship property (which will be an ID
        or a collection of IDs).

        The IDs used in the exported dict aren't related to the ones
        used in the DB: they are newly generated and their scope is
        limited to the exported file only. They are shared among all
        classes (that is, two objects can never share the same ID, even
        if they are of different classes).

        If, when exporting the relationship, we find an object without
        an ID we generate a new ID, assign it to the object and append
        the object to the queue of objects to export.

        The self.skip_submissions flag controls whether we export
        submissions (and all other objects that can be reached only by
        passing through a submission) or not.

        """
        cls = type(obj)

        data = {"_class": cls.__name__}

        # Column properties: translate each DB column value into a
        # JSON-compatible type, keyed by the property name.
        for prp in cls._col_props:
            col, = prp.columns
            col_type = type(col.type)

            val = getattr(obj, prp.key)
            if col_type in (Boolean, Integer, Float, Unicode,
                            RepeatedUnicode, Enum):
                # Already JSON-compatible; pass through unchanged.
                data[prp.key] = val
            elif col_type is String:
                # String columns hold raw bytes; decode to unicode.
                data[prp.key] = \
                    val.decode('latin1') if val is not None else None
            elif col_type is DateTime:
                # Datetimes become numeric UNIX timestamps.
                data[prp.key] = \
                    make_timestamp(val) if val is not None else None
            elif col_type is Interval:
                # Intervals become a number of seconds.
                data[prp.key] = \
                    val.total_seconds() if val is not None else None
            else:
                raise RuntimeError("Unknown SQLAlchemy column type: %s"
                                   % col_type)

        # Relationship properties: replace related objects with their
        # export-file IDs (scalar, list or dict, mirroring the
        # relationship's collection type).
        for prp in cls._rel_props:
            other_cls = prp.mapper.class_

            # Skip submissions if requested
            if self.skip_submissions and other_cls is Submission:
                continue

            # Skip user_tests if requested
            if self.skip_user_tests and other_cls is UserTest:
                continue

            # Skip generated data if requested
            if self.skip_generated and other_cls in (SubmissionResult,
                                                     UserTestResult):
                continue

            val = getattr(obj, prp.key)
            if val is None:
                data[prp.key] = None
            elif isinstance(val, other_cls):
                data[prp.key] = self.get_id(val)
            elif isinstance(val, list):
                data[prp.key] = [self.get_id(i) for i in val]
            elif isinstance(val, dict):
                data[prp.key] = \
                    {k: self.get_id(v) for k, v in val.iteritems()}
            else:
                raise RuntimeError("Unknown SQLAlchemy relationship type: %s"
                                   % type(val))

        return data
Beispiel #55
0
    def post(self, task_name):
        """Handle a new submission for the given task.

        Validates the request against contest- and task-level limits
        (maximum number of submissions, minimum interval between
        submissions), unpacks a submitted archive if one was sent,
        checks the submission format and language consistency, stores
        the files in the file cacher, creates the Submission row and
        notifies the evaluation service.

        task_name (unicode): name of the task being submitted to.

        raise (HTTPError): 404 if the task does not exist.

        """
        participation = self.current_user
        try:
            task = self.contest.get_task(task_name)
        except KeyError:
            raise tornado.web.HTTPError(404)

        # Alias for easy access
        contest = self.contest

        # Enforce maximum number of submissions
        try:
            if contest.max_submission_number is not None:
                submission_c = self.sql_session\
                    .query(func.count(Submission.id))\
                    .join(Submission.task)\
                    .filter(Task.contest == contest)\
                    .filter(Submission.participation == participation)\
                    .scalar()
                if submission_c >= contest.max_submission_number and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("You have reached the maximum limit of "
                               "at most %d submissions among all tasks.") %
                        contest.max_submission_number)
            if task.max_submission_number is not None:
                submission_t = self.sql_session\
                    .query(func.count(Submission.id))\
                    .filter(Submission.task == task)\
                    .filter(Submission.participation == participation)\
                    .scalar()
                if submission_t >= task.max_submission_number and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("You have reached the maximum limit of "
                               "at most %d submissions on this task.") %
                        task.max_submission_number)
        except ValueError as error:
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Too many submissions!"), error.message,
                NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # Enforce minimum time between submissions
        try:
            if contest.min_submission_interval is not None:
                last_submission_c = self.sql_session.query(Submission)\
                    .join(Submission.task)\
                    .filter(Task.contest == contest)\
                    .filter(Submission.participation == participation)\
                    .order_by(Submission.timestamp.desc())\
                    .first()
                if last_submission_c is not None and \
                        self.timestamp - last_submission_c.timestamp < \
                        contest.min_submission_interval and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("Among all tasks, you can submit again "
                               "after %d seconds from last submission.") %
                        contest.min_submission_interval.total_seconds())
            # We get the last submission even if we may not need it
            # for min_submission_interval because we may need it later,
            # in case this is a ALLOW_PARTIAL_SUBMISSION task.
            last_submission_t = self.sql_session.query(Submission)\
                .filter(Submission.task == task)\
                .filter(Submission.participation == participation)\
                .order_by(Submission.timestamp.desc())\
                .first()
            if task.min_submission_interval is not None:
                if last_submission_t is not None and \
                        self.timestamp - last_submission_t.timestamp < \
                        task.min_submission_interval and \
                        not self.current_user.unrestricted:
                    raise ValueError(
                        self._("For this task, you can submit again "
                               "after %d seconds from last submission.") %
                        task.min_submission_interval.total_seconds())
        except ValueError as error:
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Submissions too frequent!"), error.message,
                NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # Ensure that the user did not submit multiple files with the
        # same name.
        if any(len(filename) != 1 for filename in self.request.files.values()):
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Invalid submission format!"),
                self._("Please select the correct files."), NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # If the user submitted an archive, extract it and use content
        # as request.files.
        if len(self.request.files) == 1 and \
                self.request.files.keys()[0] == "submission":
            archive_data = self.request.files["submission"][0]
            del self.request.files["submission"]

            # Create the archive.
            archive = Archive.from_raw_data(archive_data["body"])

            if archive is None:
                self.application.service.add_notification(
                    participation.user.username, self.timestamp,
                    self._("Invalid archive format!"),
                    self._("The submitted archive could not be opened."),
                    NOTIFICATION_ERROR)
                self.redirect("/tasks/%s/submissions" %
                              quote(task.name, safe=''))
                return

            # Extract the archive.
            unpacked_dir = archive.unpack()
            for name in archive.namelist():
                filename = os.path.basename(name)
                # Use a context manager so the extracted file is closed
                # even if reading fails (the original leaked the handle).
                with open(os.path.join(unpacked_dir, filename), "r") as f_:
                    body = f_.read()
                self.request.files[filename] = [{
                    'filename': filename,
                    'body': body
                }]

            archive.cleanup()

        # This ensure that the user sent one file for every name in
        # submission format and no more. Less is acceptable if task
        # type says so.
        task_type = get_task_type(dataset=task.active_dataset)
        required = set([sfe.filename for sfe in task.submission_format])
        provided = set(self.request.files.keys())
        if not (required == provided or (task_type.ALLOW_PARTIAL_SUBMISSION
                                         and required.issuperset(provided))):
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Invalid submission format!"),
                self._("Please select the correct files."), NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # Add submitted files. After this, files is a dictionary indexed
        # by *our* filenames (something like "output01.txt" or
        # "taskname.%l", and whose value is a couple
        # (user_assigned_filename, content).
        files = {}
        for uploaded, data in self.request.files.iteritems():
            files[uploaded] = (data[0]["filename"], data[0]["body"])

        # If we allow partial submissions, implicitly we recover the
        # non-submitted files from the previous submission. And put them
        # in file_digests (i.e. like they have already been sent to FS).
        submission_lang = None
        file_digests = {}
        if task_type.ALLOW_PARTIAL_SUBMISSION and \
                last_submission_t is not None:
            for filename in required.difference(provided):
                if filename in last_submission_t.files:
                    # If we retrieve a language-dependent file from
                    # last submission, we take note that language must
                    # be the same.
                    if "%l" in filename:
                        submission_lang = last_submission_t.language
                    file_digests[filename] = \
                        last_submission_t.files[filename].digest

        # We need to ensure that everytime we have a .%l in our
        # filenames, the user has the extension of an allowed
        # language, and that all these are the same (i.e., no
        # mixed-language submissions).

        error = None
        for our_filename in files:
            user_filename = files[our_filename][0]
            if our_filename.find(".%l") != -1:
                lang = filename_to_language(user_filename)
                if lang is None:
                    error = self._("Cannot recognize submission's language.")
                    break
                elif submission_lang is not None and \
                        submission_lang != lang:
                    error = self._("All sources must be in the same language.")
                    break
                elif lang not in contest.languages:
                    # Translate first, then interpolate: interpolating
                    # inside self._() would look up a msgid that cannot
                    # exist in the catalog.
                    error = self._("Language %s not allowed in this "
                                   "contest.") % lang
                    break
                else:
                    submission_lang = lang
        if error is not None:
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Invalid submission!"), error, NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # Check if submitted files are small enough.
        if any(
            [len(f[1]) > config.max_submission_length
             for f in files.values()]):
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Submission too big!"),
                self._("Each source file must be at most %d bytes long.") %
                config.max_submission_length, NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # All checks done, submission accepted.

        # Attempt to store the submission locally to be able to
        # recover a failure.
        if config.submit_local_copy:
            try:
                path = os.path.join(
                    config.submit_local_copy_path.replace(
                        "%s", config.data_dir), participation.user.username)
                if not os.path.exists(path):
                    os.makedirs(path)
                # Pickle in ASCII format produces str, not unicode,
                # therefore we open the file in binary mode.
                with io.open(
                        os.path.join(path,
                                     "%d" % make_timestamp(self.timestamp)),
                        "wb") as file_:
                    pickle.dump((self.contest.id, participation.user.id,
                                 task.id, files), file_)
            except Exception as error:
                # Best-effort only: a failed local copy must not block
                # the submission.
                logger.warning("Submission local copy failed.", exc_info=True)

        # We now have to send all the files to the destination...
        try:
            for filename in files:
                digest = self.application.service.file_cacher.put_file_content(
                    files[filename][1],
                    "Submission file %s sent by %s at %d." %
                    (filename, participation.user.username,
                     make_timestamp(self.timestamp)))
                file_digests[filename] = digest

        # In case of error, the server aborts the submission
        except Exception as error:
            logger.error("Storage failed! %s", error)
            self.application.service.add_notification(
                participation.user.username, self.timestamp,
                self._("Submission storage failed!"),
                self._("Please try again."), NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
            return

        # All the files are stored, ready to submit!
        logger.info("All files stored for submission sent by %s",
                    participation.user.username)
        submission = Submission(self.timestamp,
                                submission_lang,
                                task=task,
                                participation=participation)

        for filename, digest in file_digests.items():
            self.sql_session.add(File(filename, digest, submission=submission))
        self.sql_session.add(submission)
        self.sql_session.commit()
        self.application.service.evaluation_service.new_submission(
            submission_id=submission.id)
        self.application.service.add_notification(
            participation.user.username, self.timestamp,
            self._("Submission received"),
            self._("Your submission has been received "
                   "and is currently being evaluated."), NOTIFICATION_SUCCESS)
        # The argument (encripted submission id) is not used by CWS
        # (nor it discloses information to the user), but it is useful
        # for automatic testing to obtain the submission id).
        # FIXME is it actually used by something?
        self.redirect(
            "/tasks/%s/submissions?%s" %
            (quote(task.name, safe=''), encrypt_number(submission.id)))