Example #1
0
def main():
    parser = ArgumentParser(description="Replayer of CMS contests.")
    parser.add_argument("cws_address", type=str, help="http address of CWS",
                        default="http://127.0.0.1:8888")
    parser.add_argument("import_source",
                        help="source directory or compressed file")
    parser.add_argument("-i", "--no-import", action="store_true",
                        help="assume the contest is already in the database")
    parser.add_argument("-r", "--resume", type=str,
                        help="start from (%%H:%%M:%%S)")
    args = parser.parse_args()
    start_from = None
    if args.resume is not None:
        try:
            start_from = int(args.resume[6:8]) + \
                         int(args.resume[3:5]) * 60 + \
                         int(args.resume[0:2]) * 3600
        except ValueError:
            logger.critical("Invalid resume time %s, format is %%H:%%M:%%S"
                            % args.resume)
            return 1

    if not os.path.isdir(args.import_source):
        logger.critical("Please extract the contest "
                        "before using ReplayContest.")
        return 1

    ContestReplayer(
        import_source=args.import_source,
        no_import=args.no_import,
        start_from=start_from,
        cws_address=args.cws_address
        ).run()

    return 0
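
A standalone sketch of the HH:MM:SS parsing above, validating with a regular expression instead of bare slicing (parse_resume_time is an illustrative name, not part of CMS):

import re

def parse_resume_time(value):
    """Return seconds past midnight for an HH:MM:SS string, raising
    ValueError when the format is wrong."""
    match = re.match(r"^(\d{1,2}):(\d{2}):(\d{2})$", value)
    if match is None:
        raise ValueError("format is %H:%M:%S")
    hours, minutes, seconds = [int(part) for part in match.groups()]
    return hours * 3600 + minutes * 60 + seconds

assert parse_resume_time("01:02:03") == 3723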
Example #2
0
    def popen(self, command,
              stdin=None, stdout=None, stderr=None,
              close_fds=True):
        """Execute the given command in the sandbox using
        subprocess.Popen, assigning the corresponding standard file
        descriptors.

        command (list): executable filename and arguments of the
                        command.
        stdin (file): a file descriptor/object or None.
        stdout (file): a file descriptor/object or None.
        stderr (file): a file descriptor/object or None.
        close_fds (bool): close all file descriptors before executing.
        return (object): popen object.

        """
        self.exec_num += 1
        self.log = None
        args = [self.box_exec] + self.build_box_options() + ["--"] + command
        logger.debug("Executing program in sandbox with command: %s" %
                     " ".join(args))
        with open(self.relative_path(self.cmd_file), 'a') as commands:
            commands.write("%s\n" % (" ".join(args)))
        try:
            p = subprocess.Popen(args,
                                 stdin=stdin, stdout=stdout, stderr=stderr,
                                 close_fds=close_fds)
        except OSError as error:
            logger.critical("Failed to execute program in sandbox "
                            "with command: %s" % " ".join(args))
            logger.critical("Exception: %r" % error)
            raise

        return p
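
Outside the sandbox the same Popen wiring looks like this; the wrapper above only prepends the box executable and its options to the command (a minimal sketch, not CMS code):

import subprocess

# Run a command with explicit standard streams, as popen() does for
# the sandboxed program.
process = subprocess.Popen(["echo", "hello"],
                           stdin=None,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           close_fds=True)
stdout, stderr = process.communicate()
print(stdout)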
Example #3
0
    def safe_put_file(self, path, descr_path):
        """Put a file to FileCacher signaling every error (including
        digest mismatch).

        path (string): the path from which to load the file.
        descr_path (string): same for description.

        return (bool): True if all ok, False if something wrong.

        """
        # First read the description.
        try:
            with open(descr_path) as fin:
                description = fin.read()
        except IOError:
            description = ''

        # Put the file.
        try:
            digest = self.file_cacher.put_file(path=path,
                                               description=description)
        except Exception as error:
            logger.critical("File %s could not be put to file server (%r), "
                            "aborting." % (path, error))
            return False

        # Then check the digest.
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.critical("File %s has hash %s, but the server returned %s, "
                            "aborting." % (path, calc_digest, digest))
            return False

        return True
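
The digest check relies on a sha1sum() helper imported elsewhere; a plausible chunked implementation, assuming it returns the hex digest of the file's contents:

import hashlib

def sha1sum(path):
    # Hash the file in 1 MiB chunks so large files don't fill memory.
    sha = hashlib.sha1()
    with open(path, "rb") as fin:
        for chunk in iter(lambda: fin.read(2 ** 20), b""):
            sha.update(chunk)
    return sha.hexdigest()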
Example #5
0
    def _popen(self, command,
               stdin=None, stdout=None, stderr=None,
               close_fds=True):
        """Execute the given command in the sandbox using
        subprocess.Popen, assigning the corresponding standard file
        descriptors.

        command (list): executable filename and arguments of the
                        command.
        stdin (file): a file descriptor/object or None.
        stdout (file): a file descriptor/object or None.
        stderr (file): a file descriptor/object or None.
        close_fds (bool): close all file descriptors before executing.
        return (object): popen object.

        """
        self.exec_num += 1
        self.log = None
        args = [self.box_exec] + self.build_box_options() + ["--"] + command
        logger.debug("Executing program in sandbox with command: %s" %
                     " ".join(args))
        with open(self.relative_path(self.cmd_file), 'a') as commands:
            commands.write("%s\n" % (" ".join(args)))
        try:
            p = subprocess.Popen(args,
                                 stdin=stdin, stdout=stdout, stderr=stderr,
                                 close_fds=close_fds)
        except OSError:
            logger.critical("Failed to execute program in sandbox "
                            "with command: %s" %
                            " ".join(args), exc_info=True)
            raise

        return p
Example #6
0
    def do_import(self):
        """Take care of creating the database structure, delegating
        the loading of the contest data and putting them on the
        database.

        """
        logger.info("Creating database structure.")
        if self.drop:
            try:
                with SessionGen() as session:
                    FSObject.delete_all(session)
                    session.commit()
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        contest = Contest.import_from_dict(
            self.loader.import_contest(self.path))

        logger.info("Creating contest on the database.")
        with SessionGen() as session:
            session.add(contest)
            logger.info("Analyzing database.")
            session.commit()
            contest_id = contest.id
            analyze_all_tables(session)

        logger.info("Import finished (new contest id: %s)." % contest_id)

        return True
Example #7
0
    def safe_get_file(self, digest, path, descr_path=None):
        """Get file from FileCacher ensuring that the digest is
        correct.

        digest (string): the digest of the file to retrieve.
        path (string): the path where to save the file.
        descr_path (string): the path where to save the description.

        return (bool): True if all ok, False if something wrong.

        """
        # First get the file
        try:
            self.file_cacher.get_file(digest, path=path)
        except Exception as error:
            logger.error("File %s could not retrieved from file server (%r)."
                         % (digest, error))
            return False

        # Then check the digest
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.critical("File %s has wrong hash %s."
                            % (digest, calc_digest))
            return False

        # If applicable, retrieve also the description
        if descr_path is not None:
            with codecs.open(descr_path, 'w', encoding='utf-8') as fout:
                fout.write(self.file_cacher.describe(digest))

        return True
Example #8
0
    def safe_get_file(self, digest, path, descr_path=None):
        """Get file from FileCacher ensuring that the digest is
        correct.

        digest (string): the digest of the file to retrieve.
        path (string): the path where to save the file.
        descr_path (string): the path where to save the description.

        return (bool): True if all ok, False if something wrong.

        """
        # First get the file
        try:
            self.file_cacher.get_file(digest, path=path)
        except Exception as error:
            logger.error("File %s could not retrieved from file server (%r)." %
                         (digest, error))
            return False

        # Then check the digest
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.critical("File %s has wrong hash %s." %
                            (digest, calc_digest))
            return False

        # If applicable, retrieve also the description
        if descr_path is not None:
            with codecs.open(descr_path, 'w', encoding='utf-8') as fout:
                fout.write(self.file_cacher.describe(digest))

        return True
Example #9
0
    def get_submission_data(self, submission_id):
        """Given the id, returns the submission object and a new task
        type object of the correct type.

        submission_id (int): id of the submission.

        return (Submission, TaskType): corresponding objects.

        raise: JobException if id or task type not found.

        """
        submission = Submission.get_from_id(submission_id, self.session)
        if submission is None:
            err_msg = "Couldn't find submission %s " \
                      "in the database." % submission_id
            logger.critical(err_msg)
            raise JobException(err_msg)

        try:
            task_type = get_task_type(submission, self.file_cacher)
        except KeyError as error:
            err_msg = "Task type `%s' not known for " \
                "submission %s (error: %s)." % (
                submission.task.task_type, submission_id, error)
            logger.error(err_msg)
            raise JobException(err_msg)

        return (submission, task_type)
Example #11
0
def main():
    parser = argparse.ArgumentParser(
        description="Updater of CMS contest dumps.")
    parser.add_argument(
        "-V", "--to-version", action="store", type=int, default=-1,
        help="Update to given version number")
    parser.add_argument(
        "path", help="location of the dump or of the 'contest.json' file")

    args = parser.parse_args()
    path = args.path

    to_version = args.to_version
    if to_version == -1:
        to_version = model_version

    if not path.endswith("contest.json"):
        path = os.path.join(path, "contest.json")

    if not os.path.exists(path):
        logger.critical(
            "The given path doesn't exist or doesn't contain a contest "
            "dump in a format CMS is able to understand.")
        return 1

    with io.open(path, 'rb') as fin:
        data = json.load(fin, encoding="utf-8")

    # If no "_version" field is found we assume it's a v1.0
    # export (before the new dump format was introduced).
    dump_version = data.get("_version", 0)

    if dump_version == to_version:
        logger.info(
            "The dump you're trying to update is already stored using "
            "the most recent format supported by this version of CMS.")
        return 0

    if dump_version > to_version:
        logger.critical(
            "The dump you're trying to update is stored using a format "
            "that's more recent than the one supported by this version "
            "of CMS. You probably need to update CMS to handle it.")
        return 1

    for version in range(dump_version, to_version):
        # Update from version to version+1
        updater = __import__(
            "cmscontrib.updaters.update_%d" % (version + 1),
            globals(), locals(), ["Updater"]).Updater(data)
        data = updater.run()
        data["_version"] = version + 1

    assert data["_version"] == to_version

    with io.open(path, 'wb') as fout:
        json.dump(data, fout, encoding="utf-8", indent=4, sort_keys=True)

    return 0
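
The version-by-version update loop above can be exercised without the real cmscontrib.updaters package; a sketch with a stub updater class (the migration step shown is invented for illustration):

class StubUpdater(object):
    """Stands in for cmscontrib.updaters.update_N.Updater."""
    def __init__(self, data):
        self.data = data

    def run(self):
        self.data.setdefault("tasks", [])  # invented migration step
        return self.data

data = {"_version": 0}
to_version = 1
for version in range(data["_version"], to_version):
    data = StubUpdater(data).run()
    data["_version"] = version + 1

assert data["_version"] == to_version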
Example #12
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, " "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, " "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")
            with open(os.path.join(export_dir, "contest.json"), "w") as fout:
                json.dump(contest.export_to_dict(self.skip_submissions,
                                                 self.skip_user_tests),
                          fout, indent=4)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #13
0
    def _prepare_db(self):
        logger.info("Creating database structure.")
        if self.drop:
            try:
                if not (drop_db() and init_db()):
                    return False
            except Exception as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        return True
Example #14
0
    def newfunc(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except HTTPError:
            # These errors are generated by the code (such as 404),
            # they're expected and tornado will take care of them.
            raise
        except Exception as error:
            logger.critical("Uncaught exception (%r) while processing "
                            "a request: %s" %
                            (error, traceback.format_exc()))
            self.write("A critical error has occurred :-(")
            self.finish()
Example #15
0
    def dump_database(self, export_dir):
        """Dump the whole database. This is never used; however, this
        part is retained for historical reasons.

        """
        # Warning: this part depends on the specific database used.
        logger.info("Dumping SQL database.")
        (engine, connection) = config.database.split(':', 1)
        db_exportfile = os.path.join(export_dir, "database_dump.sql")

        # Export procedure for PostgreSQL.
        if engine == 'postgresql':
            db_regex = re.compile(r'//(\w*):(\w*)@(\w*)/(\w*)')
            db_match = db_regex.match(connection)
            if db_match is not None:
                username, password, host, database = db_match.groups()
                os.environ['PGPASSWORD'] = password
                export_res = os.system('pg_dump -h %s -U %s -w %s -x '
                                       '--attribute-inserts > %s' %
                                       (host, username, database,
                                        db_exportfile))
                del os.environ['PGPASSWORD']
                if export_res != 0:
                    logger.critical("Database export failed.")
                    return False
            else:
                logger.critical("Cannot obtain parameters for "
                                "database connection.")
                return False

        # Export procedure for SQLite.
        elif engine == 'sqlite':
            db_regex = re.compile('///(.*)')
            db_match = db_regex.match(connection)
            if db_match is not None:
                dbfile, = db_match.groups()
                export_res = os.system('sqlite3 %s .dump > %s' %
                                       (dbfile, db_exportfile))
                if export_res != 0:
                    logger.critical("Database export failed.")
                    return False
            else:
                logger.critical("Cannot obtain parameters for "
                                "database connection.")
                return False

        else:
            logger.critical("Database engine not supported. :-(")
            return False

        return True
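
The connection-string parsing above can be checked in isolation; a toy run of the PostgreSQL branch's regex (the credentials are made up):

import re

db_regex = re.compile(r'//(\w*):(\w*)@(\w*)/(\w*)')
db_match = db_regex.match("//cmsuser:secret@localhost/cmsdb")
assert db_match is not None
assert db_match.groups() == ("cmsuser", "secret", "localhost", "cmsdb")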
Example #16
0
    def _prepare_db(self):
        logger.info("Creating database structure.")
        if self.drop:
            try:
                drop_everything()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False
        return True
Example #17
0
    def _prepare_db(self):
        logger.info("Creating database structure.")
        if self.drop:
            try:
                with SessionGen() as session:
                    FSObject.delete_all(session)
                    session.commit()
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False
        return True
Example #18
0
    def _initialize_scorers(self):
        """Initialize scorers, the ScoreType objects holding all
        submissions for a given task and deciding scores, and create
        an empty ranking view for the contest.

        """
        with SessionGen(commit=False) as session:
            contest = session.query(Contest).\
                      filter_by(id=self.contest_id).first()
            for task in contest.tasks:
                try:
                    self.scorers[task.id] = get_score_type(task=task)
                except Exception as error:
                    logger.critical("Cannot get score type for task %s: %r" %
                                    (task.name, error))
                    self.exit()
            session.commit()
Example #19
0
    def _initialize_scorers(self):
        """Initialize scorers, the ScoreType objects holding all
        submissions for a given task and deciding scores, and create
        an empty ranking view for the contest.

        """
        with SessionGen(commit=False) as session:
            contest = session.query(Contest).\
                      filter_by(id=self.contest_id).first()
            for task in contest.tasks:
                try:
                    self.scorers[task.id] = get_score_type(task=task)
                except Exception as error:
                    logger.critical("Cannot get score type for task %s.\n%r" %
                                    (task.name, error))
                    self.exit()
            session.commit()
Example #20
0
    def push_logs(self, logger):
        """Push all log lines written since the last call to
        get_logs() to the logger object.

        """
        with self.log_lock:
            tmp = self.logs
            self.logs = []
        for (line, severity) in tmp:
            if severity == 'debug':
                logger.debug(line)
            elif severity == 'info':
                logger.info(line)
            elif severity == 'warning':
                logger.warning(line)
            elif severity == 'error':
                logger.error(line)
            elif severity == 'critical':
                logger.critical(line)
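
Since the method names match the severity strings, the dispatch above can also be collapsed with getattr(); a sketch of that alternative, not what the original code does:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("push_logs_demo")

for line, severity in [("queue drained", "info"), ("disk low", "warning")]:
    getattr(logger, severity)(line)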
Example #22
0
    def _initialize_scorers(self):
        """Initialize scorers, the ScoreType objects holding all
        submissions for a given task and deciding scores, and create
        an empty ranking view for the contest.

        """
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            for task in contest.tasks:
                for dataset in task.datasets:
                    try:
                        self.scorers[dataset.id] = \
                            get_score_type(dataset=dataset)
                    except Exception as error:
                        logger.critical(
                            "Cannot get score type for task %s(%d): %r" %
                            (task.name, dataset.id, error))
                        self.exit()
            session.commit()
Example #23
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)
            self.submissions = sorted(
                (submission
                 for submission in self.contest.get_submissions()
                 if not submission.user.hidden),
                key=lambda submission: submission.timestamp)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            try:
                self.export_submissions()
                self.export_ranking()
            except Exception as error:
                logger.critical("Generic error. %r" % error)
                return False

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #24
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)
            self.submissions = sorted(
                (submission for submission in self.contest.get_submissions()
                 if not submission.user.hidden),
                key=lambda submission: submission.timestamp)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            try:
                self.export_submissions()
                self.export_ranking()
            except Exception as error:
                logger.critical("Generic error. %r" % error)
                return False

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #25
0
    def do_import(self):
        """Take care of creating the database structure, delegating
        the loading of the contest data and putting them on the
        database.

        """
        logger.info("Creating database structure.")
        if self.drop:
            try:
                with SessionGen() as session:
                    FSObject.delete_all(session)
                    session.commit()
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        contest = Contest.import_from_dict(
            self.loader.import_contest(self.path))

        logger.info("Creating contest on the database.")
        with SessionGen() as session:
            session.add(contest)
            logger.info("Analyzing database.")
            session.commit()
            contest_id = contest.id
            analyze_all_tables(session)

        logger.info("Import finished (new contest id: %s)." % contest_id)

        return True
Example #26
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file, os.path.join(
                        files_dir, _file), os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")

            self.ids = {contest: "0"}
            self.queue = [contest]

            data = dict()
            i = 0
            while i < len(self.queue):
                obj = self.queue[i]
                data[self.ids[obj]] = self.export_object(obj)
                i += 1

            def maybe_sort_numerically(x):
                try:
                    if isinstance(x, (tuple, list)):
                        x = x[0]
                    x = int(x)
                except (TypeError, ValueError):
                    pass
                return x

            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(data,
                          fout,
                          indent=4,
                          sort_keys=True,
                          item_sort_key=maybe_sort_numerically)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
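
The numeric-aware key above makes "10" sort after "9" rather than before "2"; a self-contained variant that also works where integers and strings cannot be compared directly:

def sort_key(x):
    # Numeric keys first, in numeric order; everything else after,
    # in string order.
    try:
        return (0, int(x), "")
    except (TypeError, ValueError):
        return (1, 0, str(x))

assert sorted(["10", "2", "9", "abc"], key=sort_key) == \
    ["2", "9", "10", "abc"]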
Example #27
0
    def do_export(self):
        """Run the actual export code."""
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            if self.dump_files:
                logger.info("Exporting files.")
                files = contest.enumerate_files(self.skip_submissions,
                                                self.skip_user_tests,
                                                self.light)
                for file_ in files:
                    if not self.safe_get_file(file_,
                                              os.path.join(files_dir, file_),
                                              os.path.join(descr_dir, file_)):
                        return False

            # Export the contest in JSON format.
            if self.dump_model:
                logger.info("Exporting the contest to a JSON file.")

                # We use strings because they'll be the keys of a JSON
                # object; the contest will have ID 0.
                self.ids = {contest.sa_identity_key: "0"}
                self.queue = [contest]

                data = dict()
                while len(self.queue) > 0:
                    obj = self.queue.pop(0)
                    data[self.ids[obj.sa_identity_key]] = \
                        self.export_object(obj)

                # Specify the "root" of the data graph
                data["_objects"] = ["0"]

                with io.open(os.path.join(export_dir,
                                          "contest.json"), "wb") as fout:
                    json.dump(data, fout, encoding="utf-8",
                              indent=4, sort_keys=True)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
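
The export loop above is a plain breadth-first worklist over the object graph; a toy version with dicts standing in for SQLAlchemy objects, assuming export_object() enqueues newly seen children as sketched here:

graph = {"0": ["1", "2"], "1": [], "2": ["1"]}

ids = {"0": "0"}
queue = ["0"]
data = {}
while len(queue) > 0:
    obj = queue.pop(0)
    data[ids[obj]] = graph[obj]  # stand-in for export_object(obj)
    for child in graph[obj]:
        if child not in ids:
            ids[child] = child
            queue.append(child)

assert sorted(data.keys()) == ["0", "1", "2"]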
Example #28
0
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")

                with open(os.path.join(self.import_dir,
                                       "contest.json")) as fin:
                    # Throughout all the code we'll assume the input is
                    # correct without actually doing any validations.
                    # Thus, for example, we're not checking that the
                    # decoded object is a dict...
                    self.datas = json.load(fin)

                self.objs = dict()
                for _id, data in self.datas.iteritems():
                    obj = self.import_object(data)
                    self.objs[_id] = obj
                    session.add(obj)

                for _id in self.datas:
                    self.add_relationships(self.datas[_id], self.objs[_id])

                # Mmh... kind of fragile interface
                contest = self.objs["0"]

                session.flush()
                contest_id = contest.id
                contest_files = contest.enumerate_files()
                session.commit()

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            for digest in contest_files:
                file_ = os.path.join(files_dir, digest)
                desc = os.path.join(descr_dir, digest)
                if not os.path.exists(file_) or not os.path.exists(desc):
                    logger.error("Some files needed by the contest "
                                 "are missing in the import directory. "
                                 "The import will continue. Be aware.")
                    continue
                if not self.safe_put_file(file_, desc):
                    logger.critical("Unable to put file `%s' in the database. "
                                    "Aborting. Please remove the contest "
                                    "from the database." % file_)
                    # TODO: remove contest from the database.
                    return False

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
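
The import relies on a find_root_of_archive() helper defined elsewhere; a plausible sketch, assuming it returns the single top-level directory shared by all archive members, or None when there isn't one:

def find_root_of_archive(file_names):
    roots = set(name.split("/")[0] for name in file_names if name)
    if len(roots) == 1:
        return roots.pop()
    return None

assert find_root_of_archive(["dump/contest.json",
                             "dump/files/abc"]) == "dump"
assert find_root_of_archive(["a/x", "b/y"]) is None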
Example #29
0
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            files = set(os.listdir(files_dir))
            for _file in files:
                if not self.safe_put_file(os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

        contest_id = None

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                with open(os.path.join(self.import_dir,
                                       "contest.json")) as fin:
                    contest = Contest.import_from_dict(json.load(fin))
                    session.add(contest)

                # Check that no files were missing (only if files were
                # imported).
                if not self.no_files:
                    contest_files = contest.enumerate_files()
                    missing_files = contest_files.difference(files)
                    if len(missing_files) > 0:
                        logger.warning("Some files needed to the contest "
                                       "are missing in the import directory.")

                session.flush()
                contest_id = contest.id
                session.commit()

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
Example #30
0
    def do_import(self):
        """Run the actual import code."""
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            elif self.import_source.endswith(".tar.xz") \
                    or self.import_source.endswith(".txz"):
                try:
                    import lzma
                except ImportError:
                    logger.critical("LZMA compression format not "
                                    "supported. Please install package "
                                    "lzma.")
                    return False
                archive = tarfile.open(
                    fileobj=lzma.LZMAFile(self.import_source))
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        with SessionGen(commit=False) as session:

            # Import the contest in JSON format.
            if self.load_model:
                logger.info("Importing the contest from a JSON file.")

                with io.open(os.path.join(self.import_dir,
                                          "contest.json"), "rb") as fin:
                    # TODO - Throughout all the code we'll assume the
                    # input is correct without actually doing any
                    # validations.  Thus, for example, we're not
                    # checking that the decoded object is a dict...
                    self.datas = json.load(fin, encoding="utf-8")

                # If the dump has been exported using a data model
                # different than the current one (that is, a previous
                # one) we try to update it.
                # If no "_version" field is found we assume it's a v1.0
                # export (before the new dump format was introduced).
                dump_version = self.datas.get("_version", 0)

                if dump_version < model_version:
                    logger.warning(
                        "The dump you're trying to import has been created "
                        "by an old version of CMS. It may take a while to "
                        "adapt it to the current data model. You can use "
                        "cmsDumpUpdater to update the on-disk dump and "
                        "speed up future imports.")

                if dump_version > model_version:
                    logger.critical(
                        "The dump you're trying to import has been created "
                        "by a version of CMS newer than this one and there "
                        "is no way to adapt it to the current data model. "
                        "You probably need to update CMS to handle it. It's "
                        "impossible to proceed with the importation.")
                    return False

                for version in range(dump_version, model_version):
                    # Update from version to version+1
                    updater = __import__(
                        "cmscontrib.updaters.update_%d" % (version + 1),
                        globals(), locals(), ["Updater"]).Updater(self.datas)
                    self.datas = updater.run()
                    self.datas["_version"] = version + 1

                assert self.datas["_version"] == model_version

                self.objs = dict()
                for id_, data in self.datas.iteritems():
                    if not id_.startswith("_"):
                        self.objs[id_] = self.import_object(data)
                for id_, data in self.datas.iteritems():
                    if not id_.startswith("_"):
                        self.add_relationships(data, self.objs[id_])

                for k, v in list(self.objs.iteritems()):

                    # Skip submissions if requested
                    if self.skip_submissions and isinstance(v, Submission):
                        del self.objs[k]

                    # Skip user_tests if requested
                    if self.skip_user_tests and isinstance(v, UserTest):
                        del self.objs[k]

                contest_id = list()
                contest_files = set()

                # Add each base object and all its dependencies
                for id_ in self.datas["_objects"]:
                    contest = self.objs[id_]

                    # We explicitly add only the contest since all child
                    # objects will be automatically added by cascade.
                    # Adding each object individually would also add
                    # orphaned objects like the ones that depended on
                    # submissions or user_tests that we (possibly)
                    # removed above.
                    session.add(contest)
                    session.flush()

                    contest_id += [contest.id]
                    contest_files |= contest.enumerate_files(
                        self.skip_submissions, self.skip_user_tests, self.light)

                session.commit()
            else:
                contest_id = None
                contest_files = None

            # Import files.
            if self.load_files:
                logger.info("Importing files.")

                files_dir = os.path.join(self.import_dir, "files")
                descr_dir = os.path.join(self.import_dir, "descriptions")

                files = set(os.listdir(files_dir))
                descr = set(os.listdir(descr_dir))

                if not descr <= files:
                    logger.warning("Some descriptions do not have an "
                                   "associated file.")
                if not files <= descr:
                    logger.warning("Some files do not have an associated "
                                   "description.")

                if not (contest_files is None or files <= contest_files):
                    # FIXME Check if it's because this is a light import
                    # or because we're skipping submissions or user_tests
                    logger.warning("The dump contains some files that are "
                                   "not needed by the contest.")
                if not (contest_files is None or contest_files <= files):
                    # The reason for this could be that it was a light
                    # export that's not being reimported as such.
                    logger.warning("The contest needs some files that are "
                                   "not contained in the dump.")

                # Limit import to files we actually need.
                if contest_files is not None:
                    files &= contest_files

                for digest in files:
                    file_ = os.path.join(files_dir, digest)
                    desc = os.path.join(descr_dir, digest)
                    if not self.safe_put_file(file_, desc):
                        logger.critical("Unable to put file `%s' in the database. "
                                        "Aborting. Please remove the contest "
                                        "from the database." % file_)
                        # TODO: remove contest from the database.
                        return False

        if contest_id is not None:
            logger.info("Import finished (contest id: %s)." %
                        ", ".join(str(id_) for id_ in contest_id))
        else:
            logger.info("Import finished.")
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            rmtree(self.import_dir)

        return True
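
The dump consistency warnings above are plain set algebra over digests; a toy run of each check:

files = set(["a", "b", "c"])          # digests present in files/
descr = set(["a", "b"])               # digests with a description
contest_files = set(["b", "c", "d"])  # digests the contest needs

assert files - descr == set(["c"])          # file without a description
assert contest_files - files == set(["d"])  # needed but not in the dump
assert files - contest_files == set(["a"])  # in the dump but not needed
assert files & contest_files == set(["b", "c"])  # what gets imported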
Example #31
0
    def do_reimport(self):
        """Get the contest from the Loader and merge it."""
        with SessionGen(commit=False) as session:
            # Load the old contest from the database.
            old_contest = Contest.get_from_id(self.old_contest_id, session)
            old_users = dict((x.username, x) for x in old_contest.users)
            old_tasks = dict((x.name, x) for x in old_contest.tasks)

            # Load the new contest from the filesystem.
            new_contest, new_tasks, new_users = self.loader.get_contest()
            new_users = dict((x["username"], x) for x in new_users)
            new_tasks = dict((x["name"], x) for x in new_tasks)

            # Updates contest-global settings that are set in new_contest.
            self._update_columns(old_contest, new_contest)

            # Do the actual merge: compare all users of the old and of
            # the new contest and see if we need to create, update or
            # delete them. Delete only if authorized, fail otherwise.
            users = set(old_users.keys()) | set(new_users.keys())
            for user in users:
                old_user = old_users.get(user, None)
                new_user = new_users.get(user, None)

                if old_user is None:
                    # Create a new user.
                    logger.info("Creating user %s" % user)
                    new_user = self.loader.get_user(new_user)
                    old_contest.users.append(new_user)
                elif new_user is not None:
                    # Update an existing user.
                    logger.info("Updating user %s" % user)
                    new_user = self.loader.get_user(new_user)
                    self._update_object(old_user, new_user)
                else:
                    # Delete an existing user.
                    if self.force:
                        logger.info("Deleting user %s" % user)
                        old_contest.users.remove(old_user)
                    else:
                        logger.critical(
                            "User %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            user)
                        return False

            # The same for tasks.
            tasks = set(old_tasks.keys()) | set(new_tasks.keys())
            for task in tasks:
                old_task = old_tasks.get(task, None)
                new_task = new_tasks.get(task, None)

                if old_task is None:
                    # Create a new task.
                    logger.info("Creating task %s" % task)
                    new_task = self.loader.get_task(new_task)
                    old_contest.tasks.append(new_task)
                elif new_task is not None:
                    # Update an existing task.
                    logger.info("Updating task %s" % task)
                    new_task = self.loader.get_task(new_task)
                    self._update_object(old_task, new_task)
                else:
                    # Delete an existing task.
                    if self.force:
                        logger.info("Deleting task %s" % task)
                        old_contest.tasks.remove(old_task)
                    else:
                        logger.critical(
                            "Task %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            task)
                        return False

            session.commit()

        logger.info("Reimport finished (contest id: %s)." %
                    self.old_contest_id)

        return True
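Example #31 leans on two helpers that are not shown, _update_columns and _update_object. A minimal sketch of what such a column-copying helper could look like, assuming SQLAlchemy declarative models (the real CMS implementation may differ):

from sqlalchemy import inspect

def update_columns(old_obj, new_obj):
    """Copy every plain column (except the primary key) from
    new_obj onto old_obj, leaving relationships untouched.
    A sketch only; not CMS's actual helper."""
    for column in inspect(type(old_obj)).columns:
        if not column.primary_key:
            setattr(old_obj, column.key, getattr(new_obj, column.key))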
Example #32
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")

            self.ids = {contest: "0"}
            self.queue = [contest]

            data = dict()
            i = 0
            while i < len(self.queue):
                obj = self.queue[i]
                data[self.ids[obj]] = self.export_object(obj)
                i += 1

            def maybe_sort_numerically(x):
                try:
                    if isinstance(x, (tuple, list)):
                        x = x[0]
                    x = int(x)
                except (TypeError, ValueError):
                    pass
                return x

            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                # item_sort_key is a simplejson extension; the
                # standard-library json module does not accept it.
                json.dump(data, fout, indent=4, sort_keys=True,
                          item_sort_key=maybe_sort_numerically)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
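As the comment above notes, item_sort_key is a simplejson extension, and it receives each (key, value) pair; that is why maybe_sort_numerically unpacks tuples. A tiny self-contained demonstration (Python 2, simplejson assumed installed):

import simplejson

def maybe_sort_numerically(x):
    # item_sort_key passes the whole (key, value) pair.
    try:
        if isinstance(x, (tuple, list)):
            x = x[0]
        x = int(x)
    except (TypeError, ValueError):
        pass
    return x

data = {"10": "ten", "2": "two", "name": "contest"}
print(simplejson.dumps(data, item_sort_key=maybe_sort_numerically))
# Under Python 2 integers sort before strings, so the keys come
# out as "2", "10", "name" instead of the lexicographic "10", "2".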
Example #33
0
    def new_evaluation(self, submission_id):
        """This RPC inform ScoringService that ES finished the
        evaluation for a submission.

        submission_id (int): the id of the submission that changed.

        """
        with SessionGen(commit=True) as session:
            submission = Submission.get_from_id(submission_id, session)
            if submission is None:
                logger.critical("[action_finished] Couldn't find "
                                " submission %d in the database" %
                                submission_id)
                return
            if submission.user.hidden:
                return

            # Assign score to the submission.
            scorer = self.scorers[submission.task_id]
            scorer.add_submission(submission_id, submission.timestamp,
                                  submission.user.username,
                                  [float(ev.outcome)
                                   for ev in submission.evaluations],
                                  submission.tokened())

            # Mark submission as scored.
            self.submission_ids_scored.add(submission_id)

            # Update the ranking view.
            contest = session.query(Contest).\
                      filter_by(id=self.contest_id).first()
            contest.update_ranking_view(self.scorers,
                                        task=submission.task)

            # Fill in the submission's score info in the db.
            submission.score = scorer.pool[submission_id]["score"]
            submission.public_score = \
                scorer.pool[submission_id]["public_score"]

            details = scorer.pool[submission_id]["details"]
            if details is None:
                details = []
            submission.score_details = json.dumps(details)

            public_details = scorer.pool[submission_id]["public_details"]
            if public_details is None:
                public_details = []
            submission.public_score_details = json.dumps(public_details)

            # Data to send to remote rankings.
            submission_url = "/submissions/%s" % encode_id(submission_id)
            submission_put_data = {
                "user": encode_id(submission.user.username),
                "task": encode_id(submission.task.name),
                "time": submission.timestamp}
            subchange_url = "/subchanges/%s" % encode_id("%s%ss" %
                                                         (submission.timestamp,
                                                          submission_id))
            subchange_put_data = {"submission": encode_id(submission_id),
                                  "time": submission.timestamp,
                                  "score": submission.score,
                                  "extra": details}

        # TODO: ScoreRelative here does not work with remote
        # rankings (it does in the ranking view) because we
        # update only the user owning the submission.

        # Adding operations to the queue.
        for ranking in self.rankings:
            self.operation_queue.append((send_submission,
                                         [ranking, submission_url,
                                          submission_put_data]))
            self.operation_queue.append((send_change,
                                         [ranking, subchange_url,
                                          subchange_put_data]))
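Example #33 only fills operation_queue; the consumer lives elsewhere. A minimal sketch, under the assumption that each entry is a (function, arguments) pair and that failed operations should be retried on a later round:

import logging

logger = logging.getLogger(__name__)

def dispatch_operations(operation_queue):
    """Drain a queue of (function, arguments) pairs, re-queueing
    the ones that fail.  The retry policy is an assumption for
    illustration, not CMS's actual dispatcher."""
    pending = []
    for func, args in operation_queue:
        try:
            func(*args)
        except Exception as error:
            logger.error("Operation %r failed (%r); re-queueing."
                         % (func, error))
            pending.append((func, args))
    # Keep only the failed operations for the next round.
    operation_queue[:] = pending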
Example #34
0
    def do_reimport(self):
        """Ask the loader to load the contest and actually merge the
        two.

        """
        # Create the dict corresponding to the new contest.
        yaml_contest = self.loader.import_contest(self.path)
        yaml_users = dict(((x['username'], x) for x in yaml_contest['users']))
        yaml_tasks = dict(((x['name'], x) for x in yaml_contest['tasks']))

        with SessionGen(commit=False) as session:

            # Create the dict corresponding to the old contest, from
            # the database.
            contest = Contest.get_from_id(self.contest_id, session)
            cms_contest = contest.export_to_dict()
            cms_users = dict((x['username'], x) for x in cms_contest['users'])
            cms_tasks = dict((x['name'], x) for x in cms_contest['tasks'])

            # Delete the old contest from the database.
            session.delete(contest)
            session.flush()

            # Do the actual merge: first of all, update all users of
            # the old contest with the corresponding ones from the
            # new contest. If some user is present in the old contest
            # but not in the new one, we check whether we have to
            # fail or to remove it; in the latter case, we add it to
            # a list.
            users_to_remove = []
            for user_num, user in enumerate(cms_contest['users']):
                if user['username'] in yaml_users:
                    yaml_user = yaml_users[user['username']]

                    yaml_user['submissions'] = user['submissions']
                    yaml_user['user_tests'] = user['user_tests']
                    yaml_user['questions'] = user['questions']
                    yaml_user['messages'] = user['messages']

                    cms_contest['users'][user_num] = yaml_user
                else:
                    if self.force:
                        logger.warning(
                            "User %s exists in old contest, but "
                            "not in the new one." % user['username'])
                        users_to_remove.append(user_num)
                        # FIXME Do we need really to do this, given that
                        # we already deleted the whole contest?
                        session.delete(contest.users[user_num])
                    else:
                        logger.critical(
                            "User %s exists in old contest, but "
                            "not in the new one. Use -f to force."
                            % user['username'])
                        return False

            # Delete the users, walking the indices in reverse so
            # that earlier deletions do not shift the later indices.
            for user_num in reversed(users_to_remove):
                del cms_contest['users'][user_num]

            # Then append the users of the new contest that are not
            # present in the old one.
            for user in yaml_contest['users']:
                if user['username'] not in cms_users.keys():
                    cms_contest['users'].append(user)

            # The same for tasks: update old tasks.
            tasks_to_remove = []
            for task_num, task in enumerate(cms_contest['tasks']):
                if task['name'] in yaml_tasks:
                    yaml_task = yaml_tasks[task['name']]

                    cms_contest['tasks'][task_num] = yaml_task
                else:
                    if self.force:
                        logger.warning("Task %s exists in old contest, but "
                                       "not in the new one." % task['name'])
                        tasks_to_remove.append(task_num)
                        # FIXME Do we need really to do this, given that
                        # we already deleted the whole contest?
                        session.delete(contest.tasks[task_num])
                    else:
                        logger.error("Task %s exists in old contest, but "
                                     "not in the new one. Use -f to force."
                                     % task['name'])
                        return False

            # Delete the tasks, again in reverse index order.
            for task_num in reversed(tasks_to_remove):
                del cms_contest['tasks'][task_num]

            # And add new tasks.
            for task in yaml_contest['tasks']:
                if task['name'] not in cms_tasks.keys():
                    cms_contest['tasks'].append(task)

            # Reimport the contest in the db, with the previous ID.
            contest = Contest.import_from_dict(cms_contest)
            contest.id = self.contest_id
            session.add(contest)
            session.flush()

            logger.info("Analyzing database.")
            analyze_all_tables(session)
            session.commit()

        logger.info("Reimport of contest %s finished." % self.contest_id)

        return True
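The removal loops above iterate in reverse because deleting an earlier list index shifts every later one; a minimal demonstration:

items = ['a', 'b', 'c', 'd']
to_remove = [1, 3]               # collected in ascending order

# Reverse order is safe: index 3 is deleted before index 1 moves
# anything.
for num in reversed(to_remove):
    del items[num]
assert items == ['a', 'c']

# Forward order would delete 'b' first, shifting 'd' down to
# index 2, and then `del items[3]` would raise an IndexError.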
Example #35
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")
            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(contest.export_to_dict(self.skip_submissions,
                                                 self.skip_user_tests),
                          fout,
                          indent=4)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
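get_archive_info is not shown in these examples. A plausible sketch, under the assumption that it maps the target's extension to a tarfile write mode (empty when the target is not a recognized archive) plus the archive's basename:

import os

def get_archive_info(target):
    """Guess the tarfile write mode and basename for `target`.
    A sketch of the helper used above; the real CMS version may
    differ."""
    write_mode = ""
    basename = os.path.basename(target)
    if target.endswith((".tar.gz", ".tgz")):
        write_mode = "w:gz"
    elif target.endswith((".tar.bz2", ".tbz2")):
        write_mode = "w:bz2"
    elif target.endswith(".tar"):
        write_mode = "w"
    if write_mode != "":
        # Strip the archive extension to get the inner directory name.
        for ext in (".tar.gz", ".tgz", ".tar.bz2", ".tbz2", ".tar"):
            if basename.endswith(ext):
                basename = basename[:-len(ext)]
                break
    return {"write_mode": write_mode, "basename": basename}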
Example #36
0
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        logger.info("Reading JSON file...")
        with open(os.path.join(self.import_dir, "contest.json")) as fin:
            contest_json = json.load(fin)
        if self.no_submissions:
            for user in contest_json["users"]:
                user["submissions"] = []
                user["user_tests"] = []

        contest_id = None
        contest_files = None

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                contest = Contest.import_from_dict(contest_json)
                session.add(contest)

                session.flush()
                contest_id = contest.id
                contest_files = contest.enumerate_files()
                session.commit()

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            if contest_files is None:
                # With only_files we had no contest to enumerate, so
                # import every file present in the dump.
                contest_files = os.listdir(files_dir)
            for digest in contest_files:
                file_ = os.path.join(files_dir, digest)
                desc = os.path.join(descr_dir, digest)
                if not os.path.exists(file_) or not os.path.exists(desc):
                    logger.error("Some files needed by the contest "
                                 "are missing in the import directory. "
                                 "The import will continue, but the "
                                 "contest may be incomplete.")
                if not self.safe_put_file(file_, desc):
                    logger.critical("Unable to put file `%s' in the database. "
                                    "Aborting. Please remove the contest "
                                    "from the database." % file_)
                    # TODO: remove contest from the database.
                    return False

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
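find_root_of_archive is assumed to return the single top-level directory shared by every archive member, or None when there isn't one. A minimal sketch under that contract, handling both branches above (zipfile.infolist() yields ZipInfo objects, tarfile.getnames() yields strings):

def find_root_of_archive(file_names):
    """Return the top-level directory that every archive member
    lives in, or None.  A sketch of the helper used above, under
    that assumed contract."""
    root = None
    for name in file_names:
        # Normalize ZipInfo objects to plain member names.
        name = getattr(name, "filename", name)
        top = name.split("/")[0]
        if root is None:
            root = top
        elif root != top:
            return None
    return root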
Example #38
0
    def do_reimport(self):
        """Get the contest from the Loader and merge it."""
        with SessionGen(commit=False) as session:
            # Load the old contest from the database.
            old_contest = Contest.get_from_id(self.old_contest_id, session)
            old_users = dict((x.username, x) for x in old_contest.users)
            old_tasks = dict((x.name, x) for x in old_contest.tasks)

            # Load the new contest from the filesystem.
            new_contest, new_tasks, new_users = self.loader.get_contest()

            # Updates contest-global settings that are set in new_contest.
            self._update_columns(old_contest, new_contest)

            # Do the actual merge: compare all users of the old and of
            # the new contest and see if we need to create, update or
            # delete them. Delete only if authorized, fail otherwise.
            users = set(old_users.keys()) | set(new_users)
            for username in users:
                old_user = old_users.get(username, None)

                if old_user is None:
                    # Create a new user.
                    logger.info("Creating user %s" % username)
                    new_user = self.loader.get_user(username)
                    old_contest.users.append(new_user)
                elif username in new_users:
                    # Update an existing user.
                    logger.info("Updating user %s" % username)
                    new_user = self.loader.get_user(username)
                    self._update_object(old_user, new_user)
                else:
                    # Delete an existing user.
                    if self.force:
                        logger.info("Deleting user %s" % username)
                        old_contest.users.remove(old_user)
                    else:
                        logger.critical(
                            "User %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            username)
                        return False

            # The same for tasks. Setting num for tasks requires a
            # bit of trickery, since we have to avoid triggering a
            # duplicate key constraint violation while we're messing
            # with the task order. To do that we just set a
            # sufficiently high number on the first pass and then
            # fix it on a second pass.
            tasks = set(old_tasks.keys()) | set(new_tasks)
            current_num = max(len(old_tasks), len(new_tasks))
            for task in tasks:
                old_task = old_tasks.get(task, None)

                if old_task is None:
                    # Create a new task.
                    logger.info("Creating task %s" % task)
                    new_task = self.loader.get_task(task)
                    new_task.num = current_num
                    current_num += 1
                    old_contest.tasks.append(new_task)
                elif task in new_tasks:
                    # Update an existing task.
                    if self.full or self.loader.has_changed(task):
                        logger.info("Updating task %s" % task)
                        new_task = self.loader.get_task(task)
                        new_task.num = current_num
                        current_num += 1
                        self._update_object(old_task, new_task)
                    else:
                        logger.info("Task %s has not changed" % task)
                else:
                    # Delete an existing task.
                    if self.force:
                        logger.info("Deleting task %s" % task)
                        session.delete(old_task)
                    else:
                        logger.critical(
                            "Task %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            task)
                        return False

                session.flush()

            # And finally we fix the numbers; old_contest must be
            # refreshed because otherwise SQLAlchemy doesn't become
            # aware that some tasks may have been deleted.
            tasks_order = dict((name, num)
                               for num, name in enumerate(new_tasks))
            session.refresh(old_contest)
            for task in old_contest.tasks:
                task.num = tasks_order[task.name]

            session.commit()

        logger.info("Reimport finished (contest id: %s)." %
                    self.old_contest_id)

        return True
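The two-pass trick in Example #38 is worth isolating: to permute values that must stay unique, first park every row on numbers no final value can collide with, then assign the real ones. A toy illustration of the same idea, outside SQLAlchemy:

def renumber(rows, final_order):
    """Two-pass renumbering: pass 1 parks every row above any
    final value, pass 2 assigns the real numbers.  Against a real
    unique constraint, a flush would follow pass 1."""
    offset = max(len(rows), len(final_order))
    for i, row in enumerate(rows):
        row["num"] = offset + i
    for row in rows:
        row["num"] = final_order[row["name"]]

rows = [{"name": "taskA", "num": 1}, {"name": "taskB", "num": 0}]
renumber(rows, {"taskA": 0, "taskB": 1})
assert [row["num"] for row in rows] == [0, 1]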
Example #40
0
    def do_import(self):
        """Run the actual import code."""
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        with SessionGen(commit=False) as session:

            # Import the contest in JSON format.
            if self.load_model:
                logger.info("Importing the contest from a JSON file.")

                with io.open(os.path.join(self.import_dir,
                                          "contest.json"), "rb") as fin:
                    # TODO - Throughout all the code we'll assume the
                    # input is correct without actually doing any
                    # validations.  Thus, for example, we're not
                    # checking that the decoded object is a dict...
                    self.datas = json.load(fin, encoding="utf-8")

                self.objs = dict()
                for id_, data in self.datas.iteritems():
                    if not id_.startswith("_"):
                        self.objs[id_] = self.import_object(data)
                for id_, data in self.datas.iteritems():
                    if not id_.startswith("_"):
                        self.add_relationships(data, self.objs[id_])

                for k, v in list(self.objs.iteritems()):

                    # Skip submissions if requested
                    if self.skip_submissions and isinstance(v, Submission):
                        del self.objs[k]

                    # Skip user_tests if requested
                    if self.skip_user_tests and isinstance(v, UserTest):
                        del self.objs[k]

                contest_id = list()
                contest_files = set()

                # Add each base object and all its dependencies
                for id_ in self.datas["_objects"]:
                    contest = self.objs[id_]

                    # We explicitly add only the contest since all child
                    # objects will be automatically added by cascade.
                    # Adding each object individually would also add
                    # orphaned objects like the ones that depended on
                    # submissions or user_tests that we (possibly)
                    # removed above.
                    session.add(contest)
                    session.flush()

                    contest_id += [contest.id]
                    contest_files |= contest.enumerate_files(
                        self.skip_submissions, self.skip_user_tests, self.light)

                session.commit()
            else:
                contest_id = None
                contest_files = None

            # Import files.
            if self.load_files:
                logger.info("Importing files.")

                files_dir = os.path.join(self.import_dir, "files")
                descr_dir = os.path.join(self.import_dir, "descriptions")

                files = set(os.listdir(files_dir))
                descr = set(os.listdir(descr_dir))

                if not descr <= files:
                    logger.warning("Some files do not have an associated "
                                   "description.")
                if not files <= descr:
                    logger.warning("Some descriptions do not have an "
                                   "associated file.")

                if not (contest_files is None or files <= contest_files):
                    # FIXME Check if it's because this is a light import
                    # or because we're skipping submissions or user_tests
                    logger.warning("The dump contains some files that are "
                                   "not needed by the contest.")
                if not (contest_files is None or contest_files <= files):
                    # The reason for this could be that it was a light
                    # export that's not being reimported as such.
                    logger.warning("The contest needs some files that are "
                                   "not contained in the dump.")

                # Limit import to files we actually need.
                if contest_files is not None:
                    files &= contest_files

                for digest in files:
                    file_ = os.path.join(files_dir, digest)
                    desc = os.path.join(descr_dir, digest)
                    if not self.safe_put_file(file_, desc):
                        logger.critical("Unable to put file `%s' in the database. "
                                        "Aborting. Please remove the contest "
                                        "from the database." % file_)
                        # TODO: remove contest from the database.
                        return False


        if contest_id is not None:
            logger.info("Import finished (contest id: %s)." %
                        ", ".join(str(id_) for id_ in contest_id))
        else:
            logger.info("Import finished.")
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
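import_object and add_relationships are not shown, but the loop above has the standard two-pass shape for rebuilding an object graph from flat ids: create every object first, wire the references second. A minimal sketch, where the "fields"/"_refs" layout is an assumption for illustration:

class Node(object):
    def __init__(self, **fields):
        self.__dict__.update(fields)

def rebuild(datas):
    """Pass 1 creates every object from its plain fields; pass 2
    resolves id references once all objects exist."""
    objs = dict((id_, Node(**data.get("fields", {})))
                for id_, data in datas.items()
                if not id_.startswith("_"))
    for id_, data in datas.items():
        if id_.startswith("_"):
            continue
        for attr, target in data.get("_refs", {}).items():
            setattr(objs[id_], attr, objs[target])
    return objs

objs = rebuild({"0": {"fields": {"name": "contest"}, "_refs": {}},
                "1": {"fields": {"name": "task"},
                      "_refs": {"contest": "0"}}})
assert objs["1"].contest is objs["0"]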
Example #41
0
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            files = set(os.listdir(files_dir))
            for _file in files:
                if not self.safe_put_file(os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

        contest_id = None

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                with open(os.path.join(self.import_dir,
                                       "contest.json")) as fin:
                    contest = Contest.import_from_dict(json.load(fin))
                    session.add(contest)

                # Check that no files were missing (only if files were
                # imported).
                if not self.no_files:
                    contest_files = contest.enumerate_files()
                    missing_files = contest_files.difference(files)
                    if len(missing_files) > 0:
                        logger.warning("Some files needed to the contest "
                                       "are missing in the import directory.")

                session.flush()
                contest_id = contest.id
                session.commit()

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
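Since the files directory is keyed by content digest, a natural pre-check before safe_put_file is that every file actually hashes to its name. This helper is hypothetical (not in the original code) and assumes SHA1 hex digests:

import hashlib
import os

def verify_digests(files_dir):
    """Return the names of files whose content does not hash to
    their filename, assuming SHA1 digests.  A hypothetical check,
    not part of the original importer."""
    bad = []
    for name in os.listdir(files_dir):
        with open(os.path.join(files_dir, name), "rb") as fin:
            if hashlib.sha1(fin.read()).hexdigest() != name:
                bad.append(name)
    return bad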
Example #42
0
    def action_finished(self, data, plus, error=None):
        """Callback from a worker, to signal that is finished some
        action (compilation or evaluation).

        data (bool): report success of the action
        plus (tuple): the tuple (job=(job_type, submission_id),
                                 side_data=(priority, timestamp),
                                 shard_of_worker)

        """
        # We notify the pool that the worker is free (even if it
        # replied with an error), but if the pool wants to disable
        # the worker, it's because it already assigned its job to
        # someone else, so we discard the data from the worker.
        job, side_data, shard = plus

        # If the worker was ignored, do nothing.
        if self.pool.release_worker(shard):
            return

        if error is not None:
            logger.error("Received error from Worker: `%s'." % error)
            return

        if not data["success"]:
            logger.error("Worker %s signaled action not successful." % shard)
            return

        job_type, submission_id = job
        unused_priority, timestamp = side_data

        logger.info("Action %s for submission %s completed. Success: %s." %
                    (job_type, submission_id, data["success"]))

        # We get the submission from db.
        with SessionGen(commit=True) as session:
            submission = Submission.get_from_id(submission_id, session)
            if submission is None:
                logger.critical("[action_finished] Couldn't find submission "
                                "%s in the database." % submission_id)
                return

            if job_type == EvaluationService.JOB_TYPE_COMPILATION:
                submission.compilation_tries += 1
                submission.compilation_outcome = data["compilation_outcome"]
                submission.compilation_text = data["compilation_text"]
                submission.compilation_shard = data["compilation_shard"]
                submission.compilation_sandbox = data["compilation_sandbox"]
                for filename, digest in data.get("executables", []):
                    session.add(Executable(digest, filename, submission))

            if job_type == EvaluationService.JOB_TYPE_EVALUATION:
                submission.evaluation_tries += 1
                submission.evaluation_outcome = "ok"
                for test_number, info in data["evaluations"].iteritems():
                    ewct = info["execution_wall_clock_time"]  # Too long... :(
                    session.add(Evaluation(
                        text=info["text"],
                        outcome=info["outcome"],
                        num=test_number,
                        memory_used=info["memory_used"],
                        execution_time=info["execution_time"],
                        execution_wall_clock_time=ewct,
                        evaluation_shard=info["evaluation_shard"],
                        evaluation_sandbox=info["evaluation_sandbox"],
                        submission=submission))

            compilation_tries = submission.compilation_tries
            compilation_outcome = submission.compilation_outcome
            evaluation_tries = submission.evaluation_tries
            evaluated = submission.evaluated()

        # Compilation.
        if job_type == EvaluationService.JOB_TYPE_COMPILATION:
            self.compilation_ended(submission_id,
                                   timestamp,
                                   compilation_tries,
                                   compilation_outcome)

        # Evaluation.
        elif job_type == EvaluationService.JOB_TYPE_EVALUATION:
            self.evaluation_ended(submission_id,
                                  timestamp,
                                  evaluation_tries,
                                  evaluated)

        # Other (i.e. error).
        else:
            logger.error("Invalid job type %r." % job_type)
            return
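compilation_ended and evaluation_ended are not shown in Example #42. A hedged sketch of the dispatch they presumably implement, where push_in_queue and the retry limit are assumptions for illustration:

import logging

logger = logging.getLogger(__name__)

MAX_COMPILATION_TRIES = 3  # an assumed limit

def compilation_ended(service, submission_id, timestamp,
                      compilation_tries, compilation_outcome):
    """Sketch of the hand-off that action_finished delegates to;
    `service.push_in_queue` is an assumed interface."""
    if compilation_outcome == "ok":
        # Compilation succeeded: the submission can be evaluated.
        service.push_in_queue(("evaluate", submission_id), timestamp)
    elif compilation_tries < MAX_COMPILATION_TRIES:
        # Compilation failed; retry a limited number of times.
        service.push_in_queue(("compile", submission_id), timestamp)
    else:
        logger.error("Maximum number of compilation tries reached "
                     "for submission %s. I will not try again." %
                     submission_id)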
Example #43
0
    def new_evaluation(self, submission_id):
        """This RPC inform ScoringService that ES finished the
        evaluation for a submission.

        submission_id (int): the id of the submission that changed.

        """
        with SessionGen(commit=True) as session:
            submission = Submission.get_from_id(submission_id, session)
            if submission is None:
                logger.critical("[action_finished] Couldn't find "
                                " submission %d in the database" %
                                submission_id)
                return
            if submission.user.hidden:
                return

            # Assign score to the submission.
            scorer = self.scorers[submission.task_id]
            scorer.add_submission(submission_id, submission.timestamp,
                                  submission.user.username,
                                  dict((ev.num,
                                        {"outcome": float(ev.outcome),
                                         "text": ev.text,
                                         "time": ev.execution_time,
                                         "memory": ev.memory_used})
                                       for ev in submission.evaluations),
                                  submission.tokened())

            # Mark submission as scored.
            self.submission_ids_scored.add(submission_id)

            # Fill in the submission's score info in the db.
            submission.score = scorer.pool[submission_id]["score"]
            submission.public_score = \
                scorer.pool[submission_id]["public_score"]

            # And details.
            submission.score_details = scorer.pool[submission_id]["details"]
            submission.public_score_details = \
                scorer.pool[submission_id]["public_details"]
            submission.ranking_score_details = \
                scorer.pool[submission_id]["ranking_details"]

            # Data to send to remote rankings.
            submission_put_data = {
                "user": encode_id(submission.user.username),
                "task": encode_id(submission.task.name),
                "time": int(make_timestamp(submission.timestamp))}
            subchange_id = "%s%ss" % (int(make_timestamp(submission.timestamp)), submission_id)
            subchange_put_data = {
                "submission": encode_id(submission_id),
                "time": int(make_timestamp(submission.timestamp)),
                "score": submission.score,
                "extra": submission.ranking_score_details}

        # TODO: ScoreRelative here does not work with remote
        # rankings (it does in the ranking view) because we
        # update only the user owning the submission.

        # Adding operations to the queue.
        with self.operation_queue_lock:
            for ranking in self.rankings:
                self.submission_queue.setdefault(
                    ranking, dict())[encode_id(submission_id)] = \
                    submission_put_data
                self.subchange_queue.setdefault(
                    ranking, dict())[encode_id(subchange_id)] = \
                    subchange_put_data
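Example #43 only fills the per-ranking queues; a hedged sketch of a sender that drains them, where the URL scheme and the plain urllib2 PUTs are assumptions (the real dispatcher, its authentication and its error handling are not shown):

import json
import urllib2

def drain_ranking_queues(service):
    """Flush the queues filled by new_evaluation.  A sketch under
    assumed URLs; not CMS's actual sender."""
    with service.operation_queue_lock:
        submissions = service.submission_queue
        service.submission_queue = dict()
        subchanges = service.subchange_queue
        service.subchange_queue = dict()

    for queue, resource in ((submissions, "submissions"),
                            (subchanges, "subchanges")):
        for ranking, items in queue.items():
            for id_, data in items.items():
                request = urllib2.Request(
                    "%s/%s/%s" % (ranking, resource, id_),
                    json.dumps(data),
                    {"Content-Type": "application/json"})
                request.get_method = lambda: "PUT"
                urllib2.urlopen(request)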