Example #1
    def do_import(self):
        """Take care of creating the database structure, delegating
        the loading of the contest data and putting them on the
        database.

        """
        logger.info("Creating database structure.")
        if self.drop:
            try:
                with SessionGen() as session:
                    FSObject.delete_all(session)
                    session.commit()
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        contest = Contest.import_from_dict(self.loader.import_contest(self.path))

        logger.info("Creating contest on the database.")
        with SessionGen() as session:
            session.add(contest)
            logger.info("Analyzing database.")
            session.commit()
            contest_id = contest.id
            analyze_all_tables(session)

        logger.info("Import finished (new contest id: %s)." % contest_id)

        return True
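
Nearly every snippet on this page follows the same access pattern: open a scoped SQLAlchemy session via SessionGen and fetch the contest with Contest.get_from_id. Below is a minimal sketch of that pattern, assuming the names are importable from the CMS database layer; the exact module path (and whether SessionGen still accepts the commit argument) varies between CMS versions, so treat the import line as an assumption.

# Minimal sketch of the session pattern shared by the examples on this page.
# Assumption: older CMS releases export these names from cms.db.SQLAlchemyAll,
# newer ones from cms.db; adjust the import to your version.
from cms.db import SessionGen, Contest

def print_contest_name(contest_id):
    # SessionGen yields a SQLAlchemy session; commit=False keeps it read-only,
    # as most of the examples below do.
    with SessionGen(commit=False) as session:
        contest = Contest.get_from_id(contest_id, session)
        if contest is None:
            raise KeyError("No contest with id %s" % contest_id)
        print("Contest: %s" % contest.name)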
Example #2
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.error("The specified directory already exists, "
                         "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            self.export_submissions()
            self.export_ranking()

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #3
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, " "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(), archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, " "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions, self.skip_user_tests, light=self.light)
            for _file in files:
                if not self.safe_get_file(_file, os.path.join(files_dir, _file), os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")
            with open(os.path.join(export_dir, "contest.json"), "w") as fout:
                json.dump(contest.export_to_dict(self.skip_submissions, self.skip_user_tests), fout, indent=4)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target, archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
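
The exporters above decide between a plain directory and a compressed archive by inspecting the "write_mode" and "basename" keys returned by get_archive_info. The helper itself is not shown anywhere on this page; the sketch below is only an inference from those call sites and is not the actual CMS implementation.

# Hedged reconstruction of get_archive_info(), inferred from its usage above.
import os

def get_archive_info(path):
    # Map archive suffixes to tarfile write modes; anything else is treated as
    # a plain directory (an empty write_mode means "do not build an archive").
    suffixes = ((".tar.gz", "w:gz"), (".tgz", "w:gz"),
                (".tar.bz2", "w:bz2"), (".tbz2", "w:bz2"),
                (".tar", "w"))
    for suffix, mode in suffixes:
        if path.endswith(suffix):
            return {"basename": os.path.basename(path)[:-len(suffix)],
                    "write_mode": mode}
    return {"basename": os.path.basename(path), "write_mode": ""}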
Example #4
    def initialize(self, ranking):
        """Send to the ranking all the data that are supposed to be
        sent before the contest: contest, users, tasks. No support for
        teams, flags and faces.

        ranking ((string, string)): address and authorization string
                                    of ranking server.
        return (bool): success of operation

        """
        logger.info("Initializing rankings.")
        connection = httplib.HTTPConnection(ranking[0])
        auth = ranking[1]

        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)
            if contest is None:
                logger.error("Received request for unexistent contest id %s." %
                             self.contest_id)
                raise KeyError
            contest_name = contest.name
            contest_url = "/contests/%s" % encode_id(contest_name)
            contest_data = {"name": contest.description,
                            "begin": contest.start,
                            "end": contest.stop}

            users = [["/users/%s" % encode_id(user.username),
                      {"f_name": user.first_name,
                       "l_name": user.last_name,
                       "team": None}]
                     for user in contest.users
                     if not user.hidden]

            tasks = [["/tasks/%s" % encode_id(task.name),
                      {"name": task.title,
                       "contest": encode_id(contest.name),
                       "max_score": 100.0,
                       "extra_headers": [],
                       "order": task.num,
                       "short_name": encode_id(task.name)}]
                     for task in contest.tasks]

        safe_put_data(connection, contest_url, contest_data, auth,
                      "sending contest %s" % contest_name)

        for user in users:
            safe_put_data(connection, user[0], user[1], auth,
                          "sending user %s" % (user[1]["l_name"] + " " +
                                                user[1]["f_name"]))

        for task in tasks:
            safe_put_data(connection, task[0], task[1], auth,
                          "sending task %s" % task[1]["name"])

        return True
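
The helper safe_put_data is not shown in this example. The sketch below is a hypothetical version inferred only from the call sites above; the Authorization header format, the status handling, and the use of the module-level logger are assumptions, not the real CMS helper.

# Hypothetical safe_put_data(); `connection` is an httplib.HTTPConnection and
# `logger` is assumed to be the module-level logger used by the examples.
import json

def safe_put_data(connection, url, data, auth, operation):
    try:
        connection.request("PUT", url, json.dumps(data),
                           {"Authorization": auth,
                            "Content-Type": "application/json"})
        response = connection.getresponse()
        response.read()
        if response.status not in (200, 201):
            logger.error("Status %d while %s." % (response.status, operation))
    except Exception as error:
        logger.error("Error %r while %s." % (error, operation))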
Example #5
def add_user(contest_id, first_name, last_name, username,
             password, ip_address, email, hidden):
    with SessionGen(commit=True) as session:
        contest = Contest.get_from_id(contest_id, session)
        user = User(first_name=first_name,
                    last_name=last_name,
                    username=username,
                    password=password,
                    email=email,
                    ip=ip_address,
                    hidden=hidden,
                    contest=contest)
        session.add(user)
Example #6
def add_user(contest_id, first_name, last_name, username, password, ip_address,
             email, hidden):
    with SessionGen(commit=True) as session:
        contest = Contest.get_from_id(contest_id, session)
        user = User(first_name=first_name,
                    last_name=last_name,
                    username=username,
                    password=password,
                    email=email,
                    ip=ip_address,
                    hidden=hidden,
                    contest=contest)
        session.add(user)
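
For completeness, here is a small hypothetical command-line wrapper around the add_user function defined above; the flag names are made up for this sketch and are not CMS command-line options.

# Illustrative wrapper around add_user(); run e.g. as
#   python add_user.py 1 alice secret --first-name Alice --hidden
import argparse

def main():
    parser = argparse.ArgumentParser(description="Add a user to a contest.")
    parser.add_argument("contest_id", type=int)
    parser.add_argument("username")
    parser.add_argument("password")
    parser.add_argument("--first-name", default="")
    parser.add_argument("--last-name", default="")
    parser.add_argument("--ip-address", default=None)
    parser.add_argument("--email", default=None)
    parser.add_argument("--hidden", action="store_true")
    args = parser.parse_args()
    add_user(args.contest_id, args.first_name, args.last_name, args.username,
             args.password, args.ip_address, args.email, args.hidden)

if __name__ == "__main__":
    main()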
Example #7
    def precache_files(self, contest_id):
        """RPC to ask the worker to precache of files in the contest.

        contest_id (int): the id of the contest

        """
        # TODO - Check for lock
        logger.info("Precaching files for contest %d" % contest_id)
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(contest_id, session)
            for digest in contest.enumerate_files():
                self.file_cacher.get_file(digest)
        logger.info("Precaching finished")
Example #8
    def __init__(self, contest_id, export_target, skip_submissions, skip_user_tests, light):
        self.contest_id = contest_id
        self.skip_submissions = skip_submissions
        self.skip_user_tests = skip_user_tests
        self.light = light

        # If target is not provided, we use the contest's name.
        if export_target == "":
            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)
                self.export_target = "dump_%s.tar.gz" % contest.name
        else:
            self.export_target = export_target

        self.file_cacher = FileCacher()
Example #9
    def prepare(self):
        """This method is executed at the beginning of each request.

        """
        self.set_header("Cache-Control", "no-cache, must-revalidate")

        self.sql_session = Session()
        self.contest = Contest.get_from_id(self.application.service.contest, self.sql_session)

        localization_dir = os.path.join(os.path.dirname(__file__), "mo")
        if os.path.exists(localization_dir):
            tornado.locale.load_gettext_translations(localization_dir, "cms")

        self._ = self.get_browser_locale().translate
        self.r_params = self.render_params()
Example #10
    def __init__(self, contest_id, export_target, skip_submissions,
                 skip_user_tests, light):
        self.contest_id = contest_id
        self.skip_submissions = skip_submissions
        self.skip_user_tests = skip_user_tests
        self.light = light

        # If target is not provided, we use the contest's name.
        if export_target == "":
            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)
                self.export_target = "dump_%s.tar.gz" % contest.name
        else:
            self.export_target = export_target

        self.file_cacher = FileCacher()
Example #11
    def precache_files(self, contest_id):
        """RPC to ask the worker to precache of files in the contest.

        contest_id (int): the id of the contest

        """
        # Lock is not needed if the admins correctly placed cache and
        # temp directories in the same filesystem. This is what
        # usually happens since they are children of the same,
        # cms-created, directory.
        logger.info("Precaching files for contest %d." % contest_id)
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(contest_id, session)
            for digest in contest.enumerate_files(skip_submissions=True):
                self.file_cacher.get_file(digest)
        logger.info("Precaching finished.")
Example #12
    def precache_files(self, contest_id):
        """RPC to ask the worker to precache of files in the contest.

        contest_id (int): the id of the contest

        """
        # Lock is not needed if the admins correctly placed cache and
        # temp directories in the same filesystem. This is what
        # usually happens since they are children of the same,
        # cms-created, directory.
        logger.info("Precaching files for contest %d." % contest_id)
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(contest_id, session)
            for digest in contest.enumerate_files(skip_submissions=True):
                self.file_cacher.get_file(digest)
        logger.info("Precaching finished.")
Example #13
    def _initialize_scorers(self):
        """Initialize scorers, the ScoreType objects holding all
        submissions for a given task and deciding scores, and create
        an empty ranking view for the contest.

        """
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            for task in contest.tasks:
                try:
                    self.scorers[task.id] = get_score_type(task=task)
                except Exception as error:
                    logger.critical("Cannot get score type for task %s: %r" %
                                    (task.name, error))
                    self.exit()
            session.commit()
Example #14
    def _initialize_scorers(self):
        """Initialize scorers, the ScoreType objects holding all
        submissions for a given task and deciding scores, and create
        an empty ranking view for the contest.

        """
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            for task in contest.tasks:
                try:
                    self.scorers[task.id] = get_score_type(task=task)
                except Exception as error:
                    logger.critical("Cannot get score type for task %s: %r" %
                                    (task.name, error))
                    self.exit()
            session.commit()
Example #15
    def rankings_initialize(self):
        """Send to all the rankings all the data that are supposed to be
        sent before the contest: contest, users, tasks. No support for
        teams, flags and faces.

        """
        logger.info("Initializing rankings.")

        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            if contest is None:
                logger.error("Received request for unexistent contest "
                             "id %s." % self.contest_id)
                raise KeyError

            contest_id = encode_id(contest.name)
            contest_data = {
                "name": contest.description,
                "begin": int(make_timestamp(contest.start)),
                "end": int(make_timestamp(contest.stop)),
                "score_precision": contest.score_precision}

            users = dict((encode_id(user.username),
                          {"f_name": user.first_name,
                           "l_name": user.last_name,
                           "team": None})
                         for user in contest.users
                         if not user.hidden)

            tasks = dict((encode_id(task.name),
                          {"name": task.title,
                           "contest": encode_id(contest.name),
                           "max_score": 100.0,
                           "score_precision": task.score_precision,
                           "extra_headers": [],
                           "order": task.num,
                           "short_name": task.name})
                         for task in contest.tasks)

        for ranking in self.rankings:
            ranking.data_queue.put((ranking.CONTEST_TYPE,
                                    {contest_id: contest_data}))
            ranking.data_queue.put((ranking.USER_TYPE, users))
            ranking.data_queue.put((ranking.TASK_TYPE, tasks))
Example #16
    def search_jobs_not_done(self):
        """Look in the database for submissions that have not been
        scored for no good reason. Put the missing jobs in the queue.

        """
        # Do this only if we are not still loading old submissions
        # (from the start of the service).
        if self.scoring_old_submission:
            return True

        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            new_submission_results_to_score = set()
            new_submissions_to_token = set()

            for submission in contest.get_submissions():
                for dataset in get_datasets_to_judge(submission.task):
                    sr = submission.get_result(dataset)
                    sr_id = (submission.id, dataset.id)

                    if sr is not None and (sr.evaluated() or
                            sr.compilation_outcome == "fail") and \
                            sr_id not in self.submission_results_scored:
                        new_submission_results_to_score.add(sr_id)

                if submission.tokened() and \
                        submission.id not in self.submissions_tokened:
                    new_submissions_to_token.add(submission.id)

        new_s = len(new_submission_results_to_score)
        old_s = len(self.submission_results_to_score)
        new_t = len(new_submissions_to_token)
        old_t = len(self.submissions_to_token)
        logger.info("Submissions found to score/token: %d, %d." %
                    (new_s, new_t))
        if new_s + new_t > 0:
            self.submission_results_to_score |= new_submission_results_to_score
            self.submissions_to_token |= new_submissions_to_token
            if old_s + old_t == 0:
                self.add_timeout(self.score_old_submissions, None,
                                 0.5, immediately=False)

        # Run forever.
        return True
Example #17
def harvest_contest_data(contest_id):
    """Retrieve the couples username, password and the task list for a
    given contest.

    contest_id (int): the id of the contest we want.
    return (tuple): the first element is a dictionary mapping
                    usernames to passwords; the second one is the list
                    of the task names.

    """
    users = {}
    tasks = []
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        for user in contest.users:
            users[user.username] = {'password': user.password}
        for task in contest.tasks:
            tasks.append((task.id, task.name))
    return users, tasks
Example #18
def harvest_contest_data(contest_id):
    """Retrieve the couples username, password and the task list for a
    given contest.

    contest_id (int): the id of the contest we want.
    return (tuple): the first element is a dictionary mapping
                    usernames to passwords; the second one is the list
                    of the task names.

    """
    users = {}
    tasks = []
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        for user in contest.users:
            users[user.username] = {'password': user.password}
        for task in contest.tasks:
            tasks.append((task.id, task.name))
    return users, tasks
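
harvest_contest_data returns a (users, tasks) pair; a tiny hypothetical consumer is sketched below (the contest id 1 is just a placeholder).

# Hypothetical consumer of harvest_contest_data(); contest id 1 is a placeholder.
users, tasks = harvest_contest_data(1)
for username, info in users.items():
    print("%s has a password of length %d" % (username, len(info['password'])))
for task_id, task_name in tasks:
    print("task %s has id %d" % (task_name, task_id))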
Example #19
    def search_jobs_not_done(self):
        """Look in the database for submissions that have not been
        scored for no good reason. Put the missing jobs in the queue.

        """
        # Do this only if we are not still loading old submissions
        # (from the start of the service).
        if self.scoring_old_submission:
            return True

        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)

            new_submissions_to_score = set()
            new_submissions_to_token = set()

            for submission in contest.get_submissions():
                if (submission.evaluated()
                    or submission.compilation_outcome == "fail") \
                        and submission.id not in self.submissions_scored:
                    new_submissions_to_score.add(submission.id)
                if submission.tokened() \
                        and submission.id not in self.submissions_tokened:
                    new_submissions_to_token.add(submission.id)

        new_s = len(new_submissions_to_score)
        old_s = len(self.submissions_to_score)
        new_t = len(new_submissions_to_token)
        old_t = len(self.submissions_to_token)
        logger.info("Submissions found to score/token: %d, %d." %
                    (new_s, new_t))
        if new_s + new_t > 0:
            self.submissions_to_score |= new_submissions_to_score
            self.submissions_to_token |= new_submissions_to_token
            if old_s + old_t == 0:
                self.add_timeout(self.score_old_submissions,
                                 None,
                                 0.5,
                                 immediately=False)

        # Run forever.
        return True
Example #20
    def __init__(self, contest_id, export_target,
                 dump_files, dump_model, light,
                 skip_submissions, skip_user_tests):
        self.contest_id = contest_id
        self.dump_files = dump_files
        self.dump_model = dump_model
        self.light = light
        self.skip_submissions = skip_submissions
        self.skip_user_tests = skip_user_tests

        # If target is not provided, we use the contest's name.
        if export_target == "":
            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)
                self.export_target = "dump_%s.tar.gz" % contest.name
                logger.warning("export_target not given, using \"%s\""
                               % self.export_target)
        else:
            self.export_target = export_target

        self.file_cacher = FileCacher()
Example #21
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)
            self.submissions = sorted(
                (submission for submission in self.contest.get_submissions()
                 if not submission.user.hidden),
                key=lambda submission: submission.timestamp)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            try:
                self.export_submissions()
                self.export_ranking()
            except Exception as error:
                logger.critical("Generic error. %r" % error)
                return False

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #22
    def do_import(self):
        """Take care of creating the database structure, delegating
        the loading of the contest data and putting them on the
        database.

        """
        logger.info("Creating database structure.")
        if self.drop:
            try:
                with SessionGen() as session:
                    FSObject.delete_all(session)
                    session.commit()
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        contest = Contest.import_from_dict(
            self.loader.import_contest(self.path))

        logger.info("Creating contest on the database.")
        with SessionGen() as session:
            session.add(contest)
            logger.info("Analyzing database.")
            session.commit()
            contest_id = contest.id
            analyze_all_tables(session)

        logger.info("Import finished (new contest id: %s)." % contest_id)

        return True
Example #23
    def do_reimport(self):
        """Ask the loader to load the contest and actually merge the
        two.

        """
        # Create the dict corresponding to the new contest.
        yaml_contest = self.loader.import_contest(self.path)
        yaml_users = dict(((x['username'], x) for x in yaml_contest['users']))
        yaml_tasks = dict(((x['name'], x) for x in yaml_contest['tasks']))

        with SessionGen(commit=False) as session:

            # Create the dict corresponding to the old contest, from
            # the database.
            contest = Contest.get_from_id(self.contest_id, session)
            cms_contest = contest.export_to_dict()
            cms_users = dict((x['username'], x) for x in cms_contest['users'])
            cms_tasks = dict((x['name'], x) for x in cms_contest['tasks'])

            # Delete the old contest from the database.
            session.delete(contest)
            session.flush()

            # Do the actual merge: first of all update all users of
            # the old contest with the corresponding ones from the new
            # contest; if some user is present in the old contest but
            # not in the new one we check if we have to fail or remove
            # it and, in the latter case, add it to a list
            users_to_remove = []
            for user_num, user in enumerate(cms_contest['users']):
                try:
                    user_submissions = \
                        cms_contest['users'][user_num]['submissions']
                    cms_contest['users'][user_num] = \
                        yaml_users[user['username']]
                    cms_contest['users'][user_num]['submissions'] = \
                        user_submissions
                except KeyError:
                    if self.force:
                        logger.warning("User %s exists in old contest, but "
                                       "not in the new one" % user['username'])
                        users_to_remove.append(user_num)
                        session.delete(contest.users[user_num])
                    else:
                        logger.error("User %s exists in old contest, but "
                                     "not in the new one" % user['username'])
                        return False

            # Delete the users
            for user_num in users_to_remove:
                del cms_contest['users'][user_num]

            # Then append the users of the new contest that are not
            # present in the old one.
            for user in yaml_contest['users']:
                if user['username'] not in cms_users.keys():
                    cms_contest['users'].append(user)

            # The same for tasks: update old tasks.
            tasks_to_remove = []
            for task_num, task in enumerate(cms_contest['tasks']):
                try:
                    cms_contest['tasks'][task_num] = yaml_tasks[task['name']]
                except KeyError:
                    if self.force:
                        logger.warning("Task %s exists in old contest, but "
                                       "not in the new one" % task['name'])
                        tasks_to_remove.append(task_num)
                        session.delete(contest.tasks[task_num])
                    else:
                        logger.error("Task %s exists in old contest, but "
                                     "not in the new one" % task['name'])
                        return False

            # Delete the tasks
            for task_num in tasks_to_remove:
                del cms_contest['tasks'][task_num]

            # And add new tasks.
            for task in yaml_contest['tasks']:
                if task['name'] not in cms_tasks.keys():
                    cms_contest['tasks'].append(task)

            # Reimport the contest in the db, with the previous ID.
            contest = Contest.import_from_dict(cms_contest)
            contest.id = self.contest_id
            session.add(contest)
            session.flush()

            logger.info("Analyzing database.")
            analyze_all_tables(session)
            session.commit()

        logger.info("Reimport of contest %s finished." % self.contest_id)

        return True
Example #24
    def do_export(self):
        """Run the actual export code."""
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            if self.dump_files:
                logger.info("Exporting files.")
                files = contest.enumerate_files(self.skip_submissions,
                                                self.skip_user_tests,
                                                self.light)
                for file_ in files:
                    if not self.safe_get_file(file_,
                                              os.path.join(files_dir, file_),
                                              os.path.join(descr_dir, file_)):
                        return False

            # Export the contest in JSON format.
            if self.dump_model:
                logger.info("Exporting the contest to a JSON file.")

                # We use strings because they'll be the keys of a JSON
                # object; the contest will have ID 0.
                self.ids = {contest.sa_identity_key: "0"}
                self.queue = [contest]

                data = dict()
                while len(self.queue) > 0:
                    obj = self.queue.pop(0)
                    data[self.ids[obj.sa_identity_key]] = self.export_object(obj)

                # Specify the "root" of the data graph
                data["_objects"] = ["0"]

                with io.open(os.path.join(export_dir,
                                          "contest.json"), "wb") as fout:
                    json.dump(data, fout, encoding="utf-8",
                              indent=4, sort_keys=True)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #25
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            files = set(os.listdir(files_dir))
            for _file in files:
                if not self.safe_put_file(os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                with open(os.path.join(self.import_dir,
                                       "contest.json")) as fin:
                    contest = Contest.import_from_dict(json.load(fin))
                    session.add(contest)

                # Check that no files were missing (only if files were
                # imported).
                if not self.no_files:
                    contest_files = contest.enumerate_files()
                    missing_files = contest_files.difference(files)
                    if len(missing_files) > 0:
                        logger.warning("Some files needed to the contest "
                                       "are missing in the import directory.")

                session.flush()
                contest_id = contest.id
                session.commit()

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
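
find_root_of_archive is also not shown on this page. The sketch below is only a guess at its behavior, inferred from how this example calls it (the zip branch passes infolist(), i.e. ZipInfo objects, while the tar branch passes plain names); the real CMS helper may differ.

# Hedged guess at find_root_of_archive(), inferred from its usage above.
def find_root_of_archive(file_names):
    roots = set()
    for name in file_names:
        # The zip branch passes ZipInfo objects, the tar branch plain strings.
        name = getattr(name, "filename", name)
        roots.add(name.rstrip("/").split("/")[0])
    # A usable dump has exactly one top-level directory.
    if len(roots) == 1:
        return roots.pop()
    return None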
Example #26
def run(contest_id):
    session = Session()
    contest = Contest.get_from_id(contest_id, session)

    task_by_team = set()
    task_by_lang = set()

    task_dir = os.path.join(os.path.dirname(__file__), "tasks")

    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "teams" in data:
                    for team, v in data["teams"].iteritems():
                        for lang in v:
                            task_by_team.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for team in v:
                            task_by_lang.add((task, lang, team))

    if task_by_team != task_by_lang:
        print "ERROR: data in 'tasks' is not self-consistent"
        print repr(task_by_team - task_by_lang)
        print repr(task_by_lang - task_by_team)
        return

    team_by_task = set()
    team_by_lang = set()

    team_dir = os.path.join(os.path.dirname(__file__), "teams")

    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)
                if "tasks" in data:
                    for task, v in data["tasks"].iteritems():
                        for lang in v:
                            team_by_task.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for task in v:
                            team_by_lang.add((task, lang, team))

    if team_by_task != team_by_lang:
        print "ERROR: data in 'teams' is not self-consistent"
        print repr(team_by_task - team_by_lang)
        print repr(team_by_lang - team_by_task)
        return

    if task_by_team != team_by_task:
        print "ERROR: data in 'tasks' and 'teams' is different"
        print repr(task_by_team - team_by_task)
        print repr(team_by_task - task_by_team)
        return

    data_by_lang = set()
    data_by_team = set()

    data_dir = os.path.join(os.path.dirname(__file__), "data")

    for task in os.listdir(data_dir):
        if os.path.isdir(os.path.join(data_dir, task)):
            for f in os.listdir(os.path.join(data_dir, task, "by_lang")):
                # f == "lang (team).pdf"
                lang, team = re.findall("^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_lang.add((task, lang, team))
            for f in os.listdir(os.path.join(data_dir, task, "by_team")):
                # f == "team (lang).pdf"
                team, lang = re.findall("^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_team.add((task, lang, team))

    if data_by_lang != data_by_team:
        print "ERROR: PDF files in 'data' are not complete"
        print repr(data_by_lang - data_by_team)
        print repr(data_by_team - data_by_lang)
        return

    if task_by_team != data_by_lang:
        print "ERROR: PDF files in 'data' do not match JSON data"
        print repr(task_by_team - data_by_lang)
        print repr(data_by_lang - task_by_team)
        return

    print "Hooray! Data is consistent!"


    # Pick one at random: they're all equal.
    translations = task_by_team

    # Determine language codes used in CMS.
    codes = dict()

    # Read JSON files in 'tasks' again as it provides data already
    # grouped as we need it, and not simply as a list of tuples.
    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        if len(v) == 0:
                            pass
                        elif len(v) == 1 and v[0] != official_team:
                            for team in v:
                                codes[(task, lang, team)] = "%s" % lang
                        else:
                            for team in v:
                                codes[(task, lang, team)] = "%s_%s" % (lang, ioi_to_iso2[team])

    # Store the files as Statement objects.
    file_cacher = FileCacher()

    for task, lang, team in translations:
        if team == official_team:
            assert lang == "en"
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang", "%s (%s).pdf" % (lang, team)),
                description="Statement for task %s" % task)
        else:
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang", "%s (%s).pdf" % (lang, team)),
                description="Statement for task %s, translated into %s (%s) by %s (%s)" %
                            (task, langs[lang], lang, teams[team], team))

        s = Statement(codes[(task, lang, team)], digest, task=contest.get_task(task))

        session.add(s)

    session.commit()


    primary = dict()

    # Retrieve the statements selected by each team.
    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)

                for team2, lang, task in data.get("selected", []):
                    # A team could have selected a statement that later got removed.
                    if (task, lang, team2) in codes:
                        primary.setdefault(team, {}).setdefault(task, []).append(codes[(task, lang, team2)])

    # Add the ones they uploaded themselves.
    for task, lang, team in translations:
        # Don't worry about duplicates, CWS filters them out.
        primary.setdefault(team, {}).setdefault(task, []).append(codes[(task, lang, team)])

    # Set the primary statements for tasks (i.e. the ones of the official team)
    for task, primary2 in primary.get(official_team, {}).iteritems():
        contest.get_task(task).primary_statements = json.dumps(primary2)

    # Set the primary statements for teams
    for team, primary2 in primary.iteritems():
        session.execute("UPDATE users SET primary_statements = '%s' WHERE username LIKE '%s%%';" % (json.dumps(primary2), team))

    session.commit()

    print "Statements stored in the DB!"
Example #27
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            files = set(os.listdir(files_dir))
            for _file in files:
                if not self.safe_put_file(os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                with open(os.path.join(self.import_dir,
                                       "contest.json")) as fin:
                    contest = Contest.import_from_dict(json.load(fin))
                    session.add(contest)

                # Check that no files were missing (only if files were
                # imported).
                if not self.no_files:
                    contest_files = contest.enumerate_files()
                    missing_files = contest_files.difference(files)
                    if len(missing_files) > 0:
                        logger.warning("Some files needed to the contest "
                                       "are missing in the import directory.")

                session.flush()
                contest_id = contest.id
                session.commit()

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
Example #28
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file, os.path.join(
                        files_dir, _file), os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")

            self.ids = {contest: "0"}
            self.queue = [contest]

            data = dict()
            i = 0
            while i < len(self.queue):
                obj = self.queue[i]
                data[self.ids[obj]] = self.export_object(obj)
                i += 1

            def maybe_sort_numerically(x):
                try:
                    if isinstance(x, tuple) or isinstance(x, list):
                        x = x[0]
                    x = int(x)
                except:
                    pass
                return x

            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(data,
                          fout,
                          indent=4,
                          sort_keys=True,
                          item_sort_key=maybe_sort_numerically)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #29
def run(contest_id):
    session = Session()
    contest = Contest.get_from_id(contest_id, session)

    task_by_team = set()
    task_by_lang = set()

    task_dir = os.path.join(os.path.dirname(__file__), "tasks")

    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "teams" in data:
                    for team, v in data["teams"].iteritems():
                        for lang in v:
                            task_by_team.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for team in v:
                            task_by_lang.add((task, lang, team))

    if task_by_team != task_by_lang:
        print "ERROR: data in 'tasks' is not self-consistent"
        print repr(task_by_team - task_by_lang)
        print repr(task_by_lang - task_by_team)
        return

    team_by_task = set()
    team_by_lang = set()

    team_dir = os.path.join(os.path.dirname(__file__), "teams")

    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)
                if "tasks" in data:
                    for task, v in data["tasks"].iteritems():
                        for lang in v:
                            team_by_task.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for task in v:
                            team_by_lang.add((task, lang, team))

    if team_by_task != team_by_lang:
        print "ERROR: data in 'teams' is not self-consistent"
        print repr(team_by_task - team_by_lang)
        print repr(team_by_lang - team_by_task)
        return

    if task_by_team != team_by_task:
        print "ERROR: data in 'tasks' and 'teams' is different"
        print repr(task_by_team - team_by_task)
        print repr(team_by_task - task_by_team)
        return

    data_by_lang = set()
    data_by_team = set()

    data_dir = os.path.join(os.path.dirname(__file__), "data")

    for task in os.listdir(data_dir):
        if os.path.isdir(os.path.join(data_dir, task)):
            for f in os.listdir(os.path.join(data_dir, task, "by_lang")):
                # f == "lang (team).pdf"
                lang, team = re.findall(
                    "^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_lang.add((task, lang, team))
            for f in os.listdir(os.path.join(data_dir, task, "by_team")):
                # f == "team (lang).pdf"
                team, lang = re.findall(
                    "^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_team.add((task, lang, team))

    if data_by_lang != data_by_team:
        print "ERROR: PDF files in 'data' are not complete"
        print repr(data_by_lang - data_by_team)
        print repr(data_by_team - data_by_lang)
        return

    if task_by_team != data_by_lang:
        print "ERROR: PDF files in 'data' do not match JSON data"
        print repr(task_by_team - data_by_lang)
        print repr(data_by_lang - task_by_team)
        return

    print "Hooray! Data is consistent!"

    # Pick one at random: they're all equal.
    translations = task_by_team

    # Determine language codes used in CMS.
    codes = dict()

    # Read JSON files in 'tasks' again as it provides data already
    # grouped as we need it, and not simply as a list of tuples.
    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        if len(v) == 0:
                            pass
                        elif len(v) == 1 and v[0] != official_team:
                            for team in v:
                                codes[(task, lang, team)] = "%s" % lang
                        else:
                            for team in v:
                                codes[(task, lang,
                                       team)] = "%s_%s" % (lang,
                                                           ioi_to_iso2[team])

    # Store the files as Statement objects.
    file_cacher = FileCacher()

    for task, lang, team in translations:
        if team == official_team:
            assert lang == "en"
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang",
                                  "%s (%s).pdf" % (lang, team)),
                description="Statement for task %s" % task)
        else:
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang",
                                  "%s (%s).pdf" % (lang, team)),
                description=
                "Statement for task %s, translated into %s (%s) by %s (%s)" %
                (task, langs[lang], lang, teams[team], team))

        s = Statement(codes[(task, lang, team)],
                      digest,
                      task=contest.get_task(task))

        session.add(s)

    session.commit()

    primary = dict()

    # Retrieve the statements selected by each team.
    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)

                for team2, lang, task in data.get("selected", []):
                    # A team could have selected a statement that later got removed.
                    if (task, lang, team2) in codes:
                        primary.setdefault(team, {}).setdefault(
                            task, []).append(codes[(task, lang, team2)])

    # Add the ones they uploaded themselves.
    for task, lang, team in translations:
        # Don't worry about duplicates, CWS filters them out.
        primary.setdefault(team,
                           {}).setdefault(task,
                                          []).append(codes[(task, lang, team)])

    # Set the primary statements for tasks (i.e. the ones of the official team)
    for task, primary2 in primary.get(official_team, {}).iteritems():
        contest.get_task(task).primary_statements = json.dumps(primary2)

    # Set the primary statements for teams
    for team, primary2 in primary.iteritems():
        session.execute(
            "UPDATE users SET primary_statements = '%s' WHERE username LIKE '%s%%';"
            % (json.dumps(primary2), team))

    session.commit()

    print "Statements stored in the DB!"
Example #30
    def do_reimport(self):
        """Ask the loader to load the contest and actually merge the
        two.

        """
        # Create the dict corresponding to the new contest.
        yaml_contest = self.loader.import_contest(self.path)
        yaml_users = dict(((x['username'], x) for x in yaml_contest['users']))
        yaml_tasks = dict(((x['name'], x) for x in yaml_contest['tasks']))

        with SessionGen(commit=False) as session:

            # Create the dict corresponding to the old contest, from
            # the database.
            contest = Contest.get_from_id(self.contest_id, session)
            cms_contest = contest.export_to_dict()
            cms_users = dict((x['username'], x) for x in cms_contest['users'])
            cms_tasks = dict((x['name'], x) for x in cms_contest['tasks'])

            # Delete the old contest from the database.
            session.delete(contest)
            session.flush()

            # Do the actual merge: first of all update all users of
            # the old contest with the corresponding ones from the new
            # contest; if some user is present in the old contest but
            # not in the new one we check if we have to fail or remove
            # it and, in the latter case, add it to a list
            users_to_remove = []
            for user_num, user in enumerate(cms_contest['users']):
                if user['username'] in yaml_users:
                    yaml_user = yaml_users[user['username']]

                    yaml_user['submissions'] = user['submissions']
                    yaml_user['user_tests'] = user['user_tests']
                    yaml_user['questions'] = user['questions']
                    yaml_user['messages'] = user['messages']

                    cms_contest['users'][user_num] = yaml_user
                else:
                    if self.force:
                        logger.warning("User %s exists in old contest, but "
                                       "not in the new one" % user['username'])
                        users_to_remove.append(user_num)
                        # FIXME Do we really need to do this, given that
                        # we already deleted the whole contest?
                        session.delete(contest.users[user_num])
                    else:
                        logger.error("User %s exists in old contest, but "
                                     "not in the new one" % user['username'])
                        return False

            # Delete the users, in reverse index order so that earlier
            # deletions do not shift the indices collected above.
            for user_num in sorted(users_to_remove, reverse=True):
                del cms_contest['users'][user_num]

            # Then append the users of the new contest that are not
            # present in the old one.
            for user in yaml_contest['users']:
                if user['username'] not in cms_users.keys():
                    cms_contest['users'].append(user)

            # The same for tasks: update old tasks.
            tasks_to_remove = []
            for task_num, task in enumerate(cms_contest['tasks']):
                if task['name'] in yaml_tasks:
                    yaml_task = yaml_tasks[task['name']]

                    cms_contest['tasks'][task_num] = yaml_task
                else:
                    if self.force:
                        logger.warning("Task %s exists in old contest, but "
                                       "not in the new one" % task['name'])
                        tasks_to_remove.append(task_num)
                        # FIXME Do we really need to do this, given that
                        # we already deleted the whole contest?
                        session.delete(contest.tasks[task_num])
                    else:
                        logger.error("Task %s exists in old contest, but "
                                     "not in the new one" % task['name'])
                        return False

            # Delete the tasks, again in reverse index order.
            for task_num in sorted(tasks_to_remove, reverse=True):
                del cms_contest['tasks'][task_num]

            # And add new tasks.
            for task in yaml_contest['tasks']:
                if task['name'] not in cms_tasks.keys():
                    cms_contest['tasks'].append(task)

            # Reimport the contest in the db, with the previous ID.
            contest = Contest.import_from_dict(cms_contest)
            contest.id = self.contest_id
            session.add(contest)
            session.flush()

            logger.info("Analyzing database.")
            analyze_all_tables(session)
            session.commit()

        logger.info("Reimport of contest %s finished." % self.contest_id)

        return True
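A side remark on the index-based deletions above (a standalone illustration, not part of the importer): the collected indices are processed in reverse precisely because deleting in ascending order shifts every later element one position to the left.

# Why the reverse order matters when deleting by index.
items = ['a', 'b', 'c', 'd']
for i in sorted([1, 3], reverse=True):   # drop 'b' and 'd'
    del items[i]
assert items == ['a', 'c']
# In ascending order, del items[1] leaves ['a', 'c', 'd'], and
# del items[3] would then raise an IndexError.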
Example #31
    def initialize(self, ranking, log_bridge):
        """Send to the ranking all the data that are supposed to be
        sent before the contest: contest, users, tasks. No support for
        teams, flags and faces.

        ranking ((str, str, str)): protocol, address and authorization
                                   string of ranking server.
        log_bridge (LogBridge): the bridge to use to write logs.

        raise CannotSendError in case of communication errors.

        """
        log_bridge.info("Initializing ranking %s." % ranking[1])

        try:
            connection = get_connection(ranking[:2], log_bridge)
            auth = ranking[2]

            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)

                if contest is None:
                    log_bridge.error("Received request for unexistent contest "
                                   "id %s." % self.contest_id)
                    raise KeyError
                contest_name = contest.name
                contest_url = "/contests/%s" % encode_id(contest_name)
                contest_data = {
                    "name": contest.description,
                    "begin": int(make_timestamp(contest.start)),
                    "end": int(make_timestamp(contest.stop)),
                    "score_precision": contest.score_precision}

                users = dict((encode_id(user.username),
                              {"f_name": user.first_name,
                               "l_name": user.last_name,
                               "team": None})
                             for user in contest.users
                             if not user.hidden)

                tasks = dict((encode_id(task.name),
                              {"name": task.title,
                               "contest": encode_id(contest.name),
                               "max_score": 100.0,
                               "score_precision": task.score_precision,
                               "extra_headers": [],
                               "order": task.num,
                               "short_name": task.name})
                             for task in contest.tasks)

            safe_put_data(connection, contest_url, contest_data, auth,
                          "sending contest %s to ranking %s" %
                          (contest_name, ranking[1]), log_bridge)

            safe_put_data(connection, "/users/", users, auth,
                          "sending users to ranking %s" % ranking[1],
                          log_bridge)

            safe_put_data(connection, "/tasks/", tasks, auth,
                          "sending tasks to ranking %s" % ranking[1],
                          log_bridge)

        except CannotSendError as error:
            # Delete it to make get_connection try to create it again.
            del active_connections[ranking[1]]
            raise error
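The get_connection helper and the active_connections cache used above are not shown in the example; the sketch below is only a guess at their shape (the names and behaviour are assumptions, not the project's actual implementation), but it illustrates why the except branch drops the cache entry to force a reconnect.

import httplib

active_connections = {}

def get_connection(ranking, log_bridge):
    # Hypothetical sketch: keep one HTTP(S) connection per ranking
    # address and reuse it; deleting the entry (as the error handler
    # above does) makes the next call open a fresh connection.
    proto, address = ranking
    if address not in active_connections:
        if proto == "https":
            active_connections[address] = httplib.HTTPSConnection(address)
        else:
            active_connections[address] = httplib.HTTPConnection(address)
        log_bridge.info("Opened connection to ranking %s." % address)
    return active_connections[address]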
Example #32
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        logger.info("Reading JSON file...")
        with open(os.path.join(self.import_dir, "contest.json")) as fin:
            contest_json = json.load(fin)
        if self.no_submissions:
            for user in contest_json["users"]:
                user["submissions"] = []
                user["user_tests"] = []

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                contest = Contest.import_from_dict(contest_json)
                session.add(contest)

                session.flush()
                contest_id = contest.id
                contest_files = contest.enumerate_files()
                session.commit()

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            for digest in contest_files:
                file_ = os.path.join(files_dir, digest)
                desc = os.path.join(descr_dir, digest)
                if not os.path.exists(file_) or not os.path.exists(desc):
                    logger.error("Some files needed to the contest "
                                 "are missing in the import directory. "
                                 "The import will continue. Be aware.")
                if not self.safe_put_file(file_, desc):
                    logger.critical("Unable to put file `%s' in the database. "
                                    "Aborting. Please remove the contest "
                                    "from the database." % file_)
                    # TODO: remove contest from the database.
                    return False

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
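The safe_put_file method called above lies outside this snippet; here is a rough sketch of what it is assumed to do, modelled on the FileCacher.put_file call shown in an earlier example (the file_cacher attribute and the error handling are assumptions, not the actual implementation):

    def safe_put_file(self, path, descr_path):
        # Hypothetical sketch: read the stored description and push the
        # file into the file store, reporting failure instead of raising.
        try:
            with open(descr_path) as descr_file:
                description = descr_file.read()
            self.file_cacher.put_file(path=path, description=description)
            return True
        except Exception as error:
            logger.error("Unable to put file `%s': %r" % (path, error))
            return False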
Example #33
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")

            self.ids = {contest: "0"}
            self.queue = [contest]

            data = dict()
            i = 0
            while i < len(self.queue):
                obj = self.queue[i]
                data[self.ids[obj]] = self.export_object(obj)
                i += 1

            def maybe_sort_numerically(x):
                try:
                    if isinstance(x, (tuple, list)):
                        x = x[0]
                    x = int(x)
                except (IndexError, TypeError, ValueError):
                    pass
                return x

            # Note: item_sort_key is a simplejson extension; the json
            # module used here is assumed to provide it (the standard
            # library json.dump has no such parameter).
            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(data, fout, indent=4, sort_keys=True,
                          item_sort_key=maybe_sort_numerically)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #34
    def do_reimport(self):
        """Get the contest from the Loader and merge it."""
        with SessionGen(commit=False) as session:
            # Load the old contest from the database.
            old_contest = Contest.get_from_id(self.old_contest_id, session)
            old_users = dict((x.username, x) for x in old_contest.users)
            old_tasks = dict((x.name, x) for x in old_contest.tasks)

            # Load the new contest from the filesystem.
            new_contest, new_tasks, new_users = self.loader.get_contest()
            new_users = dict((x["username"], x) for x in new_users)
            new_tasks = dict((x["name"], x) for x in new_tasks)

            # Updates contest-global settings that are set in new_contest.
            self._update_columns(old_contest, new_contest)

            # Do the actual merge: compare all users of the old and of
            # the new contest and see if we need to create, update or
            # delete them. Delete only if authorized, fail otherwise.
            users = set(old_users.keys()) | set(new_users.keys())
            for user in users:
                old_user = old_users.get(user, None)
                new_user = new_users.get(user, None)

                if old_user is None:
                    # Create a new user.
                    logger.info("Creating user %s" % user)
                    new_user = self.loader.get_user(new_user)
                    old_contest.users.append(new_user)
                elif new_user is not None:
                    # Update an existing user.
                    logger.info("Updating user %s" % user)
                    new_user = self.loader.get_user(new_user)
                    self._update_object(old_user, new_user)
                else:
                    # Delete an existing user.
                    if self.force:
                        logger.info("Deleting user %s" % user)
                        old_contest.users.remove(old_user)
                    else:
                        logger.critical(
                            "User %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            user)
                        return False

            # The same for tasks.
            tasks = set(old_tasks.keys()) | set(new_tasks.keys())
            for task in tasks:
                old_task = old_tasks.get(task, None)
                new_task = new_tasks.get(task, None)

                if old_task is None:
                    # Create a new task.
                    logger.info("Creating task %s" % task)
                    new_task = self.loader.get_task(new_task)
                    old_contest.tasks.append(new_task)
                elif new_task is not None:
                    # Update an existing task.
                    logger.info("Updating task %s" % task)
                    new_task = self.loader.get_task(new_task)
                    self._update_object(old_task, new_task)
                else:
                    # Delete an existing task.
                    if self.force:
                        logger.info("Deleting task %s" % task)
                        old_contest.tasks.remove(old_task)
                    else:
                        logger.critical(
                            "Task %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            task)
                        return False

            session.commit()

        logger.info("Reimport finished (contest id: %s)." %
                    self.old_contest_id)

        return True
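The _update_columns and _update_object helpers used in this example are not included; as an illustration of the kind of column copying they are assumed to perform on SQLAlchemy mapped objects (a guess at the behaviour, not the actual implementation):

    def _update_columns(self, old_obj, new_obj):
        # Hypothetical sketch: copy plain column values from the freshly
        # built object onto the one attached to the session, leaving
        # primary keys and relationships untouched.
        for column in old_obj.__table__.columns:
            if not column.primary_key and not column.foreign_keys:
                setattr(old_obj, column.name,
                        getattr(new_obj, column.name))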
Example #35
    def do_import(self):
        """Run the actual import code.

        """
        logger.operation = "importing contest from %s" % self.import_source
        logger.info("Starting import.")

        if not os.path.isdir(self.import_source):
            if self.import_source.endswith(".zip"):
                archive = zipfile.ZipFile(self.import_source, "r")
                file_names = archive.infolist()

                self.import_dir = tempfile.mkdtemp()
                archive.extractall(self.import_dir)
            elif self.import_source.endswith(".tar.gz") \
                     or self.import_source.endswith(".tgz") \
                     or self.import_source.endswith(".tar.bz2") \
                     or self.import_source.endswith(".tbz2") \
                     or self.import_source.endswith(".tar"):
                archive = tarfile.open(name=self.import_source)
                file_names = archive.getnames()
            else:
                logger.critical("Unable to import from %s." %
                                self.import_source)
                return False

            root = find_root_of_archive(file_names)
            if root is None:
                logger.critical("Cannot find a root directory in %s." %
                                self.import_source)
                return False

            self.import_dir = tempfile.mkdtemp()
            archive.extractall(self.import_dir)
            self.import_dir = os.path.join(self.import_dir, root)

        if self.drop:
            logger.info("Dropping and recreating the database.")
            try:
                metadata.drop_all()
            except sqlalchemy.exc.OperationalError as error:
                logger.critical("Unable to access DB.\n%r" % error)
                return False
        try:
            metadata.create_all()
        except sqlalchemy.exc.OperationalError as error:
            logger.critical("Unable to access DB.\n%r" % error)
            return False

        logger.info("Reading JSON file...")
        with open(os.path.join(self.import_dir, "contest.json")) as fin:
            contest_json = json.load(fin)
        if self.no_submissions:
            for user in contest_json["users"]:
                user["submissions"] = []
                user["user_tests"] = []

        if not self.only_files:
            with SessionGen(commit=False) as session:

                # Import the contest in JSON format.
                logger.info("Importing the contest from JSON file.")
                contest = Contest.import_from_dict(contest_json)
                session.add(contest)

                session.flush()
                contest_id = contest.id
                contest_files = contest.enumerate_files()
                session.commit()

        if not self.no_files:
            logger.info("Importing files.")
            files_dir = os.path.join(self.import_dir, "files")
            descr_dir = os.path.join(self.import_dir, "descriptions")
            for digest in contest_files:
                file_ = os.path.join(files_dir, digest)
                desc = os.path.join(descr_dir, digest)
                if not os.path.exists(file_) or not os.path.exists(desc):
                    logger.error("Some files needed to the contest "
                                 "are missing in the import directory. "
                                 "The import will continue. Be aware.")
                if not self.safe_put_file(file_, desc):
                    logger.critical("Unable to put file `%s' in the database. "
                                    "Aborting. Please remove the contest "
                                    "from the database." % file_)
                    # TODO: remove contest from the database.
                    return False

        logger.info("Import finished (contest id: %s)." % contest_id)
        logger.operation = ""

        # If we extracted an archive, we remove it.
        if self.import_dir != self.import_source:
            shutil.rmtree(self.import_dir)

        return True
Example #36
    def do_reimport(self):
        """Get the contest from the Loader and merge it."""
        with SessionGen(commit=False) as session:
            # Load the old contest from the database.
            old_contest = Contest.get_from_id(self.old_contest_id, session)
            old_users = dict((x.username, x) for x in old_contest.users)
            old_tasks = dict((x.name, x) for x in old_contest.tasks)

            # Load the new contest from the filesystem.
            new_contest, new_tasks, new_users = self.loader.get_contest()

            # Updates contest-global settings that are set in new_contest.
            self._update_columns(old_contest, new_contest)

            # Do the actual merge: compare all users of the old and of
            # the new contest and see if we need to create, update or
            # delete them. Delete only if authorized, fail otherwise.
            users = set(old_users.keys()) | set(new_users)
            for username in users:
                old_user = old_users.get(username, None)

                if old_user is None:
                    # Create a new user.
                    logger.info("Creating user %s" % username)
                    new_user = self.loader.get_user(username)
                    old_contest.users.append(new_user)
                elif username in new_users:
                    # Update an existing user.
                    logger.info("Updating user %s" % username)
                    new_user = self.loader.get_user(username)
                    self._update_object(old_user, new_user)
                else:
                    # Delete an existing user.
                    if self.force:
                        logger.info("Deleting user %s" % username)
                        old_contest.users.remove(old_user)
                    else:
                        logger.critical(
                            "User %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            username)
                        return False

            # The same for tasks. Setting num for tasks requires a bit
            # of trickery, since we have to avoid triggering a
            # duplicate key constraint violation while we're messing
            # with the task order. To do that we just set a
            # sufficiently high number on the first pass and then fix
            # it on a second pass.
            tasks = set(old_tasks.keys()) | set(new_tasks)
            current_num = max(len(old_tasks), len(new_tasks))
            for task in tasks:
                old_task = old_tasks.get(task, None)

                if old_task is None:
                    # Create a new task.
                    logger.info("Creating task %s" % task)
                    new_task = self.loader.get_task(task)
                    new_task.num = current_num
                    current_num += 1
                    old_contest.tasks.append(new_task)
                elif task in new_tasks:
                    # Update an existing task.
                    if self.full or self.loader.has_changed(task):
                        logger.info("Updating task %s" % task)
                        new_task = self.loader.get_task(task)
                        new_task.num = current_num
                        current_num += 1
                        self._update_object(old_task, new_task)
                    else:
                        logger.info("Task %s has not changed" % task)
                else:
                    # Delete an existing task.
                    if self.force:
                        logger.info("Deleting task %s" % task)
                        session.delete(old_task)
                    else:
                        logger.critical(
                            "Task %s exists in old contest, but "
                            "not in the new one. Use -f to force." %
                            task)
                        return False

                session.flush()

            # And finally we fix the numbers; old_contest must be
            # refreshed because otherwise SQLAlchemy would not notice
            # that some tasks may have been deleted.
            tasks_order = dict((name, num)
                               for num, name in enumerate(new_tasks))
            session.refresh(old_contest)
            for task in old_contest.tasks:
                task.num = tasks_order[task.name]

            session.commit()

        logger.info("Reimport finished (contest id: %s)." %
                    self.old_contest_id)

        return True
Example #37
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.error("The specified file already exists, "
                             "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.error("The specified directory already exists, "
                         "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")
            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(contest.export_to_dict(self.skip_submissions,
                                                 self.skip_user_tests),
                          fout, indent=4)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #38
    def initialize(self, ranking, log_file):
        """Send to the ranking all the data that are supposed to be
        sent before the contest: contest, users, tasks. No support for
        teams, flags and faces.

        ranking ((str, str, str)): protocol, address and authorization
                                   string of ranking server.
        log_file (writable file object): the file to use to write logs.

        raise CannotSendError in case of communication errors.

        """
        #logger.info("Initializing ranking %s." % ranking[1])
        log_file.write("Initializing ranking %s.\n" % ranking[1])
        log_file.flush()

        try:
            connection = get_connection(ranking[:2], log_file)
            auth = ranking[2]

            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)
                if contest is None:
                    #logger.error("Received request for unexistent contest id %s." %
                    #             self.contest_id)
                    log_file.write("Received request for unexistent contest id %s.\n" %
                                   self.contest_id)
                    log_file.flush()
                    raise KeyError
                contest_name = contest.name
                contest_url = "/contests/%s" % encode_id(contest_name)
                contest_data = {
                    "name": contest.description,
                    "begin": int(make_timestamp(contest.start)),
                    "end": int(make_timestamp(contest.stop))}

                users = dict((encode_id(user.username),
                              {"f_name": user.first_name,
                               "l_name": user.last_name,
                               "team": None})
                             for user in contest.users
                             if not user.hidden)

                tasks = dict((encode_id(task.name),
                              {"name": task.title,
                               "contest": encode_id(contest.name),
                               "max_score": 100.0,
                               "extra_headers": [],
                               "order": task.num,
                               "short_name": task.name})
                             for task in contest.tasks)

            safe_put_data(connection, contest_url, contest_data, auth,
                          "sending contest %s to ranking %s" % (contest_name, ranking[1]),
                          log_file)

            safe_put_data(connection, "/users/", users, auth,
                          "sending users to ranking %s" % ranking[1],
                          log_file)

            safe_put_data(connection, "/tasks/", tasks, auth,
                          "sending tasks to ranking %s" % ranking[1],
                          log_file)

        except CannotSendError as error:
            # Delete it to make get_connection try to create it again.
            del active_connections[ranking[1]]
            raise error
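make_timestamp and encode_id above are likewise helpers defined elsewhere; as a rough illustration, a timestamp helper compatible with the int(make_timestamp(contest.start)) calls could be as simple as the following (an assumption, not the project's own code):

import calendar

def make_timestamp(dt):
    # Hypothetical sketch: turn a (naive, UTC) datetime into seconds
    # since the Unix epoch, so the ranking receives plain integers.
    return calendar.timegm(dt.timetuple())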