Example #1
def get_score_type(submission=None, task=None):
    """Given a task, instantiate the corresponding ScoreType class.

    submission (Submission): the submission that needs the score type.
    task (Task): the task we want to score.

    return (object): an instance of the correct ScoreType class.

    """
    # Validate arguments.
    if [x is not None
        for x in [submission, task]].count(True) != 1:
        raise ValueError("Need at most one way to get the score type.")

    if submission is not None:
        task = submission.task

    score_type_name = task.score_type
    try:
        score_type_parameters = json.loads(task.score_type_parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error("Cannot decode score type parameters.\n%r." % error)
        raise
    public_testcases = dict((testcase.num, testcase.public)
                            for testcase in task.testcases)

    cls = plugin_lookup(score_type_name,
                        "cms.grading.scoretypes", "scoretypes")

    return cls(score_type_parameters, public_testcases)
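
A minimal usage sketch of the function above, assuming some_task is a fully populated Task object and using the max_scores() method documented in the later ScoreType examples:

# Hypothetical caller; some_task is an assumed Task row with score_type,
# score_type_parameters and testcases already populated.
score_type = get_score_type(task=some_task)
max_score, max_public_score = score_type.max_scores()
logger.info("Task %s is worth %.1f points (%.1f with public testcases only)." %
            (some_task.name, max_score, max_public_score))
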
Example #2
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.error("The specified directory already exists, "
                         "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            self.export_submissions()
            self.export_ranking()

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #3
def get_score_type(submission=None, task=None):
    """Given a task, istantiate the corresponding ScoreType class.

    submission (Submission): the submission that needs the score type.
    task (Task): the task we want to score.

    return (object): an instance of the correct ScoreType class.

    """
    # Validate arguments.
    if [x is not None for x in [submission, task]].count(True) != 1:
        raise ValueError("Need at most one way to get the score type.")

    if submission is not None:
        task = submission.task

    score_type_name = task.score_type
    try:
        score_type_parameters = json.loads(task.score_parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error("Cannot decode score type parameters.\n%r." % error)
        raise
    public_testcases = dict(
        (testcase.num, testcase.public) for testcase in task.testcases)

    cls = plugin_lookup(score_type_name, "cms.grading.scoretypes",
                        "scoretypes")

    return cls(score_type_parameters, public_testcases)
Example #4
        def fetch(self, digest, content_type, filename):
            """Sends the RPC to the FS.

            """
            if digest == "":
                logger.error("No digest given")
                self.finish()
                return
            try:
                self.temp_filename = \
                    self.application.service.file_cacher.get_file(
                        digest, temp_path=True)
            except Exception as error:
                logger.error("Exception while retrieving file `%s'. %r" %
                             (filename, error))
                self.finish()
                return

            self.set_header("Content-Type", content_type)
            self.set_header("Content-Disposition",
                            "attachment; filename=\"%s\"" % filename)
            self.start_time = time.time()
            self.size = 0
            self.temp_file = open(self.temp_filename, "rb")
            self.application.service.add_timeout(self._fetch_write_chunk,
                                                 None,
                                                 0.02,
                                                 immediately=True)
Example #5
    def safe_put_file(self, path, descr_path):
        """Put a file to FileCacher signaling every error (including
        digest mismatch).

        path (string): the path from which to load the file.
        descr_path (string): same for description.

        return (bool): True if all ok, False if something wrong.

        """
        # First read the description.
        try:
            with open(descr_path) as fin:
                description = fin.read()
        except IOError:
            description = ''

        # Put the file.
        try:
            digest = self.file_cacher.put_file(path=path,
                                               description=description)
        except Exception as error:
            logger.error("File %s could not be put to file server (%r), "
                         "aborting." % (path, error))
            return False

        # Then check the digest.
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.error("File %s has hash %s, but the server returned %s, "
                         "aborting." % (path, calc_digest, digest))
            return False

        return True
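
A hedged sketch of a caller for safe_put_file, aborting as soon as one file fails the put-and-verify round trip; exporter, files_to_export and its (path, descr_path) pairs are assumptions for illustration.

# exporter exposes safe_put_file as defined above; files_to_export is an
# assumed iterable of (path, descr_path) pairs.
for path, descr_path in files_to_export:
    if not exporter.safe_put_file(path, descr_path):
        logger.error("Aborting export: %s could not be stored." % path)
        break
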
Example #6
    def release_worker(self, shard):
        """To be called by ES when it receives a notification that a
        job finished.

        Note: if the worker is scheduled to be disabled, then we
        disable it, and notify the ES to discard the outcome obtained
        by the worker.

        shard (int): the worker to release.

        returns (bool): whether the result is to be ignored.

        """
        if self._job[shard] == WorkerPool.WORKER_INACTIVE:
            err_msg = "Trying to release worker while it's inactive."
            logger.error(err_msg)
            raise ValueError(err_msg)
        ret = self._ignore[shard]
        self._start_time[shard] = None
        self._side_data[shard] = None
        self._ignore[shard] = False
        if self._schedule_disabling[shard]:
            self._job[shard] = WorkerPool.WORKER_DISABLED
            self._schedule_disabling[shard] = False
            logger.info("Worker %s released and disabled." % shard)
        else:
            self._job[shard] = WorkerPool.WORKER_INACTIVE
            logger.debug("Worker %s released." % shard)
        return ret
Example #7
    def safe_get_file(self, digest, path, descr_path=None):
        """Get file from FileCacher ensuring that the digest is
        correct.

        digest (string): the digest of the file to retrieve.
        path (string): the path where to save the file.
        descr_path (string): the path where to save the description.

        return (bool): True if all ok, False if something wrong.

        """
        # First get the file
        try:
            self.file_cacher.get_file(digest, path=path)
        except Exception as error:
            logger.error("File %s could not retrieved from file server (%r)." %
                         (digest, error))
            return False

        # Then check the digest
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.error("File %s has wrong hash %s." % (digest, calc_digest))
            return False

        # If applicable, retrieve also the description
        if descr_path is not None:
            with codecs.open(descr_path, 'w', encoding='utf-8') as fout:
                fout.write(self.file_cacher.describe(digest))

        return True
Example #8
File: __init__.py Project: beyondai/cms
def get_task_type(name=None, parameters=None,
                  dataset=None):
    """Construct the TaskType specified by parameters.

    Load the TaskType class named "name" and instantiate it with the
    data structure obtained by JSON-decoding "parameters".
    If "dataset" is given then all other arguments should be omitted as
    they are obtained from the dataset.

    name (str): the name of the TaskType class
    parameters (str): the JSON-encoded parameters
    dataset (Dataset): the dataset whose TaskType we want

    return (TaskType): an instance of the correct TaskType class.

    """
    if dataset is not None:
        if any(x is not None for x in (name, parameters)):
            raise ValueError("Need exactly one way to get the task type.")

        name = dataset.task_type
        parameters = dataset.task_type_parameters

    elif any(x is None for x in (name, parameters)):
        raise ValueError("Need exactly one way to get the task type.")

    class_ = get_task_type_class(name)

    try:
        parameters = json.loads(parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error("Cannot decode task type parameters.\n%r." % error)
        raise

    return class_(parameters)
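
The two call patterns this variant accepts, sketched under the assumption that some_dataset is a Dataset row; the "Batch" name and its JSON parameter string are only an illustrative guess at a valid TaskType configuration.

# Either hand over a dataset (name and parameters are then taken from it)...
task_type = get_task_type(dataset=some_dataset)
# ...or pass the raw name/parameters pair explicitly (both are required).
task_type = get_task_type(
    name="Batch",
    parameters='["alone", ["input.txt", "output.txt"], "diff"]')
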
Example #9
File: Checker.py Project: strogo/cms-1
    def echo_callback(self, data, error=None):
        """Callback for check.

        """
        current = time.time()
        logger.debug("Checker.echo_callback")
        if error is not None:
            return
        try:
            service, time_ = data.split()
            time_ = float(time_)
            name, shard = service.split(",")
            shard = int(shard)
            service = ServiceCoord(name, shard)
            if service not in self.waiting_for or current - time_ > 10:
                logger.warning("Got late reply (%5.3lf s) from %s."
                            % (current - time_, service))
            else:
                if time_ - self.waiting_for[service] > 0.001:
                    logger.warning("Someone cheated on the timestamp?!")
                logger.info("Got reply (%5.3lf s) from %s."
                            % (current - time_, service))
                del self.waiting_for[service]
        except KeyError:
            logger.error("Echo answer mis-shapen.")
Example #10
    def __init__(self, service=None, path=None):
        """Initialization.

        service (Service): the service we are running in. If None, we
                           simply avoid caching and allowing the
                           service to step in once in a while.
        path (string): if specified, back the FileCacher with a file
                       system-based storage instead of the default
                       database-based one. The specified directory
                       will be used as root for the storage and it
                       will be created if it doesn't exist.

        """
        self.service = service
        if path is None:
            self.backend = DBBackend(self.service)
        else:
            self.backend = FSBackend(path, self.service)
        if self.service is None:
            self.base_dir = tempfile.mkdtemp(dir=config.temp_dir)
        else:
            self.base_dir = os.path.join(
                config.cache_dir,
                "fs-cache-%s-%d" % (service._my_coord.name,
                                    service._my_coord.shard))
        self.tmp_dir = os.path.join(self.base_dir, "tmp")
        self.obj_dir = os.path.join(self.base_dir, "objects")
        if not mkdir(config.cache_dir) or \
               not mkdir(self.base_dir) or \
               not mkdir(self.tmp_dir) or \
               not mkdir(self.obj_dir):
            logger.error("Cannot create necessary directories.")
Example #11
    def __init__(self, service=None, path=None):
        """Initialization.

        service (Service): the service we are running in. If None, we
                           simply avoid caching and allowing the
                           service to step in once in a while.
        path (string): if specified, back the FileCacher with a file
                       system-based storage instead of the default
                       database-based one. The specified directory
                       will be used as root for the storage and it
                       will be created if it doesn't exist.

        """
        self.service = service
        if path is None:
            self.backend = DBBackend(self.service)
        else:
            self.backend = FSBackend(path, self.service)
        if self.service is None:
            self.base_dir = tempfile.mkdtemp(dir=config.temp_dir)
        else:
            self.base_dir = os.path.join(
                config.cache_dir, "fs-cache-%s-%d" %
                (service._my_coord.name, service._my_coord.shard))
        self.tmp_dir = os.path.join(self.base_dir, "tmp")
        self.obj_dir = os.path.join(self.base_dir, "objects")
        if not mkdir(config.cache_dir) or \
               not mkdir(self.base_dir) or \
               not mkdir(self.tmp_dir) or \
               not mkdir(self.obj_dir):
            logger.error("Cannot create necessary directories.")
Example #12
    def evaluation_ended(self, submission_id,
                         timestamp, evaluation_tries,
                         evaluated):
        """Actions to be performed when we have a submission that has
        been evaluated. In particular: we inform ScoringService on
        success, we requeue on failure.

        submission_id (string): db id of the submission.
        timestamp (int): time of submission.
        evaluation_tries (int): # of tentative evaluations.
        evaluated (bool): if the submission is successfully evaluated.

        """
        # Evaluation successful, we inform ScoringService so it can
        # update the score.
        if evaluated:
            self.scoring_service.new_evaluation(submission_id=submission_id)
        # Evaluation unsuccessful, we requeue (or not).
        elif evaluation_tries > EvaluationService.MAX_EVALUATION_TRIES:
            logger.error("Maximum tries reached for the "
                         "evaluation of submission %s. I will "
                         "not try again." % submission_id)
        else:
            # Note: lower priority (LOW instead of MEDIUM) for
            # evaluations that are probably failing again.
            self.push_in_queue((EvaluationService.JOB_TYPE_EVALUATION,
                                submission_id),
                               EvaluationService.JOB_PRIORITY_LOW,
                               timestamp)
Example #13
    def evaluation_ended(self, submission):
        """Actions to be performed when we have a submission that has
        been evaluated. In particular: we inform ScoringService on
        success, we requeue on failure.

        submission (Submission): the submission.

        """
        # Evaluation successful, we inform ScoringService so it can
        # update the score. We need to commit the session beforehand,
        # otherwise the ScoringService wouldn't receive the updated
        # submission.
        if submission.evaluated():
            submission.sa_session.commit()
            self.scoring_service.new_evaluation(submission_id=submission.id)
        # Evaluation unsuccessful, we requeue (or not).
        elif submission.evaluation_tries > EvaluationService.MAX_EVALUATION_TRIES:
            logger.error(
                "Maximum tries reached for the evaluation of submission %d. "
                "I will not try again." % submission.id
            )
        else:
            # Note: lower priority (LOW instead of MEDIUM) for
            # evaluations that are probably failing again.
            self.push_in_queue(
                (EvaluationService.JOB_TYPE_EVALUATION, submission.id),
                EvaluationService.JOB_PRIORITY_LOW,
                submission.timestamp,
            )
Example #14
    def user_test_compilation_ended(self, user_test):
        """Actions to be performed when we have a user test that has
        ended compilation. In particular: we queue evaluation if
        compilation was ok; we requeue compilation if it failed.

        user_test (UserTest): the user test.

        """
        # Compilation was ok, so we evaluate
        if user_test.compilation_outcome == "ok":
            self.push_in_queue(
                (EvaluationService.JOB_TYPE_TEST_EVALUATION, user_test.id),
                EvaluationService.JOB_PRIORITY_MEDIUM,
                user_test.timestamp,
            )
        # If instead user test failed compilation, we don't evaluate
        elif user_test.compilation_outcome == "fail":
            logger.info("User test %d did not compile. "
                        "Not going to evaluate." % user_test.id)
        # If compilation failed for our fault, we requeue or not
        elif user_test.compilation_outcome is None:
            if user_test.compilation_tries > EvaluationService.MAX_TEST_COMPILATION_TRIES:
                logger.error(
                    "Maximum tries reached for the "
                    "compilation of user test %d. I will "
                    "not try again." % (user_test.id)
                )
            else:
                # Note: lower priority (MEDIUM instead of HIGH) for
                # compilations that are probably failing again
                self.push_in_queue(
                    (EvaluationService.JOB_TYPE_TEST_COMPILATION, user_test.id),
                    EvaluationService.JOB_PRIORITY_MEDIUM,
                    user_test.timestamp,
                )
Example #15
File: __init__.py Project: strogo/cms-1
        def fetch(self, digest, content_type, filename):
            """Sends the RPC to the FS.

            """
            if digest == "":
                logger.error("No digest given")
                self.finish()
                return
            try:
                self.temp_filename = \
                    self.application.service.file_cacher.get_file(
                        digest, temp_path=True)
            except Exception as error:
                logger.error("Exception while retrieving file `%s'. %r" %
                             (filename, error))
                self.finish()
                return

            self.set_header("Content-Type", content_type)
            self.set_header("Content-Disposition",
                            "attachment; filename=\"%s\"" % filename)
            self.start_time = time.time()
            self.size = 0
            self.temp_file = open(self.temp_filename, "rb")
            self.application.service.add_timeout(self._fetch_write_chunk,
                                                 None, 0.02,
                                                 immediately=True)
Example #16
File: Worker.py Project: kennyboy/cms
    def execute_job(self, job_dict):
        job = Job.import_from_dict_with_type(job_dict)

        if self.work_lock.acquire(False):

            try:
                logger.operation = "job '%s'" % (job.info)
                logger.info("Request received")
                job.shard = self.shard

                self.task_type = get_task_type(job, self.file_cacher)
                self.task_type.execute_job()
                logger.info("Request finished.")

                return job.export_to_dict()

            except:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
                raise JobException(err_msg)

            finally:
                self.task_type = None
                self.session = None
                logger.operation = ""
                self.work_lock.release()

        else:
            err_msg = "Request '%s' received, " \
                "but declined because of acquired lock" % \
                (job.info)
            logger.warning(err_msg)
            raise JobException(err_msg)
Example #17
File: Worker.py Project: Mloc/cms
    def execute_job(self, job_dict):
        job = Job.import_from_dict_with_type(job_dict)

        if self.work_lock.acquire(False):

            try:
                logger.operation = "job '%s'" % (job.info)
                logger.info("Request received")
                job.shard = self.shard

                self.task_type = get_task_type(job, self.file_cacher)
                self.task_type.execute_job()
                logger.info("Request finished.")

                return job.export_to_dict()

            except:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
                raise JobException(err_msg)

            finally:
                self.task_type = None
                self.session = None
                logger.operation = ""
                self.work_lock.release()

        else:
            err_msg = "Request '%s' received, " \
                "but declined because of acquired lock" % \
                (job.info)
            logger.warning(err_msg)
            raise JobException(err_msg)
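
A hedged sketch of the contract both execute_job variants expose to their caller: a busy worker or a failed operation surfaces as JobException, anything else returns the job dictionary. The call site itself is an assumption.

# job is a Job instance as in the examples above; worker is the assumed
# handle on which execute_job is invoked.
try:
    result_dict = worker.execute_job(job.export_to_dict())
except JobException as error:
    logger.warning("Worker declined or failed the job: %s" % error)
else:
    job = Job.import_from_dict_with_type(result_dict)
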
Example #18
    def safe_get_file(self, digest, path, descr_path=None):
        """Get file from FileCacher ensuring that the digest is
        correct.

        digest (string): the digest of the file to retrieve.
        path (string): the path where to save the file.
        descr_path (string): the path where to save the description.

        return (bool): True if all ok, False if something wrong.

        """
        # First get the file
        try:
            self.file_cacher.get_file(digest, path=path)
        except Exception as error:
            logger.error("File %s could not retrieved from file server (%r)." %
                         (digest, error))
            return False

        # Then check the digest
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.critical("File %s has wrong hash %s." %
                            (digest, calc_digest))
            return False

        # If applicable, retrieve also the description
        if descr_path is not None:
            with codecs.open(descr_path, 'w', encoding='utf-8') as fout:
                fout.write(self.file_cacher.describe(digest))

        return True
Example #19
    def submission_tokened(self, submission_id):
        """This RPC inform ScoringService that the user has played the
        token on a submission.

        submission_id (int): the id of the submission that changed.
        timestamp (int): the time of the token.

        """
        with SessionGen(commit=False) as session:
            submission = Submission.get_from_id(submission_id, session)

            if submission is None:
                logger.error("[submission_tokened] Received token request for "
                             "unexistent submission id %s." % submission_id)
                raise KeyError

            if submission.user.hidden:
                logger.info("[submission_tokened] Token for submission %d "
                            "not sent because user is hidden." % submission_id)
                return

            # Mark submission as tokened.
            self.submissions_tokened.add(submission_id)

            # Update RWS.
            self.rankings_send_token(submission)
Example #20
def get_task_type(name=None, parameters=None,
                  dataset=None):
    """Given a job, instantiate the corresponding TaskType class.

    job (Job): the job to perform.
    file_cacher (FileCacher): a file cacher object.
    dataset (Dataset): if we don't want to grade, but just to get
                 information, we can provide only the
                 dataset and not the submission.
    task_type_name (string): again, if we only need the class, we can
                             give only the task type name.

    return (object): an instance of the correct TaskType class.

    """
    if dataset is not None:
        if any(x is not None for x in (name, parameters)):
            raise ValueError("Need exactly one way to get the task type.")

        name = dataset.task_type
        parameters = dataset.task_type_parameters

    elif any(x is None for x in (name, parameters)):
        raise ValueError("Need exactly one way to get the task type.")

    class_ = get_task_type_class(name)

    try:
        parameters = json.loads(parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error("Cannot decode task type parameters.\n%r." % error)
        raise

    return class_(parameters)
Example #21
    def submit(self, timestamp, username, password, t_id, t_short, files, language):
        """Execute the request for a submission.

        timestamp (int): seconds from the start.
        username (string): username issuing the submission.
        password (string): password of username.
        t_id (string): id of the task.
        t_short (string): short name of the task.
        files ([dict]): list of dictionaries with keys 'filename' and
                        'digest'.
        language (string): the extension the files should have.

        """
        logger.info("%s - Submitting for %s on task %s." % (to_time(timestamp), username, t_short))
        if len(files) != 1:
            logger.error("We cannot submit more than one file.")
            return

        # Copying submission files into a temporary directory with the
        # correct name. Otherwise, SubmissionRequest does not know how
        # to interpret the file (and which language they are in).
        temp_dir = tempfile.mkdtemp(dir=config.temp_dir)
        for file_ in files:
            temp_filename = os.path.join(temp_dir, file_["filename"].replace("%l", language))
            shutil.copy(os.path.join(self.import_source, "files", files[0]["digest"]), temp_filename)
            file_["filename"] = temp_filename

        filename = os.path.join(files[0]["filename"])
        browser = Browser()
        browser.set_handle_robots(False)
        step(LoginRequest(browser, username, password, base_url=self.cws_address))
        step(SubmitRequest(browser, (int(t_id), t_short), filename=filename, base_url=self.cws_address))
        shutil.rmtree(temp_dir)
Example #22
    def toggle_autorestart(self, service):
        """If the service is scheduled for autorestart, disable it,
        otherwise enable it.

        service (string): format: name,shard.

        return (bool/None): current status of will_restart.

        """
        # If the contest_id is not set, we cannot autorestart.
        if self.contest_id is None:
            return None

        # Decode name,shard
        try:
            idx = service.rindex(",")
        except ValueError:
            logger.error("Unable to decode service string.")
        name = service[:idx]
        try:
            shard = int(service[idx + 1:])
        except ValueError:
            logger.error("Unable to decode service shard.")
        service = ServiceCoord(name, shard)

        self._will_restart[service] = not self._will_restart[service]
        logger.info("Will restart %s,%s is now %s." %
                    (service.name, service.shard, self._will_restart[service]))

        return self._will_restart[service]
Example #23
File: Checker.py Project: sekouperry/cms
    def echo_callback(self, data, error=None):
        """Callback for check.

        """
        current = time.time()
        logger.debug("Checker.echo_callback")
        if error is not None:
            return
        try:
            service, time_ = data.split()
            time_ = float(time_)
            name, shard = service.split(",")
            shard = int(shard)
            service = ServiceCoord(name, shard)
            if service not in self.waiting_for or current - time_ > 10:
                logger.warning("Got late reply (%5.3lf s) from %s." %
                               (current - time_, service))
            else:
                if time_ - self.waiting_for[service] > 0.001:
                    logger.warning("Someone cheated on the timestamp?!")
                logger.info("Got reply (%5.3lf s) from %s." %
                            (current - time_, service))
                del self.waiting_for[service]
        except KeyError:
            logger.error("Echo answer mis-shapen.")
Example #24
    def safe_put_file(self, path, descr_path):
        """Put a file to FileCacher signaling every error (including
        digest mismatch).

        path (string): the path from which to load the file.
        descr_path (string): same for description.

        return (bool): True if all ok, False if something wrong.

        """
        # First read the description.
        try:
            with open(descr_path) as fin:
                description = fin.read()
        except IOError:
            description = ''

        # Put the file.
        try:
            digest = self.file_cacher.put_file(path=path,
                                               description=description)
        except Exception as error:
            logger.error("File %s could not be put to file server (%r), "
                         "aborting." % (path, error))
            return False

        # Then check the digest.
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.error("File %s has hash %s, but the server returned %s, "
                         "aborting." % (path, calc_digest, digest))
            return False

        return True
Example #25
    def get_submission_data(self, submission_id):
        """Given the id, returns the submission object and a new task
        type object of the correct type.

        submission_id (int): id of the submission.

        return (Submission, TaskType): corresponding objects.

        raise: JobException if id or task type not found.

        """
        submission = Submission.get_from_id(submission_id, self.session)
        if submission is None:
            err_msg = "Couldn't find submission %s " \
                      "in the database." % submission_id
            logger.critical(err_msg)
            raise JobException(err_msg)

        try:
            task_type = get_task_type(submission, self.file_cacher)
        except KeyError as error:
            err_msg = "Task type `%s' not known for " \
                "submission %s (error: %s)." % (
                submission.task.task_type, submission_id, error)
            logger.error(err_msg)
            raise JobException(err_msg)

        return (submission, task_type)
Example #26
    def put_file(self,
                 description="",
                 binary_data=None,
                 file_obj=None,
                 path=None):
        """Put a file in the storage, and keep a copy locally. The
        caller has to provide exactly one among binary_data, file_obj
        and path.

        description (string): a human-readable description of the
                              content.
        binary_data (string): the content of the file to send.
        file_obj (file): the file-like object to send.
        path (string): the file to send.

        """
        temp_fd, temp_path = tempfile.mkstemp(dir=self.tmp_dir)
        os.close(temp_fd)

        # Input checking
        if [binary_data, file_obj, path].count(None) != 2:
            error_string = "No content (or too many) specified in put_file."
            logger.error(error_string)
            raise ValueError(error_string)

        logger.debug("Reading input file to store on the database.")

        # Copy the file content, in whatever form it arrives, into the
        # temporary file
        # TODO - This could be long lasting: probably it would be wise
        # to call self.service._step() periodically, but this would
        # require reimplementing shutil functions
        if path is not None:
            shutil.copy(path, temp_path)
        elif binary_data is not None:
            with open(temp_path, 'wb') as temp_file:
                temp_file.write(binary_data)
        else:  # file_obj is not None.
            with open(temp_path, 'wb') as temp_file:
                shutil.copyfileobj(file_obj, temp_file)

        hasher = hashlib.sha1()

        # Calculate the file SHA1 digest
        with open(temp_path, 'rb') as temp_file:
            buf = temp_file.read(self.CHUNK_SIZE)
            while buf != '':
                hasher.update(buf)
                buf = temp_file.read(self.CHUNK_SIZE)
        digest = hasher.hexdigest()

        logger.debug("File has digest %s." % digest)

        self.backend.put_file(digest, temp_path, description=description)

        # Move the temporary file in the cache
        shutil.move(temp_path, os.path.join(self.obj_dir, digest))

        return digest
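
The three mutually exclusive ways of feeding content to put_file, as enforced by the input check above; cacher and the literal file names are assumptions for illustration.

digest = cacher.put_file(path="statement.pdf", description="task statement")
digest = cacher.put_file(binary_data="hello world", description="inline blob")
with open("statement.pdf", "rb") as source:
    digest = cacher.put_file(file_obj=source, description="from a file object")
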
Example #27
    def put_file(self, description="", binary_data=None,
                 file_obj=None, path=None):
        """Put a file in the storage, and keep a copy locally. The
        caller has to provide exactly one among binary_data, file_obj
        and path.

        description (string): a human-readable description of the
                              content.
        binary_data (string): the content of the file to send.
        file_obj (file): the file-like object to send.
        path (string): the file to send.

        """
        temp_fd, temp_path = tempfile.mkstemp(dir=self.tmp_dir)
        os.close(temp_fd)

        # Input checking
        if [binary_data, file_obj, path].count(None) != 2:
            error_string = "No content (or too many) specified in put_file."
            logger.error(error_string)
            raise ValueError(error_string)

        logger.debug("Reading input file to store on the database.")

        # Copy the file content, in whatever form it arrives, into the
        # temporary file
        # TODO - This could be long lasting: probably it would be wise
        # to call self.service._step() periodically, but this would
        # require reimplementing shutil functions
        if path is not None:
            shutil.copy(path, temp_path)
        elif binary_data is not None:
            with open(temp_path, 'wb') as temp_file:
                temp_file.write(binary_data)
        else:  # file_obj is not None.
            with open(temp_path, 'wb') as temp_file:
                shutil.copyfileobj(file_obj, temp_file)

        hasher = hashlib.sha1()

        # Calculate the file SHA1 digest
        with open(temp_path, 'rb') as temp_file:
            buf = temp_file.read(self.CHUNK_SIZE)
            while buf != '':
                hasher.update(buf)
                buf = temp_file.read(self.CHUNK_SIZE)
        digest = hasher.hexdigest()

        logger.debug("File has digest %s." % digest)

        self.backend.put_file(digest, temp_path, description=description)

        # Move the temporary file in the cache
        shutil.move(temp_path,
                    os.path.join(self.obj_dir, digest))

        return digest
Example #28
    def purge_cache(self):
        """Delete all the content of the cache.

        """
        shutil.rmtree(self.base_dir)
        if not mkdir(config.cache_dir) or \
               not mkdir(self.base_dir) or \
               not mkdir(self.tmp_dir) or \
               not mkdir(self.obj_dir):
            logger.error("Cannot create necessary directories.")
Example #29
    def purge_cache(self):
        """Delete all the content of the cache.

        """
        shutil.rmtree(self.base_dir)
        if not mkdir(config.cache_dir) or \
               not mkdir(self.base_dir) or \
               not mkdir(self.tmp_dir) or \
               not mkdir(self.obj_dir):
            logger.error("Cannot create necessary directories.")
Example #30
    def initialize(self, ranking):
        """Send to the ranking all the data that are supposed to be
        sent before the contest: contest, users, tasks. No support for
        teams, flags and faces.

        ranking ((string, string)): address and authorization string
                                    of ranking server.
        return (bool): success of operation

        """
        logger.info("Initializing rankings.")
        connection = httplib.HTTPConnection(ranking[0])
        auth = ranking[1]

        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)
            if contest is None:
                logger.error("Received request for unexistent contest id %s." %
                             self.contest_id)
                raise KeyError
            contest_name = contest.name
            contest_url = "/contests/%s" % encode_id(contest_name)
            contest_data = {"name": contest.description,
                            "begin": contest.start,
                            "end": contest.stop}

            users = [["/users/%s" % encode_id(user.username),
                      {"f_name": user.first_name,
                       "l_name": user.last_name,
                       "team": None}]
                     for user in contest.users
                     if not user.hidden]

            tasks = [["/tasks/%s" % encode_id(task.name),
                      {"name": task.title,
                       "contest": encode_id(contest.name),
                       "max_score": 100.0,
                       "extra_headers": [],
                       "order": task.num,
                       "short_name": encode_id(task.name)}]
                     for task in contest.tasks]

        safe_put_data(connection, contest_url, contest_data, auth,
                      "sending contest %s" % contest_name)

        for user in users:
            safe_put_data(connection, user[0], user[1], auth,
                          "sending user %s" % (user[1]["l_name"] + " " +
                                                user[1]["f_name"]))

        for task in tasks:
            safe_put_data(connection, task[0], task[1], auth,
                          "sending task %s" % task[1]["name"])

        return True
Example #31
    def compile(self):
        """See TaskType.compile."""
        # Detect the submission's language. The checks about the
        # formal correctness of the submission are done in CWS,
        # before accepting it.
        language = self.job.language

        # TODO: here we are sure that submission.files are the same as
        # task.submission_format. The following check shouldn't be
        # here, but in the definition of the task, since this actually
        # checks that task's task type and submission format agree.
        if len(self.job.files) != 1:
            self.job.success = True
            self.job.compilation_success = False
            self.job.text = "Invalid files in submission"
            logger.error("Submission contains %d files, expecting 1" % len(self.job.files))
            return True

        # Create the sandbox
        sandbox = create_sandbox(self)
        self.job.sandboxes.append(sandbox.path)

        # Prepare the source files in the sandbox
        files_to_get = {}
        format_filename = self.job.files.keys()[0]
        source_filenames = []
        # Stub.
        source_filenames.append("stub.%s" % language)
        files_to_get[source_filenames[-1]] = self.job.managers["stub.%s" % language].digest
        # User's submission.
        source_filenames.append(format_filename.replace("%l", language))
        files_to_get[source_filenames[-1]] = self.job.files[format_filename].digest
        for filename, digest in files_to_get.iteritems():
            sandbox.create_file_from_storage(filename, digest)

        # Prepare the compilation command
        executable_filename = format_filename.replace(".%l", "")
        command = get_compilation_command(language, source_filenames, executable_filename)

        # Run the compilation
        operation_success, compilation_success, text, plus = compilation_step(sandbox, command)

        # Retrieve the compiled executables
        self.job.success = operation_success
        self.job.compilation_success = compilation_success
        self.job.plus = plus
        self.job.text = text
        if operation_success and compilation_success:
            digest = sandbox.get_file_to_storage(
                executable_filename, "Executable %s for %s" % (executable_filename, self.job.info)
            )
            self.job.executables[executable_filename] = Executable(executable_filename, digest)

        # Cleanup
        delete_sandbox(sandbox)
Example #32
    def update_scores(self, new_submission_id):
        """Update the scores of the users assuming that only this
        submission appeared or was modified (i.e., tokened). The way
        to do this depends on the subclass, so we leave this
        unimplemented.

        new_submission_id (int): id of the newly added submission.

        """
        logger.error("Unimplemented method update_scores.")
        raise NotImplementedError
Example #33
File: Worker.py Project: s546360316/cms
    def execute_job_group(self, job_group_dict):
        job_group = JobGroup.import_from_dict(job_group_dict)

        if self.work_lock.acquire(False):

            try:
                self.ignore_job = False

                for k, job in job_group.jobs.iteritems():
                    logger.operation = "job '%s'" % (job.info)
                    logger.info("Request received")

                    job.shard = self.shard

                    # FIXME This is actually kind of a workaround...
                    # The only TaskType that needs it is OutputOnly.
                    job._key = k

                    # FIXME We're creating a new TaskType for each Job
                    # even if, at the moment, a JobGroup always uses
                    # the same TaskType and the same parameters. Yet,
                    # this could change in the future, so the best
                    # solution is to keep a cache of TaskTypes objects
                    # (like ScoringService does with ScoreTypes, except
                    # that we cannot index by Dataset ID here...).
                    task_type = get_task_type(job.task_type,
                                              job.task_type_parameters)
                    task_type.execute_job(job, self.file_cacher)

                    logger.info("Request finished.")

                    if not job.success or self.ignore_job:
                        job_group.success = False
                        break
                else:
                    job_group.success = True

                return job_group.export_to_dict()

            except:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
                raise JobException(err_msg)

            finally:
                logger.operation = ""
                self.work_lock.release()

        else:
            err_msg = "Request '%s' received, " \
                "but declined because of acquired lock" % \
                (job.info)
            logger.warning(err_msg)
            raise JobException(err_msg)
Example #34
    def max_scores(self):
        """Returns the maximum score that one could aim to in this
        problem. Also return the maximum score from the point of view
        of a user that did not play the token. Depend on the subclass.

        return (float, float): maximum score and maximum score with
                               only public testcases.

        """
        logger.error("Unimplemented method max_scores.")
        raise NotImplementedError
Example #35
    def max_scores(self):
        """Returns the maximum score that one could aim to in this
        problem. Also return the maximum score from the point of view
        of a user that did not play the token. Depend on the subclass.

        return (float, float): maximum score and maximum score with
                               only public testcases.

        """
        logger.error("Unimplemented method max_scores.")
        raise NotImplementedError
Example #36
    def update_scores(self, new_submission_id):
        """Update the scores of the users assuming that only this
        submission appeared or was modified (i.e., tokened). The way
        to do this depends on the subclass, so we leave this
        unimplemented.

        new_submission_id (int): id of the newly added submission.

        """
        logger.error("Unimplemented method update_scores.")
        raise NotImplementedError
Example #37
    def reduce(self, outcomes, parameter):
        """Return the score of a subtask given the outcomes.

        outcomes ([float]): the outcomes of the submission in the
                            testcases of the group.
        parameter (list): the parameters of the group.

        return (float): the score of the subtask.

        """
        logger.error("Unimplemented method reduce.")
        raise NotImplementedError
Example #38
    def reduce(self, outcomes, parameter):
        """Return the score of a subtask given the outcomes.

        outcomes ([float]): the outcomes of the submission in the
                            testcases of the group.
        parameter (list): the parameters of the group.

        return (float): the score of the subtask.

        """
        logger.error("Unimplemented method reduce.")
        raise NotImplementedError
Example #39
    def action(self, submission_id, job_type):
        """The actual work - that can be compilation or evaluation
        (the code is pretty much the same, the differences are in
        what we ask TaskType to do).

        submission_id (string): the submission to act on.
        job_type (string): a constant JOB_TYPE_*.

        """
        if self.work_lock.acquire(False):

            try:
                logger.operation = "%s of submission %s" % (job_type,
                                                            submission_id)
                logger.info("Request received: %s of submission %s." %
                            (job_type, submission_id))

                with SessionGen(commit=False) as self.session:

                    # Retrieve submission and task_type.
                    unused_submission, self.task_type = \
                        self.get_submission_data(submission_id)

                    # Store in the task type the shard number.
                    self.task_type.worker_shard = self.shard

                    # Do the actual work.
                    if job_type == Worker.JOB_TYPE_COMPILATION:
                        task_type_action = self.task_type.compile
                    elif job_type == Worker.JOB_TYPE_EVALUATION:
                        task_type_action = self.task_type.evaluate
                    else:
                        raise KeyError("Unexpected job type %s." % job_type)

                    logger.info("Request finished.")
                    return task_type_action()

            except:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
                raise JobException(err_msg)

            finally:
                self.task_type = None
                self.session = None
                logger.operation = ""
                self.work_lock.release()

        else:
            logger.warning("Request of %s of submission %s received, "
                           "but declined because of acquired lock" %
                           (job_type, submission_id))
            return False
Example #40
    def add_token(self, submission_id):
        """To call when a token is played, so that the scores updates.

        submission_id (int): id of the tokened submission.

        """
        try:
            self.pool[submission_id]["tokened"] = True
        except KeyError:
            logger.error("Submission %d not found in ScoreType's pool." %
                         submission_id)

        self.update_scores(submission_id)
Example #41
    def add_token(self, submission_id):
        """To call when a token is played, so that the scores updates.

        submission_id (int): id of the tokened submission.

        """
        try:
            self.pool[submission_id]["tokened"] = True
        except KeyError:
            logger.error("Submission %d not found in ScoreType's pool." %
                         submission_id)

        self.update_scores(submission_id)
Example #42
    def compute_score(self, submission_id):
        """Computes a score of a single submission. We don't know here
        how to do it, but our subclasses will.

        submission_id (int): the submission to evaluate.

        returns (float, str, float, str, [str]): respectively: the
            score, the HTML string with additional information (e.g.
            testcases' and subtasks' scores), the same score and
            information from the point of view of a user that did not
            play a token, and the list of strings to send to RWS.

        """
        logger.error("Unimplemented method compute_score.")
        raise NotImplementedError
Example #43
    def compute_score(self, submission_id):
        """Computes a score of a single submission. We don't know here
        how to do it, but our subclasses will.

        submission_id (int): the submission to evaluate.

        returns (float, str, float, str, [str]): respectively: the
            score, the HTML string with additional information (e.g.
            testcases' and subtasks' scores), the same score and
            information from the point of view of a user that did not
            play a token, and the list of strings to send to RWS.

        """
        logger.error("Unimplemented method compute_score.")
        raise NotImplementedError
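
The unimplemented methods in Examples #32 to #43 outline the ScoreType interface; below is a hedged, minimal idea of what a concrete subclass could fill in. The self.public_testcases attribute and the self.pool "evaluations" layout are assumptions inferred from the surrounding docstrings, not confirmed by the original code.

class CountScoreType(ScoreType):  # illustrative name, not from the project
    """Score a submission by counting testcases with a positive outcome."""

    def max_scores(self):
        # One point per testcase; the public score counts public testcases only.
        public = sum(1 for is_public in self.public_testcases.values() if is_public)
        return float(len(self.public_testcases)), float(public)

    def compute_score(self, submission_id):
        # Assumed pool layout: a list of per-testcase outcomes.
        outcomes = self.pool[submission_id]["evaluations"]
        score = float(sum(1 for outcome in outcomes if outcome > 0))
        # (score, details, public score, public details, strings for RWS)
        return score, "", score, "", []
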
Example #44
File: __init__.py Project: bblackham/cms
def get_score_type(submission=None, task=None, dataset_id=None):
    """Given a task, instantiate the corresponding ScoreType class.

    submission (Submission): the submission that needs the score type.
    task (Task): the task we want to score.
    dataset_id (int): the dataset id to use, or None for active.

    return (object): an instance of the correct ScoreType class.

    """
    # Validate arguments.
    if [x is not None
        for x in [submission, task]].count(True) != 1:
        raise ValueError("Need at most one way to get the score type.")

    if submission is not None:
        task = submission.task

    if dataset_id is None:
        dataset_id = task.active_dataset_id

    dataset = Dataset.get_from_id(dataset_id, task.sa_session)

    score_type_name = dataset.score_type
    try:
        score_type_parameters = json.loads(dataset.score_type_parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error("Cannot decode score type parameters for task "
            "%d \"%s\", dataset %d \"%s\"\n%r." % (
                task.id, task.name, dataset.id, dataset.description,
                error))
        return None

    public_testcases = dict(
        (testcase.num, testcase.public)
        for testcase in dataset.testcases)

    cls = plugin_lookup(score_type_name,
                        "cms.grading.scoretypes", "scoretypes")

    try:
        return cls(score_type_parameters, public_testcases)
    except Exception as error:
        logger.error("Cannot instantiate score type for task "
            "%d \"%s\", dataset %d \"%s\"\n%r." % (
                task.id, task.name, dataset.id, dataset.description,
                error))
        return None
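
Unlike Example #1, this fork returns None instead of re-raising when decoding or instantiation fails, so a caller has to check for it; some_task is again an assumed Task object.

score_type = get_score_type(task=some_task)
if score_type is None:
    logger.warning("Skipping scoring for task %s." % some_task.name)
else:
    max_score, max_public_score = score_type.max_scores()
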
Example #45
    def submission_tokened(self, submission_id):
        """This RPC inform ScoringService that the user has played the
        token on a submission.

        submission_id (int): the id of the submission that changed.
        timestamp (int): the time of the token.

        """
        with SessionGen(commit=False) as session:
            submission = Submission.get_from_id(submission_id, session)
            if submission is None:
                logger.error("[submission_tokened] Received token request for "
                             "unexistent submission id %s." % submission_id)
                raise KeyError
            elif submission.user.hidden:
                logger.info("[submission_tokened] Token for submission %d "
                            "not sent because user is hidden." % submission_id)
                return

            # Mark submission as tokened.
            self.submissions_tokened.add(submission_id)

            # Data to send to remote rankings.
            submission_put_data = {
                "user": encode_id(submission.user.username),
                "task": encode_id(submission.task.name),
                "time": int(make_timestamp(submission.timestamp))
            }
            subchange_id = "%s%st" % \
                (int(make_timestamp(submission.token.timestamp)),
                 submission_id)
            subchange_put_data = {
                "submission": encode_id(submission_id),
                "time": int(make_timestamp(submission.token.timestamp)),
                "token": True
            }

        # Adding operations to the queue.
        with self.operation_queue_lock:
            for ranking in self.rankings:
                self.submission_queue.setdefault(
                    ranking,
                    dict())[encode_id(submission_id)] = \
                    submission_put_data
                self.subchange_queue.setdefault(
                    ranking,
                    dict())[encode_id(subchange_id)] = \
                    subchange_put_data
Example #46
File: TaskType.py Project: kennyboy/cms
def create_sandbox(task_type):
    """Create a sandbox, and return it.

    task_type (TaskType): a task type instance.

    return (Sandbox): a sandbox.

    raise: JobException

    """
    try:
        sandbox = Sandbox(task_type.file_cacher)
    except (OSError, IOError):
        err_msg = "Couldn't create sandbox."
        logger.error("%s\n%s" % (err_msg, traceback.format_exc()))
        raise JobException(err_msg)
    return sandbox
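
A sketch pairing create_sandbox with the delete_sandbox call used in Example #31; the try/finally placement and input_digest are assumptions about how a careful caller would guarantee cleanup.

sandbox = create_sandbox(task_type)
try:
    # Populate the sandbox from the file storage (input_digest is assumed known).
    sandbox.create_file_from_storage("input.txt", input_digest)
    # ... run the compilation or evaluation step here ...
finally:
    delete_sandbox(sandbox)
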
Example #47
    def get_public_outcome(self, outcome, parameter):
        """Return a public outcome from an outcome.

        The public outcome is shown to the user, and this method
        returns the public outcome associated with the outcome of a
        submission in a testcase contained in the group identified by
        parameter.

        outcome (float): the outcome of the submission in the
                         testcase.
        parameter (list): the parameters of the current group.

        return (float): the public output.

        """
        logger.error("Unimplemented method get_public_outcome.")
        raise NotImplementedError
Example #48
def get_score_type(submission=None, task=None, dataset_id=None):
    """Given a task, instantiate the corresponding ScoreType class.

    submission (Submission): the submission that needs the score type.
    task (Task): the task we want to score.
    dataset_id (int): the dataset id to use, or None for active.

    return (object): an instance of the correct ScoreType class.

    """
    # Validate arguments.
    if [x is not None for x in [submission, task]].count(True) != 1:
        raise ValueError("Need at most one way to get the score type.")

    if submission is not None:
        task = submission.task

    if dataset_id is None:
        dataset_id = task.active_dataset_id

    dataset = Dataset.get_from_id(dataset_id, task.sa_session)

    score_type_name = dataset.score_type
    try:
        score_type_parameters = json.loads(dataset.score_type_parameters)
    except json.decoder.JSONDecodeError as error:
        logger.error(
            "Cannot decode score type parameters for task "
            "%d \"%s\", dataset %d \"%s\"\n%r." %
            (task.id, task.name, dataset.id, dataset.description, error))
        return None

    public_testcases = dict(
        (testcase.num, testcase.public) for testcase in dataset.testcases)

    cls = plugin_lookup(score_type_name, "cms.grading.scoretypes",
                        "scoretypes")

    try:
        return cls(score_type_parameters, public_testcases)
    except Exception as error:
        logger.error(
            "Cannot instantiate score type for task "
            "%d \"%s\", dataset %d \"%s\"\n%r." %
            (task.id, task.name, dataset.id, dataset.description, error))
        return None
Example #49
    def submit(self, timestamp, username, password, t_id, t_short, files,
               language):
        """Execute the request for a submission.

        timestamp (int): seconds from the start.
        username (string): username issuing the submission.
        password (string): password of username.
        t_id (string): id of the task.
        t_short (string): short name of the task.
        files ([dict]): list of dictionaries with keys 'filename' and
                        'digest'.
        language (string): the extension the files should have.

        """
        logger.info("%s - Submitting for %s on task %s." %
                    (to_time(timestamp), username, t_short))
        if len(files) != 1:
            logger.error("We cannot submit more than one file.")
            return

        # Copying submission files into a temporary directory with the
        # correct name. Otherwise, SubmissionRequest does not know how
        # to interpret the file (and which language they are in).
        temp_dir = tempfile.mkdtemp(dir=config.temp_dir)
        for file_ in files:
            temp_filename = os.path.join(
                temp_dir, file_["filename"].replace("%l", language))
            shutil.copy(
                os.path.join(self.import_source, "files", files[0]["digest"]),
                temp_filename)
            file_["filename"] = temp_filename

        filename = files[0]["filename"]
        browser = Browser()
        browser.set_handle_robots(False)
        step(
            LoginRequest(browser,
                         username,
                         password,
                         base_url=self.cws_address))
        step(
            SubmitRequest(browser, (int(t_id), t_short),
                          filename=filename,
                          base_url=self.cws_address))
        shutil.rmtree(temp_dir)
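The "%l" placeholder in the stored filenames stands for the language extension chosen at submission time; the rename above is a plain string substitution, for example:

# A filename template "taskname.%l" submitted in C++:
assert "taskname.%l".replace("%l", "cpp") == "taskname.cpp"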
Example #50
0
    def push_logs(self, logger):
        """Push all log lines buffered since the last call to this
        method to the given logger object.

        """
        with self.log_lock:
            tmp = self.logs
            self.logs = []
        for (line, severity) in tmp:
            if severity == 'debug':
                logger.debug(line)
            elif severity == 'info':
                logger.info(line)
            elif severity == 'warning':
                logger.warning(line)
            elif severity == 'error':
                logger.error(line)
            elif severity == 'critical':
                logger.critical(line)
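For context, the producer side of this buffer could look roughly like the following sketch (`BufferedLogSource` is illustrative, not the actual class these lines come from):

import threading

class BufferedLogSource(object):
    def __init__(self):
        self.log_lock = threading.Lock()
        self.logs = []

    def write_line(self, line, severity='info'):
        # Queue (line, severity) pairs in the same shape that
        # push_logs() above drains, under the same lock.
        with self.log_lock:
            self.logs.append((line, severity))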
Example #51
0
def get_task_type(job=None,
                  file_cacher=None,
                  dataset=None,
                  task_type_name=None):
    """Given a job, instantiate the corresponding TaskType class.

    job (Job): the job to perform.
    file_cacher (FileCacher): a file cacher object.
    dataset (Dataset): if we don't want to grade, but just to get
                 information, we can provide only the
                 dataset and not the job.
    task_type_name (string): again, if we only need the class, we can
                             give only the task type name.

    return (object): an instance of the correct TaskType class.

    """
    # Validate arguments.
    if [x is not None
            for x in [job, dataset, task_type_name]].count(True) != 1:
        raise ValueError("Need exactly one way to get the task type.")
    elif [x is not None for x in [job, file_cacher]].count(True) not in [0, 2]:
        raise ValueError("Need file cacher to perform a job.")

    # Recover information from the arguments.
    task_type_parameters = None
    if job is not None:
        task_type_name = job.task_type
    if dataset is not None:
        task_type_name = dataset.task_type
        try:
            task_type_parameters = json.loads(dataset.task_type_parameters)
        except json.decoder.JSONDecodeError as error:
            logger.error("Cannot decode task type parameters.\n%r." % error)
            raise
        job = Job()
        job.task_type = task_type_name
        job.task_type_parameters = task_type_parameters

    cls = plugin_lookup(task_type_name, "cms.grading.tasktypes", "tasktypes")

    return cls(job, file_cacher)
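The two validation rules translate into two call patterns: a job plus a file cacher when actually grading, or a dataset alone when only metadata is needed. A hedged sketch of both (the helper names are invented):

def task_type_for_grading(job, file_cacher):
    # Grading path: the job and the file cacher must come as a pair.
    return get_task_type(job=job, file_cacher=file_cacher)

def task_type_for_inspection(dataset):
    # Inspection path: no grading happens, so no file cacher is needed.
    return get_task_type(dataset=dataset)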
Example #52
0
    def __init__(self, shard):
        logger.initialize(ServiceCoord("LogService", shard))
        Service.__init__(self, shard, custom_logger=logger)

        log_dir = os.path.join(config.log_dir, "cms")
        if not mkdir(config.log_dir) or \
               not mkdir(log_dir):
            logger.error("Cannot create necessary directories.")
            self.exit()
            return

        log_filename = "%d.log" % int(time.time())
        self._log_file = codecs.open(os.path.join(log_dir, log_filename), "w",
                                     "utf-8")
        try:
            os.remove(os.path.join(log_dir, "last.log"))
        except OSError:
            pass
        os.symlink(log_filename, os.path.join(log_dir, "last.log"))

        self._last_messages = []
Example #53
0
    def kill_service(self, service):
        """Restart the service. Note that after calling successfully
        this method, get_resource could still report the service
        running untile we call _store_resources again.

        service (string): format: name,shard.

        """
        logger.info("Killing %s as asked." % service)
        try:
            idx = service.rindex(",")
        except ValueError:
            logger.error("Unable to decode service string.")
            return
        name = service[:idx]
        try:
            shard = int(service[idx + 1:])
        except ValueError:
            logger.error("Unable to decode service shard.")
            return

        remote_service = RemoteService(self, ServiceCoord(name, shard))
        remote_service.quit(reason="Asked by ResourceService")
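The "name,shard" parsing can also be expressed with `rpartition`, which makes the two failure modes explicit; this is only a sketch of an alternative, not the project's code:

def parse_service_string(service):
    # Illustrative parser for the "name,shard" format used above;
    # returns None on malformed input instead of raising.
    name, sep, shard_str = service.rpartition(",")
    if sep == "":
        return None  # no comma in the string
    try:
        return name, int(shard_str)
    except ValueError:
        return None  # shard part is not an integer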
Example #54
0
    def get_file_to_string(self, path, maxlen=1024):
        """Return the content of a file in the sandbox given its
        relative path.

        path (string): relative path of the file inside the sandbox.
        maxlen (int): maximum number of bytes to read, or None if no
                      limit.
        return (string): the content of the file up to maxlen bytes.

        """
        file_ = self.get_file(path)
        try:
            if maxlen is None:
                content = file_.read()
            else:
                content = file_.read(maxlen)
        except UnicodeDecodeError as error:
            logger.error("Unable to interpret file as UTF-8. %r" % error)
            return None
        file_.close()
        return content
Example #55
0
def extract_outcome_and_text(sandbox):
    """Extract the outcome and the text from the two outputs of a
    manager (stdout contains the outcome, and stderr the text).

    sandbox (Sandbox): the sandbox whose last execution was a
                       comparator.

    return (float, string): outcome and text.
    raise: ValueError if the data cannot be decoded.

    """
    stdout = sandbox.relative_path(sandbox.stdout_file)
    stderr = sandbox.relative_path(sandbox.stderr_file)
    with codecs.open(stdout, "r", "utf-8") as stdout_file:
        with codecs.open(stderr, "r", "utf-8") as stderr_file:
            try:
                outcome = stdout_file.readline().strip()
            except UnicodeDecodeError as error:
                logger.error("Unable to interpret manager stdout "
                             "(outcome) as unicode. %r" % error)
                raise ValueError("Cannot decode the outcome.")
            try:
                text = filter_ansi_escape(stderr_file.readline())
            except UnicodeDecodeError as error:
                logger.error("Unable to interpret manager stderr "
                             "(text) as unicode. %r" % error)
                raise ValueError("Cannot decode the text.")

    try:
        outcome = float(outcome)
    except ValueError:
        logger.error("Wrong outcome `%s' from manager." % outcome)
        raise ValueError("Outcome is not a float.")

    return outcome, text
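The contract being parsed here is: the manager writes a single float outcome to stdout and a one-line message to stderr. A toy checker honouring that contract (purely illustrative):

import sys

def toy_checker(correct_output, contestant_output):
    # Outcome on stdout, human-readable text on stderr: exactly the
    # shape extract_outcome_and_text() expects to read back.
    if contestant_output.strip() == correct_output.strip():
        sys.stdout.write("1.0\n")
        sys.stderr.write("Output is correct\n")
    else:
        sys.stdout.write("0.0\n")
        sys.stderr.write("Output isn't correct\n")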
Example #56
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.error("The specified directory already exists, "
                         "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)
            self.submissions = sorted(
                (submission for submission in self.contest.get_submissions()
                 if not submission.user.hidden),
                key=lambda submission: submission.timestamp)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            try:
                self.export_submissions()
                self.export_ranking()
            except Exception as error:
                logger.error("Generic error. %r" % error)
                return False

        logger.info("Export finished.")
        logger.operation = ""

        return True
Example #57
0
    def get_html_details(self, score_details, translator=None):
        """Return an HTML string representing the score details of a
        submission.

        score_details (dict): the data saved by the score type itself
                              in the database; can be public or
                              private.
        translator (function): the function to localize strings.
        return (string): an HTML string representing score_details.

        """
        if translator is None:
            translator = lambda string: string
        try:
            score_details = json.loads(score_details)
        except (json.decoder.JSONDecodeError, TypeError):
            # TypeError raised if score_details is None
            logger.error("Found a null or non-JSON score details string. "
                         "Try invalidating scores.")
            return translator("Score details temporarily unavailable.")
        else:
            return Template(self.TEMPLATE).generate(details=score_details,
                                                    _=translator)
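Any string-to-string callable works as the translator; for instance gettext's `NullTranslations().gettext`, which returns its argument unchanged, reproduces the default behaviour. A small hedged usage sketch (`render_details` is an invented helper):

import gettext

def render_details(score_type, stored_details):
    # Illustrative call: swap NullTranslations for a real translation
    # catalogue to localize the strings in the template.
    translator = gettext.NullTranslations().gettext
    return score_type.get_html_details(stored_details,
                                       translator=translator)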
Example #58
0
def encode_id(entity_id):
    """Encode the id using only A-Za-z0-9_.

    entity_id (string): the entity id to encode.
    return (string): encoded entity id.

    """
    encoded_id = ""
    for char in str(entity_id):
        if char not in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" \
               "abcdefghijklmnopqrstuvwxyz" \
               "0123456789":
            try:
                encoded_id += "_" + hex(ord(char))[-2:]
            except TypeError:
                # FIXME We should use log_file here too, but given how
                # we create IDs it's unlikely this error will ever
                # happen.
                logger.error("Entity %s cannot be send correctly, "
                             "sending anyway (this may cause errors)." %
                             entity_id)
        else:
            encoded_id += char
    return encoded_id
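A quick worked example of the escaping, with the hex values taken from the code above:

# ':' is not alphanumeric, so it turns into "_" plus the last two hex
# digits of its code point: hex(ord(':')) == '0x3a' -> "_3a".
assert encode_id("user:1") == "user_3a1"
# Purely alphanumeric ids pass through unchanged.
assert encode_id("task42") == "task42"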
Example #59
0
    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.error("The specified file already exists, "
                             "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.error("The specified directory already exists, "
                         "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            light=self.light)
            for _file in files:
                if not self.safe_get_file(_file,
                                          os.path.join(files_dir, _file),
                                          os.path.join(descr_dir, _file)):
                    return False

            # Export the contest in JSON format.
            logger.info("Exporting the contest in JSON format.")
            with open(os.path.join(export_dir, "contest.json"), 'w') as fout:
                json.dump(contest.export_to_dict(self.skip_submissions,
                                                 self.skip_user_tests),
                          fout, indent=4)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            shutil.rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True
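`get_archive_info` itself is not shown in this snippet; below is a rough sketch of what such a helper might return, assuming it maps the target's extension to a tarfile write mode (an empty mode meaning "export to a plain directory"). The name `guess_archive_info` and the extension table are assumptions for illustration only.

import os

def guess_archive_info(path):
    # Hedged sketch, not the real get_archive_info(): pick a tarfile
    # write mode from the extension, or "" for a plain directory.
    modes = [(".tar.gz", "w:gz"), (".tgz", "w:gz"),
             (".tar.bz2", "w:bz2"), (".tar", "w")]
    basename = os.path.basename(path)
    for extension, mode in modes:
        if basename.endswith(extension):
            return {"basename": basename[:-len(extension)],
                    "write_mode": mode}
    return {"basename": basename, "write_mode": ""}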