Example 1
class ContestExporter:

    """This service exports every data about the contest that CMS
    knows. The process of exporting and importing again should be
    idempotent.

    """

    def __init__(self, contest_id, export_target,
                 dump_files, dump_model, light,
                 skip_submissions, skip_user_tests):
        self.contest_id = contest_id
        self.dump_files = dump_files
        self.dump_model = dump_model
        self.light = light
        self.skip_submissions = skip_submissions
        self.skip_user_tests = skip_user_tests

        # If target is not provided, we use the contest's name.
        if export_target == "":
            with SessionGen(commit=False) as session:
                contest = Contest.get_from_id(self.contest_id, session)
                self.export_target = "dump_%s.tar.gz" % contest.name
                logger.warning("export_target not given, using \"%s\""
                               % self.export_target)
        else:
            self.export_target = export_target

        self.file_cacher = FileCacher()

    def do_export(self):
        """Run the actual export code."""
        logger.operation = "exporting contest %d" % self.contest_id
        logger.info("Starting export.")

        export_dir = self.export_target
        archive_info = get_archive_info(self.export_target)

        if archive_info["write_mode"] != "":
            # We are able to write to this archive.
            if os.path.exists(self.export_target):
                logger.critical("The specified file already exists, "
                                "I won't overwrite it.")
                return False
            export_dir = os.path.join(tempfile.mkdtemp(),
                                      archive_info["basename"])

        logger.info("Creating dir structure.")
        try:
            os.mkdir(export_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False

        files_dir = os.path.join(export_dir, "files")
        descr_dir = os.path.join(export_dir, "descriptions")
        os.mkdir(files_dir)
        os.mkdir(descr_dir)

        with SessionGen(commit=False) as session:

            contest = Contest.get_from_id(self.contest_id, session)

            # Export files.
            if self.dump_files:
                logger.info("Exporting files.")
                files = contest.enumerate_files(self.skip_submissions,
                                                self.skip_user_tests,
                                                self.light)
                for file_ in files:
                    if not self.safe_get_file(file_,
                                              os.path.join(files_dir, file_),
                                              os.path.join(descr_dir, file_)):
                        return False

            # Export the contest in JSON format.
            if self.dump_model:
                logger.info("Exporting the contest to a JSON file.")

                # We use strings because they'll be the keys of a JSON
                # object; the contest will have ID 0.
                self.ids = {contest.sa_identity_key: "0"}
                self.queue = [contest]

                data = dict()
                while len(self.queue) > 0:
                    obj = self.queue.pop(0)
                    data[self.ids[obj.sa_identity_key]] = self.export_object(obj)

                # Specify the "root" of the data graph
                data["_objects"] = ["0"]

                with io.open(os.path.join(export_dir,
                                          "contest.json"), "wb") as fout:
                    json.dump(data, fout, encoding="utf-8",
                              indent=4, sort_keys=True)

        # If the admin requested export to file, we do that.
        if archive_info["write_mode"] != "":
            archive = tarfile.open(self.export_target,
                                   archive_info["write_mode"])
            archive.add(export_dir, arcname=archive_info["basename"])
            archive.close()
            rmtree(export_dir)

        logger.info("Export finished.")
        logger.operation = ""

        return True

    def get_id(self, obj):
        obj_key = obj.sa_identity_key
        if obj_key not in self.ids:
            # We use strings because they'll be the keys of a JSON object
            self.ids[obj_key] = str(len(self.ids))
            self.queue.append(obj)

        return self.ids[obj_key]

    def export_object(self, obj):

        """Export the given object, returning a JSON-encodable dict.

        The returned dict will contain a "_class" item (the name of the
        class of the given object), an item for each column property
        (with a value properly translated to a JSON-compatible type)
        and an item for each relationship property (which will be an ID
        or a collection of IDs).

        The IDs used in the exported dict aren't related to the ones
        used in the DB: they are newly generated and their scope is
        limited to the exported file only. They are shared among all
        classes (that is, two objects can never share the same ID, even
        if they are of different classes).

        If, when exporting the relationship, we find an object without
        an ID we generate a new ID, assign it to the object and append
        the object to the queue of objects to export.

        The self.skip_submissions flag controls whether we export
        submissions (and all other objects that can be reached only by
        passing through a submission) or not.

        """

        cls = type(obj)

        data = {"_class": cls.__name__}

        for prp in cls._col_props:
            col, = prp.columns
            col_type = type(col.type)

            val = getattr(obj, prp.key)
            if col_type in [Boolean, Integer, Float, String]:
                data[prp.key] = val
            elif col_type is DateTime:
                data[prp.key] = \
                    make_timestamp(val) if val is not None else None
            elif col_type is Interval:
                data[prp.key] = \
                    val.total_seconds() if val is not None else None
            else:
                raise RuntimeError("Unknown SQLAlchemy column type: %s"
                                   % col_type)

        for prp in cls._rel_props:
            other_cls = prp.mapper.class_

            # Skip submissions if requested
            if self.skip_submissions and other_cls is Submission:
                continue

            # Skip user_tests if requested
            if self.skip_user_tests and other_cls is UserTest:
                continue

            val = getattr(obj, prp.key)
            if val is None:
                data[prp.key] = None
            elif isinstance(val, other_cls):
                data[prp.key] = self.get_id(val)
            elif isinstance(val, list):
                data[prp.key] = list(self.get_id(i) for i in val)
            elif isinstance(val, dict):
                data[prp.key] = \
                    dict((k, self.get_id(v)) for k, v in val.iteritems())
            else:
                raise RuntimeError("Unknown SQLAlchemy relationship type: %s"
                                   % type(val))

        return data

    def safe_get_file(self, digest, path, descr_path=None):

        """Get file from FileCacher ensuring that the digest is
        correct.

        digest (string): the digest of the file to retrieve.
        path (string): the path where to save the file.
        descr_path (string): the path where to save the description.

        return (bool): True if all went well, False if something went wrong.

        """

        # TODO - Probably this method could be merged in FileCacher

        # First get the file
        try:
            self.file_cacher.get_file(digest, path=path)
        except Exception as error:
            logger.error("File %s could not retrieved from file server (%r)."
                         % (digest, error))
            return False

        # Then check the digest
        calc_digest = sha1sum(path)
        if digest != calc_digest:
            logger.critical("File %s has wrong hash %s."
                            % (digest, calc_digest))
            return False

        # If applicable, also retrieve the description
        if descr_path is not None:
            with io.open(descr_path, 'wt', encoding='utf-8') as fout:
                fout.write(self.file_cacher.describe(digest))

        return True
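
A minimal usage sketch (not part of the original listing): the constructor arguments mirror the signature shown above, and the concrete values are only illustrative placeholders.

import sys

# Hypothetical invocation of ContestExporter; the values are placeholders.
exporter = ContestExporter(contest_id=1,
                           export_target="",  # falls back to "dump_<name>.tar.gz"
                           dump_files=True,
                           dump_model=True,
                           light=False,
                           skip_submissions=False,
                           skip_user_tests=False)
if not exporter.do_export():
    sys.exit(1)
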
Example 2
class TestFileCacher(TestService):
    """Service that performs automatically some tests for the
    FileCacher service.

    """

    def __init__(self, shard):
        logger.initialize(ServiceCoord("TestFileCacher", shard))
        TestService.__init__(self, shard, custom_logger=logger)

        # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/"
        self.cache_base_path = os.path.join(config.cache_dir,
                                            "fs-cache-TestFileCacher-0")
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None
        self.file_cacher = FileCacher(self)
        #self.file_cacher = FileCacher(self, path="fs-storage")

    def prepare(self):
        """Initialization for the test code - make sure that the cache
        is empty before testing.

        """
        logger.info("Please delete directory %s before." %
                    self.cache_base_path)

### TEST 000 ###

    def test_000(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        """
        self.size = 100
        self.content = "".join(chr(random.randint(0, 255))
                               for unused_i in xrange(self.size))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                       u"Test #000")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data), "rb").read() != \
                self.content:
            self.test_end(False, "Local cache's content differ "
                          "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 001 ###

    def test_001(self):
        """Retrieve the file.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.test_end(False,
                              "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data object received correctly.")

### TEST 002 ###

    def test_002(self):
        """Check the size of the file.

        """
        logger.info("  I am checking the size of the ~100B binary file")
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if size == self.size:
            self.test_end(True, "The size is correct.")
        else:
            self.test_end(False, "The size is wrong: %d instead of %d" %
                          (size, self.size))

### TEST 003 ###

    def test_003(self):
        """Get file from FileCacher.

        """
        logger.info("  I am retrieving the file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.test_end(False, "Content differ.")
        elif not os.path.exists(self.cache_path):
            self.test_end(False, "File not stored in local cache.")
        elif open(self.cache_path).read() != self.content:
            self.test_end(False, "Local cache's content differ " +
                          "from original file.")
        else:
            self.test_end(True, "Content object received " +
                          "and cached correctly.")

### TEST 004 ###

    def test_004(self):
        """Delete the file through FS and tries to get it again through FC.

        """
        logger.info("  I am deleting the file from FileCacher.")
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %s." % error)
            return

        else:
            logger.info("  File deleted correctly.")
            logger.info("  I am getting the file from FileCacher.")
            try:
                self.file_cacher.get_file(self.digest)
            except Exception as error:
                self.test_end(True, "Correctly received an error: %r." % error)
            else:
                self.test_end(False, "Did not receive error.")

### TEST 005 ###

    def test_005(self):
        """Get unexisting file from FileCacher.

        """
        logger.info("  I am retrieving an unexisting file from FileCacher.")
        try:
            self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(True, "Correctly received an error: %r." % error)
        else:
            self.test_end(False, "Did not receive error.")

### TEST 006 ###

    def test_006(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        """
        self.content = "".join(chr(random.randint(0, 255))
                               for unused_i in xrange(100))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data),
                  "rb").read() != self.content:
            self.test_end(False, "Local cache's content differ "
                          "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 007 ###

    def test_007(self):
        """Retrieve the file as a string.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher "
                    "using get_file_to_string()")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.test_end(False,
                              "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data received correctly.")

### TEST 008 ###

    def test_008(self):
        """Put a ~100MB file into the storage (using a specially
        crafted file-like object).

        """
        logger.info("  I am sending the ~100MB binary file to FileCacher")
        rand_file = RandomFile(100000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.test_end(False, "The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif my_digest != data:
            self.test_end(False, "File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 009 ###

    def test_009(self):
        """Get the ~100MB file from FileCacher.

        """
        logger.info("  I am retrieving the ~100MB file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.test_end(False, "Content differs.")
            elif not os.path.exists(self.cache_path):
                self.test_end(False, "File not stored in local cache.")
            else:
                self.test_end(True, "Content object received " +
                              "and cached correctly.")
        finally:
            self.file_cacher.delete(self.digest)
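
The RandomFile and HashingFile helpers used by tests 008 and 009 are not shown in this listing. The sketch below is a hypothetical reconstruction inferred only from how they are used above (a read-only source of `dim` random bytes and a write-only sink, each exposing a running digest); it assumes SHA-1 hex digests, matching the sha1sum() check used elsewhere in these examples.

import hashlib
import random


class RandomFile(object):
    """Hypothetical file-like object that serves `dim` random bytes and
    keeps a running SHA-1 digest of everything it has produced."""

    def __init__(self, dim):
        self.dim = dim
        self._hasher = hashlib.sha1()

    def read(self, byte_num):
        byte_num = min(byte_num, self.dim)
        buf = "".join(chr(random.randint(0, 255)) for _ in xrange(byte_num))
        self.dim -= byte_num
        self._hasher.update(buf)
        return buf

    @property
    def digest(self):
        return self._hasher.hexdigest()

    def close(self):
        pass


class HashingFile(object):
    """Hypothetical write-only counterpart: it discards the data it
    receives but keeps a running SHA-1 digest of it."""

    def __init__(self):
        self._hasher = hashlib.sha1()

    def write(self, buf):
        self._hasher.update(buf)
        return len(buf)

    @property
    def digest(self):
        return self._hasher.hexdigest()

    def close(self):
        pass
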
Example 3
def test_testcases(base_dir, soluzione, language, assume=None):
    global task, file_cacher

    # Use a disabled FileCacher with an FSBackend in order to avoid
    # filling the database with junk and to save space.
    if file_cacher is None:
        file_cacher = FileCacher(path=os.path.join(config.cache_dir,
                                                   'cmsMake'),
                                 enabled=False)

    # Load the task
    if task is None:
        loader = YamlLoader(os.path.realpath(os.path.join(base_dir, "..")),
                            file_cacher)
        # Normally we should import the contest before, but YamlLoader
        # accepts get_task() even without previous get_contest() calls
        task = loader.get_task(os.path.split(os.path.realpath(base_dir))[1])

    # Prepare the EvaluationJob
    dataset = task.active_dataset
    if dataset.task_type != "OutputOnly":
        digest = file_cacher.put_file_from_path(
            os.path.join(base_dir, soluzione),
            "Solution %s for task %s" % (soluzione, task.name))
        executables = {
            task.name: Executable(filename=task.name, digest=digest)
        }
        jobs = [(t,
                 EvaluationJob(language=language,
                               task_type=dataset.task_type,
                               task_type_parameters=json.loads(
                                   dataset.task_type_parameters),
                               managers=dict(dataset.managers),
                               executables=executables,
                               input=dataset.testcases[t].input,
                               output=dataset.testcases[t].output,
                               time_limit=dataset.time_limit,
                               memory_limit=dataset.memory_limit))
                for t in dataset.testcases]
        tasktype = get_task_type(dataset=dataset)
    else:
        print("Generating outputs...", end='')
        files = {}
        for t in sorted(dataset.testcases.keys()):
            with file_cacher.get_file(dataset.testcases[t].input) as fin:
                with TemporaryFile() as fout:
                    print(str(t), end='')
                    call(soluzione, stdin=fin, stdout=fout, cwd=base_dir)
                    fout.seek(0)
                    digest = file_cacher.put_file_from_fobj(fout)
                    outname = "output_%s.txt" % t
                    files[outname] = File(filename=outname, digest=digest)
        jobs = [(t,
                 EvaluationJob(task_type=dataset.task_type,
                               task_type_parameters=json.loads(
                                   dataset.task_type_parameters),
                               managers=dict(dataset.managers),
                               files=files,
                               input=dataset.testcases[t].input,
                               output=dataset.testcases[t].output,
                               time_limit=dataset.time_limit,
                               memory_limit=dataset.memory_limit))
                for t in dataset.testcases]
        for k, job in jobs:
            job._key = k
        tasktype = get_task_type(dataset=dataset)
        print()

    ask_again = True
    last_status = "ok"
    status = "ok"
    stop = False
    info = []
    points = []
    comments = []
    tcnames = []
    for jobinfo in sorted(jobs):
        print(jobinfo[0], end='')
        sys.stdout.flush()
        job = jobinfo[1]
        # Skip the testcase if we decided to consider everything as
        # timed out
        if stop:
            info.append("Time limit exceeded")
            points.append(0.0)
            comments.append("Timeout.")
            continue

        # Evaluate testcase
        last_status = status
        tasktype.evaluate(job, file_cacher)
        if dataset.task_type != "OutputOnly":
            status = job.plus["exit_status"]
            info.append("Time: %5.3f   Wall: %5.3f   Memory: %s" %
                        (job.plus["execution_time"],
                         job.plus["execution_wall_clock_time"],
                         mem_human(job.plus["execution_memory"])))
        else:
            status = "ok"
            info.append("N/A")
        points.append(float(job.outcome))
        comments.append(format_status_text(job.text))
        tcnames.append(jobinfo[0])

        # If we saw two consecutive timeouts, ask whether we want to
        # consider everything as timed out
        if ask_again and status == "timeout" and last_status == "timeout":
            print()
            print("Want to stop and consider everything to timeout? [y/N]",
                  end='')
            if assume is not None:
                print(assume)
                tmp = assume
            else:
                tmp = raw_input().lower()
            if tmp in ['y', 'yes']:
                stop = True
            else:
                ask_again = False

    # Result pretty printing
    print()
    clen = max(len(c) for c in comments)
    ilen = max(len(i) for i in info)
    for (i, p, c, b) in zip(tcnames, points, comments, info):
        print("%s) %5.2lf --- %s [%s]" % (i, p, c.ljust(clen), b.center(ilen)))

    return zip(points, comments, info)
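
A hedged sketch of a driver for this helper (not in the original): it assumes the same module-level imports (including print_function) and uses a placeholder task directory, solution path and language, since test_testcases() only needs those three things plus an optional answer for the timeout prompt.

# Hypothetical driver; base_dir, the solution path and the language are
# placeholders, not values taken from the original code.
results = test_testcases(base_dir=".",
                         soluzione=os.path.join("sol", "solution"),
                         language="cpp",
                         assume="n")
total = sum(p for p, _, _ in results)
print("Total outcome over %d testcases: %.2f" % (len(results), total))
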
Example 4
File: Worker.py Project: cbolk/cms
class Worker(Service):
    """This service implement the possibility to compile and evaluate
    submissions in a sandbox. The instructions to follow for the
    operations are in the TaskType classes, while the sandbox is in
    the Sandbox module.

    """

    JOB_TYPE_COMPILATION = "compile"
    JOB_TYPE_EVALUATION = "evaluate"

    def __init__(self, shard):
        logger.initialize(ServiceCoord("Worker", shard))
        Service.__init__(self, shard, custom_logger=logger)
        self.file_cacher = FileCacher(self)

        self.work_lock = gevent.coros.RLock()
        self._ignore_job = False

    @rpc_method
    def ignore_job(self):
        """RPC that inform the worker that its result for the current
        action will be discarded. The worker will try to return as
        soon as possible even if this means that the result are
        inconsistent.

        """
        # We remember to quit as soon as possible.
        logger.info("Trying to interrupt job as requested.")
        self._ignore_job = True

    @rpc_method
    def precache_files(self, contest_id):
        """RPC to ask the worker to precache of files in the contest.

        contest_id (int): the id of the contest

        """
        # Lock is not needed if the admins correctly placed cache and
        # temp directories in the same filesystem. This is what
        # usually happens since they are children of the same,
        # cms-created, directory.
        logger.info("Precaching files for contest %d." % contest_id)
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(contest_id, session)
            for digest in contest.enumerate_files(skip_submissions=True,
                                                  skip_user_tests=True):
                self.file_cacher.get_file(digest)
        logger.info("Precaching finished.")

    @rpc_method
    def execute_job_group(self, job_group_dict):
        """Receive a group of jobs in a dict format and executes them
        one by one.

        job_group_dict (dict): a dictionary suitable to be imported
            from JobGroup.

        """
        job_group = JobGroup.import_from_dict(job_group_dict)

        if self.work_lock.acquire(False):

            try:
                self._ignore_job = False

                for k, job in job_group.jobs.iteritems():
                    logger.operation = "job '%s'" % (job.info)
                    logger.info("Request received")

                    job.shard = self.shard

                    # FIXME This is actually kind of a workaround...
                    # The only TaskType that needs it is OutputOnly.
                    job._key = k

                    # FIXME We're creating a new TaskType for each Job
                    # even if, at the moment, a JobGroup always uses
                    # the same TaskType and the same parameters. Yet,
                    # this could change in the future, so the best
                    # solution is to keep a cache of TaskTypes objects
                    # (like ScoringService does with ScoreTypes, except
                    # that we cannot index by Dataset ID here...).
                    task_type = get_task_type(job.task_type,
                                              job.task_type_parameters)
                    task_type.execute_job(job, self.file_cacher)

                    logger.info("Request finished.")

                    if not job.success or self._ignore_job:
                        job_group.success = False
                        break
                else:
                    job_group.success = True

                return job_group.export_to_dict()

            except:
                err_msg = "Worker failed on operation `%s'" % logger.operation
                logger.error(err_msg, exc_info=True)
                raise JobException(err_msg)

            finally:
                logger.operation = ""
                self.work_lock.release()

        else:
            err_msg = "Request received, but declined because of acquired " \
                "lock (Worker is busy executing another job group, this " \
                "should not happen: check if there are more than one ES " \
                "running, or for bugs in ES."
            logger.warning(err_msg)
            raise JobException(err_msg)
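
For reference, a minimal sketch of driving the precache entry point in-process (an assumption made for illustration; in production these RPCs are invoked remotely, typically by EvaluationService, and execute_job_group() expects a dict produced by JobGroup.export_to_dict()).

# Hypothetical direct use of the Worker service; shard and contest_id
# are placeholders.
worker = Worker(shard=0)
worker.precache_files(contest_id=1)
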
Example 5
def test_testcases(base_dir, soluzione, language, assume=None):
    global task, file_cacher

    # Use a FileCacher with a NullBackend in order to avoid filling
    # the database with junk
    if file_cacher is None:
        file_cacher = FileCacher(null=True)

    # Load the task
    # TODO - This implies copying a lot of data to the FileCacher,
    # which is annoying if you have to do it continuously; it would be
    # better to use a persistent cache (although local, possibly
    # filesystem-based instead of database-based) and somehow detect
    # when the task has already been loaded
    if task is None:
        loader = YamlLoader(
            os.path.realpath(os.path.join(base_dir, "..")),
            file_cacher)
        # Normally we should import the contest before, but YamlLoader
        # accepts get_task() even without previous get_contest() calls
        task = loader.get_task(os.path.split(os.path.realpath(base_dir))[1])

    # Prepare the EvaluationJob
    dataset = task.active_dataset
    if dataset.task_type != "OutputOnly":
        digest = file_cacher.put_file_from_path(
            os.path.join(base_dir, soluzione),
            "Solution %s for task %s" % (soluzione, task.name))
        executables = {task.name: Executable(filename=task.name,
                                             digest=digest)}
        jobs = [(t, EvaluationJob(
            language=language,
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            executables=executables,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        tasktype = get_task_type(dataset=dataset)
    else:
        print("Generating outputs...", end='')
        files = {}
        for t in sorted(dataset.testcases.keys()):
            with file_cacher.get_file(dataset.testcases[t].input) as fin:
                with TemporaryFile() as fout:
                    print("%s" % t, end='')
                    call(soluzione, stdin=fin, stdout=fout, cwd=base_dir)
                    fout.seek(0)
                    digest = file_cacher.put_file_from_fobj(fout)
                    outname = "output_%s.txt" % t
                    files[outname] = File(filename=outname, digest=digest)
        jobs = [(t, EvaluationJob(
            task_type=dataset.task_type,
            task_type_parameters=json.loads(dataset.task_type_parameters),
            managers=dict(dataset.managers),
            files=files,
            input=dataset.testcases[t].input,
            output=dataset.testcases[t].output,
            time_limit=dataset.time_limit,
            memory_limit=dataset.memory_limit)) for t in dataset.testcases]
        for k, job in jobs:
            job._key = k
        tasktype = get_task_type(dataset=dataset)
        print()

    ask_again = True
    last_status = "ok"
    status = "ok"
    stop = False
    info = []
    points = []
    comments = []
    tcnames = []
    for jobinfo in sorted(jobs):
        print(jobinfo[0], end='')
        sys.stdout.flush()
        job = jobinfo[1]
        # Skip the testcase if we decided to consider everything as
        # timed out
        if stop:
            info.append("Time limit exceeded")
            points.append(0.0)
            comments.append("Timeout.")
            continue

        # Evaluate testcase
        last_status = status
        tasktype.evaluate(job, file_cacher)
        if dataset.task_type != "OutputOnly":
            status = job.plus["exit_status"]
            info.append("Time: %5.3f   Wall: %5.3f   Memory: %s" %
                       (job.plus["execution_time"],
                        job.plus["execution_wall_clock_time"],
                        mem_human(job.plus["execution_memory"])))
        else:
            status = "ok"
            info.append("N/A")
        points.append(float(job.outcome))
        comments.append(format_status_text(job.text))
        tcnames.append(jobinfo[0])

        # If we saw two consecutive timeouts, ask whether we want to
        # consider everything as timed out
        if ask_again and status == "timeout" and last_status == "timeout":
            print()
            print("Want to stop and consider everything to timeout? [y/N]",
                  end='')
            if assume is not None:
                print(assume)
                tmp = assume
            else:
                tmp = raw_input().lower()
            if tmp in ['y', 'yes']:
                stop = True
            else:
                ask_again = False

    # Result pretty printing
    print()
    clen = max(len(c) for c in comments)
    ilen = max(len(i) for i in info)
    for (i, p, c, b) in zip(tcnames, points, comments, info):
        print("%s) %5.2lf --- %s [%s]" % (i, p, c.ljust(clen), b.center(ilen)))

    return zip(points, comments, info)
Example 6
class TestFileCacher(unittest.TestCase):
    """Service that performs automatically some tests for the
    FileCacher service.

    """
    def setUp(self):
        self.file_cacher = FileCacher()
        #self.file_cacher = FileCacher(self, path="fs-storage")
        self.cache_base_path = self.file_cacher.file_dir
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None

    def tearDown(self):
        shutil.rmtree(self.cache_base_path, ignore_errors=True)

    def test_file_life(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        Then retrieve it.

        Then check its size.

        Then get it back.

        Then delete it.

        """
        self.size = 100
        self.content = b"".join(
            chr(random.randint(0, 255)) for unused_i in xrange(self.size))

        data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                   u"Test #000")

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ " "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

        # Check the size of the file.
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if size != self.size:
            self.fail("The size is wrong: %d instead of %d" %
                      (size, self.size))

        # Get file from FileCacher.
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.fail("Content differ.")
        elif not os.path.exists(self.cache_path):
            self.fail("File not stored in local cache.")
        elif io.open(self.cache_path, "rb").read() != self.content:
            self.fail("Local cache's content differ " + "from original file.")

        # Delete the file through FS and try to get it again through
        # FC.
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.fail("Error received: %s." % error)
            return

        else:
            with self.assertRaises(Exception):
                self.file_cacher.get_file(self.digest)

    def test_fetch_missing_file(self):
        """Get unexisting file from FileCacher.

        """
        with self.assertRaises(Exception):
            self.file_cacher.get_file(self.digest)

    def test_file_as_content(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        Then retrieve it as a string.

        """
        self.content = b"".join(
            chr(random.randint(0, 255)) for unused_i in xrange(100))

        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ " "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file as a string.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

    def test_big_file(self):
        """Put a ~10MB file into the storage (using a specially
        crafted file-like object).

        Then get it back.

        """
        rand_file = RandomFile(10000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.fail("The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif my_digest != data:
            self.fail("File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Get the ~10MB file from FileCacher.
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.fail("Content differs.")
            elif not os.path.exists(self.cache_path):
                self.fail("File not stored in local cache.")
        finally:
            self.file_cacher.delete(self.digest)
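
Since this variant is a plain unittest.TestCase, it can be driven by the standard test runner; a minimal stanza, assuming the class is defined in (or imported into) the executed module:

import unittest

if __name__ == "__main__":
    unittest.main()
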
Example 7
class TestFileCacher(unittest.TestCase):
    """Service that performs automatically some tests for the
    FileCacher service.

    """

    def setUp(self):
        self.file_cacher = FileCacher()
        #self.file_cacher = FileCacher(self, path="fs-storage")
        self.cache_base_path = self.file_cacher.file_dir
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None

    def tearDown(self):
        shutil.rmtree(self.cache_base_path, ignore_errors=True)

    def test_file_life(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        Then retrieve it.

        Then check its size.

        Then get it back.

        Then delete it.

        """
        self.size = 100
        self.content = b"".join(chr(random.randint(0, 255))
                                for unused_i in xrange(self.size))

        data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                   u"Test #000")

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ "
                      "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

        # Check the size of the file.
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if size != self.size:
            self.fail("The size is wrong: %d instead of %d" %
                      (size, self.size))

        # Get file from FileCacher.
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.fail("Content differ.")
        elif not os.path.exists(self.cache_path):
            self.fail("File not stored in local cache.")
        elif io.open(self.cache_path, "rb").read() != self.content:
            self.fail("Local cache's content differ " +
                      "from original file.")

        # Delete the file through FS and try to get it again through
        # FC.
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.fail("Error received: %s." % error)
            return

        else:
            with self.assertRaises(Exception):
                self.file_cacher.get_file(self.digest)

    def test_fetch_missing_file(self):
        """Get unexisting file from FileCacher.

        """
        with self.assertRaises(Exception):
            self.file_cacher.get_file(self.digest)

    def test_file_as_content(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        Then retrieve it as a string.

        """
        self.content = b"".join(chr(random.randint(0, 255))
                                for unused_i in xrange(100))

        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif io.open(os.path.join(self.cache_base_path, data),
                     "rb").read() != self.content:
            self.fail("Local cache's content differ "
                      "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Retrieve the file as a string.
        self.fake_content = "Fake content.\n"
        with io.open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.fail("Did not use the cache even if it could.")
            else:
                self.fail("Content differ.")

    def test_big_file(self):
        """Put a ~10MB file into the storage (using a specially
        crafted file-like object).

        Then get it back.

        """
        rand_file = RandomFile(10000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.fail("The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.fail("File not stored in local cache.")
        elif my_digest != data:
            self.fail("File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data

        # Get the ~10MB file from FileCacher.
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.fail("Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.fail("Content differs.")
            elif not os.path.exists(self.cache_path):
                self.fail("File not stored in local cache.")
        finally:
            self.file_cacher.delete(self.digest)
Example 8
class TestFileCacher(TestService):
    """Service that performs automatically some tests for the
    FileCacher service.

    """
    def __init__(self, shard):
        logger.initialize(ServiceCoord("TestFileCacher", shard))
        TestService.__init__(self, shard, custom_logger=logger)

        # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/"
        self.cache_base_path = os.path.join(config.cache_dir,
                                            "fs-cache-TestFileCacher-0")
        self.cache_path = None
        self.content = None
        self.fake_content = None
        self.digest = None
        self.file_obj = None
        self.file_cacher = FileCacher(self)
        #self.file_cacher = FileCacher(self, path="fs-storage")

    def prepare(self):
        """Initialization for the test code - make sure that the cache
        is empty before testing.

        """
        logger.info("Please delete directory %s before." %
                    self.cache_base_path)

### TEST 000 ###

    def test_000(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a file-like object. FC should cache the content
        locally.

        """
        self.size = 100
        self.content = "".join(
            chr(random.randint(0, 255)) for unused_i in xrange(self.size))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_from_fobj(StringIO(self.content),
                                                       u"Test #000")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data), "rb").read() != \
                self.content:
            self.test_end(
                False, "Local cache's content differ "
                "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 001 ###

    def test_001(self):
        """Retrieve the file.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.fake_content:
            if received == self.content:
                self.test_end(False, "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data object received correctly.")

### TEST 002 ###

    def test_002(self):
        """Check the size of the file.

        """
        logger.info("  I am checking the size of the ~100B binary file")
        try:
            size = self.file_cacher.get_size(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if size == self.size:
            self.test_end(True, "The size is correct.")
        else:
            self.test_end(
                False,
                "The size is wrong: %d instead of %d" % (size, self.size))

### TEST 003 ###

    def test_003(self):
        """Get file from FileCacher.

        """
        logger.info("  I am retrieving the file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        try:
            data = self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        received = data.read()
        data.close()
        if received != self.content:
            self.test_end(False, "Content differ.")
        elif not os.path.exists(self.cache_path):
            self.test_end(False, "File not stored in local cache.")
        elif open(self.cache_path).read() != self.content:
            self.test_end(
                False, "Local cache's content differ " + "from original file.")
        else:
            self.test_end(True,
                          "Content object received " + "and cached correctly.")

### TEST 004 ###

    def test_004(self):
        """Delete the file through FS and tries to get it again through FC.

        """
        logger.info("  I am deleting the file from FileCacher.")
        try:
            self.file_cacher.delete(digest=self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %s." % error)
            return

        else:
            logger.info("  File deleted correctly.")
            logger.info("  I am getting the file from FileCacher.")
            try:
                self.file_cacher.get_file(self.digest)
            except Exception as error:
                self.test_end(True, "Correctly received an error: %r." % error)
            else:
                self.test_end(False, "Did not receive error.")

### TEST 005 ###

    def test_005(self):
        """Get unexisting file from FileCacher.

        """
        logger.info("  I am retrieving an unexisting file from FileCacher.")
        try:
            self.file_cacher.get_file(self.digest)
        except Exception as error:
            self.test_end(True, "Correctly received an error: %r." % error)
        else:
            self.test_end(False, "Did not receive error.")

### TEST 006 ###

    def test_006(self):
        """Send a ~100B random binary file to the storage through
        FileCacher as a string. FC should cache the content locally.

        """
        self.content = "".join(
            chr(random.randint(0, 255)) for unused_i in xrange(100))

        logger.info("  I am sending the ~100B binary file to FileCacher")
        try:
            data = self.file_cacher.put_file_content(self.content,
                                                     u"Test #005")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif open(os.path.join(self.cache_base_path, data),
                  "rb").read() != self.content:
            self.test_end(
                False, "Local cache's content differ "
                "from original file.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")

### TEST 007 ###

    def test_007(self):
        """Retrieve the file as a string.

        """
        logger.info("  I am retrieving the ~100B binary file from FileCacher "
                    "using get_file_to_string()")
        self.fake_content = "Fake content.\n"
        with open(self.cache_path, "wb") as cached_file:
            cached_file.write(self.fake_content)
        try:
            data = self.file_cacher.get_file_content(self.digest)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return

        if data != self.fake_content:
            if data == self.content:
                self.test_end(False, "Did not use the cache even if it could.")
            else:
                self.test_end(False, "Content differ.")
        else:
            self.test_end(True, "Data received correctly.")

### TEST 008 ###

    def test_008(self):
        """Put a ~100MB file into the storage (using a specially
        crafted file-like object).

        """
        logger.info("  I am sending the ~100MB binary file to FileCacher")
        rand_file = RandomFile(100000000)
        try:
            data = self.file_cacher.put_file_from_fobj(rand_file, u"Test #007")
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        if rand_file.dim != 0:
            self.test_end(False, "The input file wasn't read completely.")
        my_digest = rand_file.digest
        rand_file.close()

        if not os.path.exists(os.path.join(self.cache_base_path, data)):
            self.test_end(False, "File not stored in local cache.")
        elif my_digest != data:
            self.test_end(False, "File received with wrong hash.")
        else:
            self.cache_path = os.path.join(self.cache_base_path, data)
            self.digest = data
            self.test_end(True, "Data sent and cached without error.")


### TEST 009 ###

    def test_009(self):
        """Get the ~100MB file from FileCacher.

        """
        logger.info("  I am retrieving the ~100MB file from FileCacher " +
                    "after deleting the cache.")
        os.unlink(self.cache_path)
        hash_file = HashingFile()
        try:
            self.file_cacher.get_file_to_fobj(self.digest, hash_file)
        except Exception as error:
            self.test_end(False, "Error received: %r." % error)
            return
        my_digest = hash_file.digest
        hash_file.close()

        try:
            if self.digest != my_digest:
                self.test_end(False, "Content differs.")
            elif not os.path.exists(self.cache_path):
                self.test_end(False, "File not stored in local cache.")
            else:
                self.test_end(
                    True, "Content object received " + "and cached correctly.")
        finally:
            self.file_cacher.delete(self.digest)
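
Tests 006 through 009 above exercise FileCacher's round-trip for both in-memory strings and file-like objects. As a minimal, hedged sketch of the same round-trip outside the test harness (the import path below is an assumption about the CMS layout; put_file_content, get_file_content and delete are exactly the calls used by the tests):

# Minimal FileCacher round-trip sketch. Assumption: the import path matches
# your CMS checkout; the method calls mirror the tests above.
from cms.db.filecacher import FileCacher  # assumed module location

cacher = FileCacher()

# Store a small string and remember the digest FileCacher returns.
digest = cacher.put_file_content("hello, storage", u"round-trip example")

# Read it back; with a warm local cache this is served from disk.
assert cacher.get_file_content(digest) == "hello, storage"

# Drop the object once it is no longer needed.
cacher.delete(digest)
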
Example no. 9
class SpoolExporter:
    """This service creates a tree structure "similar" to the one used
    in the Italian IOI repository for storing the results of a contest.

    """
    def __init__(self, contest_id, spool_dir):
        self.contest_id = contest_id
        self.spool_dir = spool_dir
        self.upload_dir = os.path.join(self.spool_dir, "upload")
        self.contest = None
        self.submissions = None

        self.file_cacher = FileCacher()

    def run(self):
        """Interface to make the class do its job."""
        return self.do_export()

    def do_export(self):
        """Run the actual export code.

        """
        logger.operation = "exporting contest %s" % self.contest_id
        logger.info("Starting export.")

        logger.info("Creating dir structure.")
        try:
            os.mkdir(self.spool_dir)
        except OSError:
            logger.critical("The specified directory already exists, "
                            "I won't overwrite it.")
            return False
        os.mkdir(self.upload_dir)

        with SessionGen(commit=False) as session:
            self.contest = Contest.get_from_id(self.contest_id, session)
            self.submissions = sorted(
                (submission
                 for submission in self.contest.get_submissions()
                 if not submission.user.hidden),
                key=lambda submission: submission.timestamp)

            # Creating users' directory.
            for user in self.contest.users:
                if not user.hidden:
                    os.mkdir(os.path.join(self.upload_dir, user.username))

            try:
                self.export_submissions()
                self.export_ranking()
            except Exception:
                logger.critical("Generic error.", exc_info=True)
                return False

        logger.info("Export finished.")
        logger.operation = ""

        return True

    def export_submissions(self):
        """Export submissions' source files.

        """
        logger.info("Exporting submissions.")

        queue_file = codecs.open(os.path.join(self.spool_dir, "queue"), "w",
                                 encoding="utf-8")
        for submission in sorted(self.submissions, key=lambda x: x.timestamp):
            logger.info("Exporting submission %s." % submission.id)
            username = submission.user.username
            task = submission.task.name
            timestamp = time.mktime(submission.timestamp.timetuple())

            # Get source files to the spool directory.
            submission_dir = os.path.join(
                self.upload_dir, username, "%s.%d.%s" %
                (task, timestamp, submission.language))
            os.mkdir(submission_dir)
            for filename, file_ in submission.files.iteritems():
                self.file_cacher.get_file(
                    file_.digest,
                    path=os.path.join(submission_dir, filename))
            last_submission_dir = os.path.join(
                self.upload_dir, username, "%s.%s" %
                (task, submission.language))
            try:
                os.unlink(last_submission_dir)
            except OSError:
                pass
            os.symlink(os.path.basename(submission_dir), last_submission_dir)
            print >> queue_file, "./upload/%s/%s.%d.%s" % \
                (username, task, timestamp, submission.language)

            # Write results file for the submission.
            active_dataset = submission.task.active_dataset
            result = submission.get_result(active_dataset)
            if result.evaluated():
                res_file = codecs.open(os.path.join(
                        self.spool_dir,
                        "%d.%s.%s.%s.res" % (timestamp, username,
                                             task, submission.language)),
                                       "w", encoding="utf-8")
                res2_file = codecs.open(os.path.join(
                        self.spool_dir,
                        "%s.%s.%s.res" % (username, task,
                                          submission.language)),
                                        "w", encoding="utf-8")
                total = 0.0
                for evaluation in result.evaluations:
                    outcome = float(evaluation.outcome)
                    total += outcome
                    line = "Executing on file with codename '%s' %s (%.4f)" % \
                        (evaluation.testcase.codename,
                         evaluation.text, outcome)
                    print >> res_file, line
                    print >> res2_file, line
                line = "Score: %.6f" % total
                print >> res_file, line
                print >> res2_file, line
                res_file.close()
                res2_file.close()

        print >> queue_file
        queue_file.close()

    def export_ranking(self):
        """Exports the ranking in csv and txt (human-readable) form.

        """
        logger.info("Exporting ranking.")

        # Create the structure to store the scores.
        scores = dict((user.username, 0.0)
                      for user in self.contest.users
                      if not user.hidden)
        task_scores = dict((task.id, dict((user.username, 0.0)
                                          for user in self.contest.users
                                          if not user.hidden))
                           for task in self.contest.tasks)
        last_scores = dict((task.id, dict((user.username, 0.0)
                                          for user in self.contest.users
                                          if not user.hidden))
                           for task in self.contest.tasks)

        # Make the score type compute the scores.
        scorers = {}
        for task in self.contest.tasks:
            scorers[task.id] = get_score_type(dataset=task.active_dataset)

        for submission in self.submissions:
            active_dataset = submission.task.active_dataset
            result = submission.get_result(active_dataset)
            scorers[submission.task_id].add_submission(
                submission.id, submission.timestamp,
                submission.user.username,
                result.evaluated(),
                dict((ev.codename,
                      {"outcome": ev.outcome,
                       "text": ev.text,
                       "time": ev.execution_time,
                       "memory": ev.memory_used})
                     for ev in result.evaluations),
                submission.tokened())

        # Put together all the scores.
        for submission in self.submissions:
            task_id = submission.task_id
            username = submission.user.username
            details = scorers[task_id].pool[submission.id]
            last_scores[task_id][username] = details["score"]
            if details["tokened"]:
                task_scores[task_id][username] = max(
                    task_scores[task_id][username],
                    details["score"])

        # Merge tokened and last submissions.
        for username in scores:
            for task_id in task_scores:
                task_scores[task_id][username] = max(
                    task_scores[task_id][username],
                    last_scores[task_id][username])
            #print username, [task_scores[task_id][username]
            #                        for task_id in task_scores]
            scores[username] = sum(task_scores[task_id][username]
                                   for task_id in task_scores)

        sorted_usernames = sorted(scores.keys(),
                                  key=lambda username: (scores[username],
                                                        username),
                                  reverse=True)
        sorted_tasks = sorted(self.contest.tasks,
                              key=lambda task: task.num)

        ranking_file = codecs.open(
            os.path.join(self.spool_dir, "classifica.txt"),
            "w", encoding="utf-8")
        ranking_csv = codecs.open(
            os.path.join(self.spool_dir, "classifica.csv"),
            "w", encoding="utf-8")

        # Write rankings' header.
        n_tasks = len(sorted_tasks)
        print >> ranking_file, "Classifica finale del contest `%s'" % \
            self.contest.description
        points_line = " %10s" * n_tasks
        csv_points_line = ",%s" * n_tasks
        print >> ranking_file, ("%20s %10s" % ("Utente", "Totale")) + \
            (points_line % tuple([t.name for t in sorted_tasks]))
        print >> ranking_csv, ("%s,%s" % ("utente", "totale")) + \
            (csv_points_line % tuple([t.name for t in sorted_tasks]))

        # Write rankings' content.
        points_line = " %10.3f" * n_tasks
        csv_points_line = ",%.6f" * n_tasks
        for username in sorted_usernames:
            user_scores = [task_scores[task.id][username]
                           for task in sorted_tasks]
            print >> ranking_file, ("%20s %10.3f" % (
                    username,
                    scores[username])) + \
                    (points_line % tuple(user_scores))
            print >> ranking_csv, ("%s,%.6f" % (
                    username,
                    scores[username])) + \
                    (csv_points_line % tuple(user_scores))

        ranking_file.close()
        ranking_csv.close()
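
For reference, SpoolExporter is naturally driven by a thin wrapper that constructs it and calls run(). The argparse wiring below is an illustrative assumption, not the project's actual entry point; only the constructor arguments and the boolean result of run() come from the class above.

# Illustrative driver for SpoolExporter. The CLI wiring is an assumption;
# the constructor signature and run() return value match the class above.
import argparse
import sys

def main():
    parser = argparse.ArgumentParser(description="Export a contest spool tree.")
    parser.add_argument("contest_id", type=int, help="id of the contest to export")
    parser.add_argument("spool_dir", help="target directory (must not already exist)")
    args = parser.parse_args()

    exporter = SpoolExporter(contest_id=args.contest_id, spool_dir=args.spool_dir)
    # run() returns True on success, False otherwise (see do_export above).
    return 0 if exporter.run() else 1

if __name__ == "__main__":
    sys.exit(main())
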
Example no. 10
    def get(self, contest_id, format="online"):
        self.contest = self.sql_session.query(Contest)\
            .filter(Contest.id == contest_id)\
            .options(joinedload('participations'))\
            .options(joinedload('participations.submissions'))\
            .options(joinedload('participations.submissions.token'))\
            .options(joinedload('participations.submissions.results'))\
            .first()

        self.set_header("Content-Type", "application/zip")
        self.set_header("Content-Disposition",
                        "attachment; filename=\"users_data.zip\"")

        shutil.rmtree(BASE_PATH, ignore_errors=True)

        fc = FileCacher()

        for p in self.contest.participations:
            path = "%s/%s/" % (BASE_PATH, p.user.username)
            os.makedirs(path)

            # Identify all the files submitted by the user for each task
            task_sr = defaultdict(list)
            for sub in p.submissions:
                sub_sr = sub.get_result(sub.task.active_dataset)

                file = sub.files.items()[0][1]
                filename = file.filename
                if sub.language is not None:
                    filename = filename.replace(
                        ".%l",
                        get_language(sub.language).source_extension)
                if sub_sr.score:
                    task_sr[sub.task_id].append(
                        (sub_sr.score, sub.timestamp, (filename, file.digest)))

            # Select the last file submitted with maximum score for each task
            task_last_best = [
                sorted(task_sr[tid], key=lambda x: (x[0], x[1]),
                       reverse=True)[0][2] for tid in task_sr
            ]

            # Write the selected file for each task
            for filename, digest in task_last_best:
                file_content = fc.get_file(digest).read()
                with open("%s%s" % (path, filename), "w") as f:
                    f.write(file_content.decode("utf8"))

            # Find the user's score for each task
            scores = []
            for task in self.contest.tasks:
                t_score, _ = task_score(p, task)
                t_score = round(t_score, task.score_precision)
                scores.append((task.id, t_score))

            # Find the user's last progress for each task
            task_last_progress = {
                tid: sorted(task_sr[tid], key=lambda x: (-x[0], x[1]))[0][1]
                for tid in task_sr
            }

            # Write a CSV with some information about the participation
            info_csv = [["Username", "User"]]
            for task in self.contest.tasks:
                info_csv[0].append("%s (score)" % task.name)
                info_csv[0].append("%s (last progress)" % task.name)
            info_csv[0].append("Last progress")
            full_name = "%s %s" % (p.user.first_name, p.user.last_name)
            info_csv.append(
                [p.user.username.encode('utf-8'),
                 full_name.encode('utf-8')])
            for tid, t_score in scores:
                info_csv[1].append(str(t_score))
                if tid in task_last_progress:
                    last_progress = \
                        task_last_progress[tid].strftime('%H:%M:%S')
                else:
                    last_progress = ""
                info_csv[1].append(last_progress)
            if task_last_progress:
                last_progress_overall_ts = max(task_last_progress.values())
                last_progress_overall = \
                    last_progress_overall_ts.strftime('%H:%M:%S')
            else:
                last_progress_overall = ""
            info_csv[1].append(last_progress_overall)
            with open("%sinfo.csv" % path, "wb") as f_out:
                csv_writer = csv.writer(f_out)
                for row in info_csv:
                    csv_writer.writerow(row)

        # Create a downloadable archive with all this data
        shutil.make_archive("users_data", "zip", ".", "users_data")

        output = open("users_data.zip", "rb", buffering=0)

        self.finish(output.read())
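
The per-task selection above ("last file submitted with maximum score") relies on tuple ordering: sorting (score, timestamp, payload) triples in reverse puts the highest score first and, among equal scores, the most recent submission. A small self-contained illustration with made-up data:

# Stand-alone illustration of the (score, timestamp, payload) ordering used
# above; the submissions listed here are invented purely for demonstration.
from datetime import datetime

entries = [
    (80.0, datetime(2020, 1, 1, 10, 0), ("sol.cpp", "digest-a")),
    (100.0, datetime(2020, 1, 1, 11, 0), ("sol.cpp", "digest-b")),
    (100.0, datetime(2020, 1, 1, 12, 0), ("sol.cpp", "digest-c")),
]

# Highest score wins; among equal scores, the latest timestamp wins.
best = sorted(entries, key=lambda x: (x[0], x[1]), reverse=True)[0][2]
assert best == ("sol.cpp", "digest-c")
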