Beispiel #1
0
 def __init__(self, path, original_filename=None):
     """Wrap *path* in an Archive and reset all per-package state.

     ``original_filename``, when given, is used instead of *path* for
     detecting the archive extension (useful when *path* is a temporary
     upload file).
     """
     self.filename = original_filename or path
     lowered = self.filename.lower()
     ext = ('.tar.gz' if lowered.endswith('.tar.gz')
            else os.path.splitext(self.filename)[1])
     self.archive = Archive(path, ext)
     # Everything below is filled in later, during unpacking.
     self.config = None
     self.problem = None
     self.main_problem_instance = None
     self.rootdir = None
     self.short_name = None
     self.env = None
     self.package = None
     self.time_limits = None
     self.memory_limits = None
     self.statement_memory_limit = None
     self.prog_archive = None
     self.extra_compilation_args = \
         {'c': C_EXTRA_ARGS, 'cpp': C_EXTRA_ARGS, 'pas': PAS_EXTRA_ARGS}
     self.use_make = settings.USE_SINOLPACK_MAKEFILES
     self.use_sandboxes = not settings.USE_UNSAFE_EXEC
     self.restrict_html = (settings.SINOLPACK_RESTRICT_HTML
                           and not settings.USE_SINOLPACK_MAKEFILES)
Beispiel #2
0
 def __init__(self, path, original_filename=None):
     """Open *path* as an archive, inferring the archive type from
     ``original_filename`` when given (otherwise from *path*)."""
     name = original_filename or path
     self.filename = name
     if name.lower().endswith('.tar.gz'):
         archive_ext = '.tar.gz'
     else:
         _root, archive_ext = os.path.splitext(name)
     self.archive = Archive(path, archive_ext)
     self.config = None
     self.problem = None
     self.rootdir = None
     self.short_name = self._find_main_folder()
Beispiel #3
0
def download_problem_package_file_view(request, package_id, file_name):
    """Extract a single file from a problem package and stream it back.

    Extracts the whole archive into a fresh temporary directory, streams
    ``file_name`` if it exists, and always removes the directory
    afterwards.  Implicitly returns ``None`` when the file is missing
    from the archive -- NOTE(review): consider raising Http404 instead.
    """
    package = _get_package(request, package_id, 'contests.contest_admin')
    archive = Archive(package.package_file)
    dir_path = tempfile.mkdtemp(dir=tempfile.gettempdir())
    try:
        archive.extract(to_path=dir_path)
        filepath = os.path.join(dir_path, file_name)
        if os.path.isfile(filepath):
            # Open in binary mode: the packaged file may be arbitrary
            # binary data, and text mode would break on Python 3.
            return stream_file(File(open(filepath, 'rb')),
                               os.path.basename(os.path.normpath(file_name)))
    finally:
        shutil.rmtree(dir_path)
Beispiel #4
0
def problem_site_package_download_file(request, problem):
    """Problem-site view for re-uploading a changed package or downloading
    a single file from the current package.

    GET renders the file-list form.  POST with ``upload_button`` rebuilds
    the package with the uploaded replacement file and re-processes it;
    POST with ``download_button`` redirects to the single-file download
    view.
    """
    original_package = OriginalPackage.objects.get(problem=problem)
    problem = original_package.problem
    package = original_package.problem_package
    contest = problem.contest
    archive = Archive(package.package_file)
    file_names = archive.filenames()
    if request.method == 'POST':
        form = PackageFileReuploadForm(file_names, contest, request.POST)
        if form.is_valid():
            if 'upload_button' in request.POST:
                # Archive entries are prefixed with the package directory
                # name; take it from the first entry.
                package_name = file_names[0].split(os.path.sep, 1)[0]
                package_archive, tmpdir = \
                    _prepare_changed_package(request, form, archive, package_name)
                if package_archive is not None:
                    # NOTE(review): this handle is never explicitly
                    # closed; it lives until garbage collection.
                    package_file = File(open(package_archive, 'rb'),
                                        os.path.basename(package_archive))
                    original_filename = package_file.name
                    file_manager = uploaded_file_name(package_file)
                    source = UploadedPackageSource()
                    try:
                        source.process_package(request, file_manager, request.user,
                                           contest, existing_problem=problem,
                                           original_filename=original_filename,
                                           visibility=problem.visibility)
                    except Exception as e:
                        logger.error("Error processing package", exc_info=True,
                                     extra={'omit_sentry': True})
                        # Surface the failure on the form instead of a 500.
                        form._errors['__all__'] = form.error_class([smart_str(e)])
                    finally:
                        shutil.rmtree(tmpdir)
                    # Redirect regardless of success; errors were recorded
                    # on the form / in the logs above.
                    return source._redirect_response(request)
            elif 'download_button' in request.POST:
                file_name = request.POST.get('file_name', None)
                if file_name is None:
                    form.add_error('file_name', _('No file selected'))
                else:
                    return redirect('download_package_file',
                                    package_id=package.id,
                                    file_name=file_name)
    else:
        form = PackageFileReuploadForm(file_names, contest)
    return TemplateResponse(request,
                            'problems/manage-problem-package-files.html',
                            {'form': form,
                             'can_reupload':
                                 _problem_can_be_reuploaded(request, problem)})
Beispiel #5
0
    def _extract_makefiles(self):
        """Unpack the bundled sinol makefiles into the package root and
        create a default ``makefile.in`` if the package lacks one."""
        makefiles_archive = os.path.join(
            os.path.dirname(__file__), 'files', 'sinol-makefiles.tgz')
        Archive(makefiles_archive).extract(to_path=self.rootdir)

        makefile_in_path = os.path.join(self.rootdir, 'makefile.in')
        if os.path.exists(makefile_in_path):
            return
        with open(makefile_in_path, 'w') as makefile_in:
            makefile_in.write('MODE=wer\n')
            makefile_in.write('ID=%s\n' % (self.short_name, ))
            makefile_in.write('SIG=xxxx000\n')
Beispiel #6
0
 def __init__(self, path, original_filename=None):
     """Initialize package state for the archive at *path*, detecting the
     archive type from the (original) file name."""
     self.filename = original_filename or path
     is_tgz = self.filename.lower().endswith('.tar.gz')
     ext = '.tar.gz' if is_tgz else os.path.splitext(self.filename)[1]
     self.archive = Archive(path, ext)
     self.config = None
     self.problem = None
     self.rootdir = None
     self.short_name = self._find_main_folder()
Beispiel #7
0
 def validate_zip(file):
     """Reject uploads that are not a single, safely-sized ZIP entry.

     Non-ZIP files pass through untouched.  NOTE: ``self`` is captured
     from the enclosing scope, so this validator must stay defined inside
     the method that owns it.
     """
     if not file.name.upper().endswith(".ZIP"):
         return
     archive = Archive(file, '.zip')
     if len(archive.filenames()) != 1:
         raise ValidationError(
             _("Archive should have only 1 file inside.")
         )
     if (archive.extracted_size()
             > self.get_testrun_unzipped_input_limit()):
         raise ValidationError(
             _("Uncompressed archive is too big to be"
               " considered safe.")
         )
     # Extraction is safe, see:
     # https://docs.python.org/2/library/zipfile.html#zipfile.ZipFile.extract
     extraction_dir = tempfile.mkdtemp()
     try:
         # Actually extracting is the only reliable validity check;
         # cheaper inspections don't verify every member.
         archive.extract(extraction_dir)
     except Exception:
         # zipfile raises undocumented exception types, so the broad
         # catch here is deliberate.
         raise ValidationError(_("Archive seems to be corrupted."))
     finally:
         shutil.rmtree(extraction_dir)
Beispiel #8
0
    def test_archive_submission(self):
        """Bad archives must be rejected; a valid single-file zip creates
        exactly one new TESTRUN submission."""
        self.assertTrue(self.client.login(username='******'))
        url = reverse('testrun_submit',
                      kwargs={'contest_id': Contest.objects.get().id})
        files_dir = os.path.join(os.path.dirname(__file__), 'files')
        initial_count = Submission.objects.filter(kind='TESTRUN').count()

        def submit(zip_handle):
            # One POST with a fresh dummy source and the given archive.
            payload = {
                'problem_instance_id': ProblemInstance.objects.get().id,
                'file': ContentFile(b'a', name='x.cpp'),
                'input': zip_handle,
            }
            return self.client.post(url, payload, follow=True)

        for rejected in ('over_limit.zip', 'two_files.zip', 'evil.zip'):
            with open(os.path.join(files_dir, rejected), 'rb') as zip_handle:
                response = submit(zip_handle)
                self.assertEqual(response.status_code, 200)
                # No submission may have been created from a bad archive.
                self.assertEqual(
                    Submission.objects.filter(kind='TESTRUN').count(),
                    initial_count)

        with open(os.path.join(files_dir, "single_file.zip"),
                  'rb') as zip_handle:
            response = submit(zip_handle)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(
                Submission.objects.filter(kind='TESTRUN').count(),
                initial_count + 1)
            submission = TestRunProgramSubmission.objects.latest('pk')
            self.assertEqual(submission.kind, 'TESTRUN')
            self.assertEqual(submission.source_file.read().strip(), b'a')
            archive = Archive(submission.input_file, '.zip')
            self.assertEqual(len(archive.filenames()), 1)
Beispiel #9
0
    def test_archive_submission(self):
        """Bad archives are rejected; a valid one creates one TESTRUN.

        Fixes over the previous version:
        * archives are opened in binary mode ('rb') -- text mode breaks
          on Python 3 and can corrupt uploads on Windows,
        * the dummy source file and its expected content are bytes,
        * the new submission is fetched with ``latest('pk')`` instead of
          assuming primary keys start at 1 and are contiguous,
        * the login result is asserted.
        """
        self.assertTrue(self.client.login(username='******'))
        kwargs = {'contest_id': Contest.objects.get().id}
        url = reverse('testrun_submit', kwargs=kwargs)

        base_dir = os.path.join(os.path.dirname(__file__), 'files')

        testruns_before = Submission.objects.filter(kind='TESTRUN').count()
        for bad_archive in ['over_limit.zip', 'two_files.zip', 'evil.zip']:
            filename = os.path.join(base_dir, bad_archive)
            with open(filename, 'rb') as input_file:
                data = {
                    'problem_instance_id': ProblemInstance.objects.get().id,
                    'file': ContentFile(b'a', name='x.cpp'),
                    'input': input_file
                }
                response = self.client.post(url, data, follow=True)
                self.assertEqual(response.status_code, 200)
                # No new TESTRUN submission may have been created.
                self.assertEqual(
                    Submission.objects.filter(kind='TESTRUN').count(),
                    testruns_before)

        with open(os.path.join(base_dir, "single_file.zip"),
                  'rb') as input_file:
            data = {
                'problem_instance_id': ProblemInstance.objects.get().id,
                'file': ContentFile(b'a', name='x.cpp'),
                'input': input_file,
            }
            response = self.client.post(url, data, follow=True)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(
                Submission.objects.filter(kind='TESTRUN').count(),
                testruns_before + 1)
            submission = TestRunProgramSubmission.objects.latest('pk')
            self.assertEqual(submission.kind, 'TESTRUN')
            self.assertEqual(submission.source_file.read().strip(), b'a')
            archive = Archive(submission.input_file, '.zip')
            self.assertEqual(len(archive.filenames()), 1)
Beispiel #10
0
 def __init__(self, path, original_filename=None):
     """Set up archive access for *path* and clear all state that gets
     populated later during unpacking."""
     self.filename = original_filename or path
     if self.filename.lower().endswith('.tar.gz'):
         extension = '.tar.gz'
     else:
         extension = os.path.splitext(self.filename)[1]
     self.archive = Archive(path, extension)
     # Populated later by the unpacking pipeline:
     self.config = None
     self.problem = None
     self.main_problem_instance = None
     self.rootdir = None
     self.short_name = None
     self.env = None
     self.package = None
     self.time_limits = None
     self.memory_limits = None
     self.statement_memory_limit = None
     self.prog_archive = None
     self.extra_compilation_args = {
         'c': C_EXTRA_ARGS,
         'cpp': C_EXTRA_ARGS,
         'pas': PAS_EXTRA_ARGS,
     }
     self.use_make = settings.USE_SINOLPACK_MAKEFILES
     self.use_sandboxes = not settings.USE_UNSAFE_EXEC
Beispiel #11
0
class SinolPackage(object):
    """Unpacks and processes 'sinolpack' problem packages."""
    # Dotted paths used by the problems framework to pick the matching
    # controller / package backend for problems created from this package.
    controller_name = 'oioioi.sinolpack.controllers.SinolProblemController'
    package_backend_name = 'oioioi.sinolpack.package.SinolPackageBackend'

    def __init__(self, path, original_filename=None):
        """Prepare the package wrapper for *path*.

        ``original_filename`` (when given) is used instead of *path* for
        detecting the archive extension -- useful when *path* points at
        a temporary upload.
        """
        self.filename = original_filename or path
        lowered_name = self.filename.lower()
        if lowered_name.endswith('.tar.gz'):
            extension = '.tar.gz'
        else:
            extension = os.path.splitext(self.filename)[1]
        self.archive = Archive(path, extension)
        # State filled in by unpack() and the processing pipeline:
        self.config = None
        self.problem = None
        self.main_problem_instance = None
        self.rootdir = None
        self.short_name = None
        self.env = None
        self.package = None
        self.time_limits = None
        self.memory_limits = None
        self.statement_memory_limit = None
        self.prog_archive = None
        self.extra_compilation_args = {
            'c': C_EXTRA_ARGS,
            'cpp': C_EXTRA_ARGS,
            'pas': PAS_EXTRA_ARGS,
        }
        self.use_make = settings.USE_SINOLPACK_MAKEFILES
        self.use_sandboxes = not settings.USE_UNSAFE_EXEC

    def identify(self):
        return self._find_main_dir() is not None

    def get_short_name(self):
        """Return the package's short name, i.e. the name of the single
        main directory found in the archive (``None`` if ambiguous)."""
        return self._find_main_dir()

    def _find_main_dir(self):
        """Looks for the directory which contains at least the in/ and
           out/ subdirectories.  Only one such directory should be found,
           otherwise ``None`` is returned.

           Note that depending on the archive type, there may or may not
           be entries for the directories themselves in
           ``self.archive.filenames()``.
        """
        # Materialize the normalized name list: on Python 3 ``map``
        # returns a one-shot iterator, and the previous code silently
        # exhausted it before the per-directory membership checks below
        # (making every candidate pass/fail incorrectly).
        files = [os.path.normpath(os.path.normcase(f))
                 for f in self.archive.filenames()]
        toplevel_dirs = set(f.split(os.sep)[0] for f in files)
        # Same materialization issue applied to ``filter`` here.
        toplevel_dirs = [d for d in toplevel_dirs if slug_re.match(d)]
        problem_dirs = []
        for candidate in toplevel_dirs:
            for required_subdir in ('in', 'out'):
                if all(f.split(os.sep)[:2] != [candidate, required_subdir]
                       for f in files):
                    # A required subdirectory is missing; reject candidate.
                    break
            else:
                problem_dirs.append(candidate)
        if len(problem_dirs) == 1:
            return problem_dirs[0]

        return None

    def _save_to_field(self, field, file):
        """Copy *file* (a filetracker path) into the Django file *field*.

        Downloads the file next to the package root, stores it in the
        field's storage and removes the filetracker source afterwards.
        """
        basename = os.path.basename(filetracker_to_django_file(file).name)
        filename = os.path.join(self.rootdir, basename)
        get_client().get_file(file, filename)
        # Close the local handle deterministically (it was leaked before).
        with open(filename, 'rb') as local_file:
            field.save(os.path.basename(filename), File(local_file))
        get_client().delete_file(file)

    def _find_and_compile(self,
                          suffix,
                          command=None,
                          cwd=None,
                          log_on_failure=True,
                          out_name=None):
        if not command:
            command = suffix
        if self.use_make:
            renv = self._compile_using_make(command, cwd, suffix)
        else:
            renv = self._compile_matching_extension(command, out_name, suffix)

        if not renv and log_on_failure:
            logger.info("%s: no %s in package", self.filename, command)
        return renv

    def _compile_using_make(self, command, cwd, suffix):
        renv = None
        if glob.glob(
                os.path.join(self.rootdir, 'prog',
                             '%s%s.*' % (self.short_name, suffix))):
            logger.info("%s: %s", self.filename, command)
            renv = {}
            if not cwd:
                cwd = self.rootdir
            renv['stdout'] = execute('make %s' % (command), cwd=cwd)
            logger.info(renv['stdout'])
        return renv

    def _compile_matching_extension(self, command, out_name, suffix):
        """Compile the first ``prog/<short_name><suffix>.<ext>`` found
        among the submittable language extensions.

        Returns the compilation environment, or ``None`` when no source
        file matches any known extension.
        """
        base_name = self.short_name + suffix
        ext_groups = getattr(settings, 'SUBMITTABLE_EXTENSIONS', {}).values()
        for ext in (e for group in ext_groups for e in group):
            candidate = os.path.join(self.rootdir, 'prog',
                                     '%s.%s' % (base_name, ext))
            if not os.path.isfile(candidate):
                continue
            renv = self._compile(candidate, base_name, ext, out_name)
            logger.info("%s: %s", self.filename, command)
            return renv
        return None

    def _compile(self, filename, prog_name, ext, out_name=None):
        """Compile *filename* through sioworkers and return the job env.

        The source is uploaded to filetracker, compiled remotely, and the
        uploaded source copy is removed afterwards.  Raises via
        ``_ensure_compilation_success`` when compilation fails.
        """
        client = get_client()
        source_name = '%s.%s' % (prog_name, ext)
        ft_source_name = client.put_file(
            _make_filename_in_job_dir(self.env, source_name), filename)

        out_name = out_name or _make_filename_in_job_dir(
            self.env, '%s.e' % prog_name)

        new_env = self._run_compilation_job(ext, ft_source_name, out_name)
        client.delete_file(ft_source_name)
        self._ensure_compilation_success(filename, new_env)

        # TODO Remember about 'exec_info' when Java support is introduced.
        new_env['compiled_file'] = new_env['out_file']
        return new_env

    def _run_compilation_job(self, ext, ft_source_name, out_name):
        """Build and dispatch a single 'compile' job to sioworkers."""
        job = self.env.copy()
        job['job_type'] = 'compile'
        job['task_priority'] = TASK_PRIORITY
        job['source_file'] = ft_source_name
        job['out_file'] = out_name
        job['language'] = ext
        # Sandboxed setups use the 'default-<lang>' compilers; unsafe
        # ones fall back to the system toolchain.
        prefix = 'default' if self.use_sandboxes else 'system'
        job['compiler'] = prefix + '-' + ext
        if not self.use_make and self.prog_archive:
            job['additional_archive'] = self.prog_archive
        add_extra_files(job,
                        self.problem,
                        additional_args=self.extra_compilation_args)
        return run_sioworkers_job(job)

    def _ensure_compilation_success(self, filename, new_env):
        compilation_message = new_env.get('compiler_output', '')
        compilation_result = new_env.get('result_code', 'CE')
        if compilation_result != 'OK':
            logger.warning("%s: compilation of file %s failed with code %s",
                           self.filename, filename, compilation_result)
            logger.warning("%s: compiler output: %r", self.filename,
                           compilation_message)

            raise ProblemPackageError(
                _("Compilation of file %(filename)s "
                  "failed. Compiler output: "
                  "%(output)s") % {
                      'filename': filename,
                      'output': compilation_message
                  })

    def _make_ins(self, re_string):
        env = self._find_and_compile('ingen')
        if env and not self.use_make:
            env['job_type'] = 'ingen'
            env['task_priority'] = TASK_PRIORITY
            env['exe_file'] = env['compiled_file']
            env['re_string'] = re_string
            env['use_sandboxes'] = self.use_sandboxes
            env['collected_files_path'] = \
                _make_filename_in_job_dir(self.env, 'in')

            renv = run_sioworkers_job(env)
            get_client().delete_file(env['compiled_file'])
            return renv['collected_files']

        return {}

    def unpack(self, env, package):
        self.short_name = self.get_short_name()
        self.env = env
        self.package = package

        self._create_problem_or_reuse_if_exists(self.package.problem)
        return self._extract_and_process_package()

    def _create_problem_or_reuse_if_exists(self, existing_problem):
        if existing_problem:
            self.problem = existing_problem
            self._ensure_short_name_equality_with(existing_problem)
        else:
            self.problem = self._create_problem_instance()
            problem_site = ProblemSite(problem=self.problem,
                                       url_key=generate_key())
            problem_site.save()
            self.problem.problem_site = problem_site

        self.main_problem_instance = self.problem.main_problem_instance
        self.problem.package_backend_name = self.package_backend_name
        self.problem.save()

    def _ensure_short_name_equality_with(self, existing_problem):
        if existing_problem.short_name != self.short_name:
            raise ProblemPackageError(
                _("Tried to replace problem "
                  "'%(oldname)s' with '%(newname)s'. For safety, changing "
                  "problem short name is not possible.") %
                dict(oldname=existing_problem.short_name,
                     newname=self.short_name))

    def _create_problem_instance(self):
        """Create the Problem model instance.

        When ``env['author']`` is set the problem is private and owned by
        that user; otherwise it is public and anonymous.
        """
        author_username = self.env.get('author')
        author = (User.objects.get(username=author_username)
                  if author_username else None)
        return Problem.create(name=self.short_name,
                              short_name=self.short_name,
                              controller_name=self.controller_name,
                              contest=self.package.contest,
                              is_public=(author is None),
                              author=author)

    def _extract_and_process_package(self):
        """Extract the archive into a temporary directory, run the full
        processing pipeline and clean up afterwards (including removing
        any uploaded prog archive from filetracker)."""
        extraction_dir = tempfile.mkdtemp()
        logger.info("%s: tmpdir is %s", self.filename, extraction_dir)
        try:
            self.archive.extract(to_path=extraction_dir)
            self.rootdir = os.path.join(extraction_dir, self.short_name)
            self._process_package()
            return self.problem
        finally:
            shutil.rmtree(extraction_dir)
            if self.prog_archive:
                get_client().delete_file(self.prog_archive)

    def _process_package(self):
        """Run the full processing pipeline on the extracted package.

        The call order matters: later steps read state set by earlier
        ones (e.g. ``self.config`` parsed in the first step), and the
        original package is saved last.
        """
        self._process_config_yml()
        self._detect_full_name()
        self._detect_library()
        self._process_extra_files()
        # Program sources are either built via makefiles or archived for
        # remote compilation, depending on the configured flow.
        if self.use_make:
            self._extract_makefiles()
        else:
            self._save_prog_dir()
        self._process_statements()
        self._generate_tests()
        self._process_checkers()
        self._process_model_solutions()
        self._process_attachments()
        self._save_original_package()

    def _process_config_yml(self):
        """Parses config file from problem dir, saves its content to
           the current instance.

           Missing ``config.yml`` results in an empty config string.
        """
        config_file = os.path.join(self.rootdir, 'config.yml')
        instance, created = \
                ExtraConfig.objects.get_or_create(problem=self.problem)
        if os.path.isfile(config_file):
            # Use a context manager; the handle was previously leaked.
            with open(config_file, 'r') as f:
                instance.config = f.read()
        else:
            instance.config = ''
        instance.save()
        self.config = instance.parsed_config

    def _detect_full_name(self):
        """Sets the problem's full name from the ``config.yml`` (key ``title``)
           or from the ``title`` tag in the LateX source file.

           Example of how the ``title`` tag may look like:
           \title{A problem}
        """
        if 'title' in self.config:
            self.problem.name = self.config['title']
            self.problem.save()
            return

        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            text = open(source, 'r').read()
            r = re.search(r'^[^%]*\\title{(.+)}', text, re.MULTILINE)
            if r is not None:
                self.problem.name = _decode(r.group(1), text)
                self.problem.save()

    def _detect_library(self):
        """Finds if the problem has a library.

           Reads the library name from the ``config.yml`` key ``library``
           (the filename to be supplied during compilation).  When the
           key is absent or empty, any stale library record is removed.
        """
        libname = self.config.get('library')
        if libname:
            instance, _created = LibraryProblemData.objects \
                .get_or_create(problem=self.problem)
            instance.libname = libname
            instance.save()
            logger.info("Library %s needed for this problem.",
                        instance.libname)
        else:
            LibraryProblemData.objects.filter(problem=self.problem).delete()

    def _process_extra_files(self):
        """Re-import the extra compilation files listed in the config,
        failing when any expected file is missing from prog/."""
        ExtraFile.objects.filter(problem=self.problem).delete()
        expected = list(self.config.get('extra_compilation_files', ()))
        missing = self._find_and_save_files(expected)
        if missing:
            raise ProblemPackageError(
                _("Expected extra files %r not found in prog/") % (missing))

    def _find_and_save_files(self, files):
        """Saves files in the database.
           :param files: List of expected files.
           :return: List of files that were not found.
        """
        not_found = []
        for filename in files:
            fn = os.path.join(self.rootdir, 'prog', filename)
            if not os.path.isfile(fn):
                not_found.append(filename)
            else:
                instance = ExtraFile(problem=self.problem, name=filename)
                instance.file.save(filename, File(open(fn, 'rb')))
        return not_found

    def _extract_makefiles(self):
        """Deploy the bundled sinol makefiles into the package root and
        write a minimal ``makefile.in`` unless one already exists."""
        tgz = os.path.join(os.path.dirname(__file__), 'files',
                           'sinol-makefiles.tgz')
        Archive(tgz).extract(to_path=self.rootdir)

        makefile_in = os.path.join(self.rootdir, 'makefile.in')
        if os.path.exists(makefile_in):
            return
        with open(makefile_in, 'w') as f:
            f.write('MODE=wer\n')
            f.write('ID=%s\n' % (self.short_name, ))
            f.write('SIG=xxxx000\n')

    def _save_prog_dir(self):
        """Creates archive with programs directory.
        """
        prog_dir = os.path.join(self.rootdir, 'prog')
        if not os.path.isdir(prog_dir):
            return
        archive_name = 'compilation-dir-archive'
        archive = shutil.make_archive(os.path.join(self.rootdir, archive_name),
                                      format="zip",
                                      root_dir=prog_dir)
        self.prog_archive = get_client().put_file(
            _make_filename_in_job_dir(self.env, archive), archive)

    def _process_statements(self):
        """Creates problem statement from html or pdf source.

           Replaces all existing statements; warns when the package ships
           no statement at all.
        """
        docdir = os.path.join(self.rootdir, 'doc')
        if not os.path.isdir(docdir):
            logger.warning("%s: no docdir", self.filename)
            return

        # pylint: disable=maybe-no-member
        self.problem.statements.all().delete()

        htmlzipfile = os.path.join(docdir, self.short_name + 'zad.html.zip')
        if os.path.isfile(htmlzipfile):
            self._force_index_encoding(htmlzipfile)
            statement = ProblemStatement(problem=self.problem)
            # Context managers close the source handles (previously
            # leaked until garbage collection).
            with open(htmlzipfile, 'rb') as f:
                statement.content.save(self.short_name + '.html.zip', File(f))

        pdffile = os.path.join(docdir, self.short_name + 'zad.pdf')

        if os.path.isfile(pdffile):
            statement = ProblemStatement(problem=self.problem)
            with open(pdffile, 'rb') as f:
                statement.content.save(self.short_name + '.pdf', File(f))
        else:
            if self.use_make:
                self._compile_latex_docs(docdir)

            logger.warning("%s: no problem statement", self.filename)

    def _force_index_encoding(self, htmlzipfile):
        """Ensures index.html file is utf-8 encoded, if cannot apply
           this encoding raise
           :class:`~oioioi.problems.package.ProblemPackageError`.
        """
        # NOTE: on the iso-8859-2 path the zip file is rewritten in
        # place on disk.
        with zipfile.ZipFile(htmlzipfile, 'r') as archive, \
                archive.open('index.html') as index:

            data = index.read()
            # First, we check if index.html is utf-8 encoded.
            # If it is - nothing to do.
            try:
                data.decode('utf8')
            # We accept iso-8859-2 encoded files, but django doesn't
            # so index.html has to be translated to utf-8.
            except UnicodeDecodeError:
                try:
                    data = data.decode('iso-8859-2').encode('utf8')
                except (UnicodeDecodeError, UnicodeEncodeError):
                    raise ProblemPackageError(
                        _("index.html has to be utf8 or iso8859-2 encoded"))
                # We have to remove index.html from the archive and
                # then add the translated file to archive because
                # zipfile module doesn't implement editing files
                # inside archive.
                _remove_from_zip(htmlzipfile, 'index.html')
                with zipfile.ZipFile(htmlzipfile, 'a') as new_archive:
                    new_archive.writestr('index.html', data)

    def _compile_latex_docs(self, docdir):
        """Best-effort LaTeX build of the statement inside *docdir*.

        Ships fancyheadings.sty and the sinol class files alongside the
        sources first; a failed build is logged, not fatal.
        """
        # fancyheadings.sty looks like a rarely available LaTeX package...
        sty_src = os.path.join(os.path.dirname(__file__), 'files',
                               'fancyheadings.sty')
        sty_dst = os.path.join(docdir, 'fancyheadings.sty')
        if not os.path.isfile(sty_dst):
            shutil.copyfile(sty_src, sty_dst)

        # Extract sinol.cls and oilogo.*, but do not overwrite if they
        # already exist (-k).
        sinol_cls_tgz = os.path.join(os.path.dirname(__file__), 'files',
                                     'sinol-cls.tgz')
        execute(['tar', '-C', docdir, '-kzxf', sinol_cls_tgz], cwd=docdir)

        try:
            execute('make', cwd=docdir)
        except ExecuteError:
            logger.warning("%s: failed to compile statement",
                           self.filename,
                           exc_info=True)

    def _generate_tests(self, total_score=100):
        """Create/refresh all tests for the problem.

        Reads limits from config, then creates test instances, verifies
        them, generates missing outputs, drops stale tests and splits
        *total_score* over the scored test groups.

        :param total_score: total number of points distributed between
            scored test groups (default 100).
        """
        self.time_limits = _stringify_keys(self.config.get('time_limits', {}))
        self.memory_limits = _stringify_keys(
            self.config.get('memory_limits', {}))
        self.statement_memory_limit = self._detect_statement_memory_limit()

        if self.use_make:
            # NOTE(review): 'make outgen' is expected to generate the
            # test data under the make-based flow -- confirm.
            self._find_and_compile('', command='outgen')

        created_tests, outs_to_make, scored_groups = \
            self._create_instances_for_tests()

        self._verify_time_limits(created_tests)
        self._verify_inputs(created_tests)
        self._generate_test_outputs(created_tests, outs_to_make)
        self._validate_tests(created_tests)
        self._delete_non_existing_tests(created_tests)

        if scored_groups:
            self._assign_scores(scored_groups, total_score)

    def _detect_statement_memory_limit(self):
        """Returns the memory limit in the problem statement, converted to
           KiB or ``None``.
        """
        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            text = open(source, 'r').read()
            r = re.search(r'^[^%]*\\RAM{(\d+)}', text, re.MULTILINE)
            if r is not None:
                try:
                    value = int(r.group(1))
                    # In SIO1's tradition 66000 was used instead of 65536 etc.
                    # We're trying to cope with this legacy here.
                    return (value + (value + 31) / 32) * 1000
                except ValueError:
                    pass
        return None

    def _create_instances_for_tests(self):
        """Iterate through available test inputs.
           :return: Triple (created tests instances,
                            outs that have to be generated,
                            score groups (determined by test names))
        """
        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')

        # Matches '<short_name><number><optional letter suffix>.in'.
        # NOTE(review): the '.' before 'in' is unescaped, so it matches
        # any character (e.g. 'abc1xin') -- probably meant r'\.in$'.
        re_string = r'^(%s(([0-9]+)([a-z]?[a-z0-9]*))).in$' \
                    % (re.escape(self.short_name))
        names_re = re.compile(re_string)

        # Inputs may come from the in/ directory or be generated by ingen.
        collected_ins = self._make_ins(re_string)
        all_items = list(set(os.listdir(indir)) | set(collected_ins.keys()))

        created_tests = []
        outs_to_make = []
        scored_groups = set()

        # Natural sort keeps a stable, human-friendly test ordering.
        for order, test in enumerate(sorted(all_items, key=naturalsort_key)):
            instance = self._process_test(test, order, names_re, indir, outdir,
                                          collected_ins, scored_groups,
                                          outs_to_make)
            if instance:
                created_tests.append(instance)

        return created_tests, outs_to_make, scored_groups

    def _verify_time_limits(self, tests):
        """Check that the sum of time limits over all ``tests`` does not
           exceed ``settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM``.

           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
           when the limit is exceeded.
        """
        total = sum(test.time_limit for test in tests)
        if total > settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM:
            # Both values are reported in seconds; the sum is rounded up.
            total_seconds = (total + 999) / 1000.0
            max_seconds = settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM / 1000.0

            raise ProblemPackageError(
                _("Sum of time limits for all tests is too big. It's %(sum)ds, "
                  "but it shouldn't exceed %(limit)ds.") % {
                      'sum': total_seconds,
                      'limit': max_seconds
                  })

    def _verify_inputs(self, tests):
        """Check if correct solution exits with code 0 on all tests.
           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
           otherwise.

           No-op when there is no inwer program or when Makefiles are used.
        """
        env = self._find_and_compile('inwer')
        if env and not self.use_make:
            jobs = {}

            for test in tests:
                job = env.copy()
                job['job_type'] = 'inwer'
                job['task_priority'] = TASK_PRIORITY
                job['exe_file'] = env['compiled_file']
                job['in_file'] = django_to_filetracker_path(test.input_file)
                job['use_sandboxes'] = self.use_sandboxes
                jobs[test.name] = job

            jobs = run_sioworkers_jobs(jobs)
            get_client().delete_file(env['compiled_file'])

            # items() instead of the Python 2-only iteritems() -- behaves
            # the same on Python 2 and keeps the code running on Python 3.
            for test_name, job in jobs.items():
                if job['result_code'] != 'OK':
                    raise ProblemPackageError(
                        _("Inwer failed on test "
                          "%(test)s. Inwer output %(output)s") % {
                              'test': test_name,
                              'output': '\n'.join(job['stdout'])
                          })

            logger.info("%s: inwer success", self.filename)

    def _generate_test_outputs(self, tests, outs_to_make):
        """Run the model solution for ``outs_to_make`` and store each
           produced output on the matching test instance.

           No-op when Makefiles are used (outputs are built by make).
        """
        if self.use_make:
            return
        outs = self._make_outs(outs_to_make)
        for test in tests:
            generated = outs.get(test.name)
            if generated is not None:
                self._save_to_field(test.output_file, generated['out_file'])

    def _validate_tests(self, created_tests):
        """Check if all tests have output files and that
           test instances are correct.
           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
        """
        for test in created_tests:
            if not test.output_file:
                raise ProblemPackageError(
                    _("Missing out file for test %s") % test.name)
            try:
                # Model-level validation; surface the first message as a
                # package error.
                test.full_clean()
            except ValidationError as e:
                raise ProblemPackageError(e.messages[0])

    def _delete_non_existing_tests(self, created_tests):
        """Delete database tests of this problem instance that are not
           among ``created_tests`` (i.e. vanished from the package).
        """
        kept_ids = [test.id for test in created_tests]
        stale_tests = Test.objects \
            .filter(problem_instance=self.main_problem_instance) \
            .exclude(id__in=kept_ids)
        for test in stale_tests:
            logger.info("%s: deleting test %s", self.filename, test.name)
            test.delete()

    def _process_test(self, test, order, names_re, indir, outdir,
                      collected_ins, scored_groups, outs_to_make):
        """Responsible for saving test in and out files,
           setting test limits, assigning test kind and group.
           :param test: Test name.
           :param order: Test number.
           :param names_re: Compiled regex to match test details from name.
                  Should extract basename, test name,
                  group number and test type.
           :param indir: Directory with tests inputs.
           :param outdir: Directory with tests outputs.
           :param collected_ins: List of inputs that were generated,
                  not taken from archive as a file.
           :param scored_groups: Accumulator for score groups.
           :param outs_to_make: Accumulator for name of output files to
                  be generated by model solution.
           :return: Test instance or None if name couldn't be matched.
        """
        match = names_re.match(test)
        if not match:
            if test.endswith('.in'):
                raise ProblemPackageError(_("Unrecognized test: %s") % (test))
            return None

        # Examples for odl0ocen.in:
        basename = match.group(1)  # odl0ocen
        name = match.group(2)  # 0ocen
        group = match.group(3)  # 0
        suffix = match.group(4)  # ocen

        instance, created = Test.objects.get_or_create(
            problem_instance=self.main_problem_instance, name=name)

        inname_base = basename + '.in'
        inname = os.path.join(indir, inname_base)
        outname_base = basename + '.out'
        outname = os.path.join(outdir, outname_base)

        if test in collected_ins:
            self._save_to_field(instance.input_file, collected_ins[test])
        else:
            instance.input_file.save(inname_base, File(open(inname, 'rb')))

        if os.path.isfile(outname):
            # Bug fix: the mode string belongs to open().  Previously this
            # read ``File(open(outname), 'rb')``, which opened the file in
            # text mode and passed 'rb' to File() as the file's *name*.
            instance.output_file.save(outname_base, File(open(outname, 'rb')))

        outs_to_make.append(
            (_make_filename_in_job_dir(self.env,
                                       'out/%s' % (outname_base)), instance))

        if group == '0' or 'ocen' in suffix:
            # Example tests
            instance.kind = 'EXAMPLE'
            instance.group = name
        else:
            instance.kind = 'NORMAL'
            instance.group = group
            scored_groups.add(group)

        if created:
            instance.time_limit = self.time_limits.get(name,
                                                       DEFAULT_TIME_LIMIT)

        memory_limit = self._get_memory_limit(created, name)
        if memory_limit:
            instance.memory_limit = memory_limit

        instance.order = order
        instance.save()
        return instance

    def _get_memory_limit(self, created, name):
        """If we find the memory limit specified anywhere in the package:
           either in the config.yml or in the problem statement, then we
           overwrite potential manual changes. (In the future we should
           disallow editing memory limits if they were taken from the
           package).
           :return: Memory limit found in config or statement,
                    None otherwise.
        """
        # Precedence: per-test limit, global config limit, statement limit.
        try:
            return self.memory_limits[name]
        except KeyError:
            pass
        try:
            return self.config['memory_limit']
        except KeyError:
            pass
        if self.statement_memory_limit is not None:
            return self.statement_memory_limit
        # Apply the default only to newly created tests, so that manual
        # changes on existing ones are preserved.
        return DEFAULT_MEMORY_LIMIT if created else None

    def _make_outs(self, outs_to_make):
        """Run jobs to generate test outputs.
           :return: Result from workers.
        """
        compile_env = self._find_and_compile('', command='outgen')
        if not compile_env:
            return {}

        exec_kind = 'exec' if self.use_sandboxes else 'unsafe-exec'
        jobs = {}
        for out_path, test in outs_to_make:
            job = dict(compile_env)
            job['job_type'] = exec_kind
            job['task_priority'] = TASK_PRIORITY
            job['exe_file'] = compile_env['compiled_file']
            job['upload_out'] = True
            job['in_file'] = django_to_filetracker_path(test.input_file)
            job['out_file'] = out_path
            jobs[test.name] = job

        results = run_sioworkers_jobs(jobs)
        get_client().delete_file(compile_env['compiled_file'])
        return results

    def _assign_scores(self, scored_groups, total_score):
        """All groups get equal score, except few last groups
           that are given +1 to compensate rounding error and
           match total sum of ``total_score``.
        """
        Test.objects.filter(problem_instance=self.main_problem_instance) \
                .update(max_score=0)
        num_groups = len(scored_groups)
        # Floor division keeps the base score integral on Python 3 as well
        # (plain '/' would produce a float there and break the +1 top-up).
        group_score = total_score // num_groups
        extra_score_groups = sorted(
            scored_groups,
            key=naturalsort_key)[num_groups -
                                 (total_score - num_groups * group_score):]
        for group in scored_groups:
            score = group_score
            if group in extra_score_groups:
                score += 1
            Test.objects.filter(problem_instance=self.main_problem_instance,
                                group=group).update(max_score=score)

    def _process_checkers(self):
        """Compiles output checker and saves its binary.
        """
        checker_name = '%schk.e' % (self.short_name)
        out_name = _make_filename_in_job_dir(self.env, checker_name)
        checker = OutputChecker.objects.get(problem=self.problem)
        env = self._find_and_compile('chk',
                                     command=checker_name,
                                     cwd=os.path.join(self.rootdir, 'prog'),
                                     log_on_failure=False,
                                     out_name=out_name)
        if env and not self.use_make:
            # Compiled through a sioworkers job -- fetch the binary.
            self._save_to_field(checker.exe_file, env['compiled_file'])
        else:
            # Makefile build (or no checker source): look for a pre-built
            # executable in prog/.
            checker.exe_file = self._find_checker_exec()
            checker.save()

    def _find_checker_exec(self):
        """Return a ``File`` wrapping a pre-built checker executable
           (``<short_name>chk.e`` or ``<short_name>chk.sh``) from prog/,
           or ``None`` when neither exists.
        """
        prefix = os.path.join(self.rootdir, 'prog', self.short_name + 'chk')
        for extension in ('.e', '.sh'):
            candidate = prefix + extension
            if os.path.isfile(candidate):
                return File(open(candidate, 'rb'))
        return None

    def _process_model_solutions(self):
        """Save model solutions to database.
        """
        ModelSolution.objects.filter(problem=self.problem).delete()

        sources = self._get_model_solutions_sources()

        # Maps a filename kind shortcut to (sort order, full kind name).
        kinds = {
            '': (0, 'NORMAL'),
            's': (1, 'SLOW'),
            'b': (2, 'INCORRECT'),
        }

        def sort_key(entry):
            shortcut, filename, _unused_path = entry
            base = filename[:filename.index(".")]
            return (kinds[shortcut][0], naturalsort_key(base))

        for order, (shortcut, filename, path) in \
                enumerate(sorted(sources, key=sort_key)):
            solution = ModelSolution(problem=self.problem,
                                     name=filename,
                                     order_key=order,
                                     kind=kinds[shortcut][1])
            solution.source_file.save(filename, File(open(path, 'rb')))
            logger.info('%s: model solution: %s', self.filename, filename)

    def _get_model_solutions_sources(self):
        """:return: Sources as tuples (kind_of_solution, filename,
                                       full path to file).
        """
        lang_exts_list = \
            getattr(settings, 'SUBMITTABLE_EXTENSIONS', {}).values()
        extensions = [ext for lang_exts in lang_exts_list for ext in lang_exts]
        regex = r'^%s[0-9]*([bs]?)[0-9]*\.(' + '|'.join(extensions) + ')'
        names_re = re.compile(regex % (re.escape(self.short_name),))
        progdir = os.path.join(self.rootdir, 'prog')
        sources = []
        for filename in os.listdir(progdir):
            match = names_re.match(filename)
            path = os.path.join(progdir, filename)
            if match and os.path.isfile(path):
                sources.append((match.group(1), filename, path))
        return sources

    def _process_attachments(self):
        """Remove previously added attachments for the problem,
           and saves new ones from attachment directory.
        """
        # delete() on a (possibly empty) queryset is always safe; the
        # previous ``is not None`` guard was redundant (querysets are
        # never None).
        ProblemAttachment.objects.filter(problem=self.problem).delete()

        attachments_dir = os.path.join(self.rootdir, 'attachments')
        if not os.path.isdir(attachments_dir):
            return
        attachments = [
            attachment for attachment in os.listdir(attachments_dir)
            if os.path.isfile(os.path.join(attachments_dir, attachment))
        ]

        for attachment in attachments:
            path = os.path.join(attachments_dir, attachment)
            instance = ProblemAttachment(problem=self.problem,
                                         description=attachment)
            instance.content.save(attachment, File(open(path, 'rb')))
            # Log the package filename (not the local temp path) for
            # consistency with the other log messages in this class.
            logger.info('%s: attachment: %s', self.filename, attachment)

    def _save_original_package(self):
        """Save instance of package that would be reused by other
           instances of this problem.
        """
        original_package, _created = OriginalPackage.objects.get_or_create(
                problem=self.problem)
        original_package.problem_package = self.package
        original_package.save()
# ----- Beispiel #12 (scraped example separator; the "0" was its vote count) -----
class SinolPackage(object):
    """Handler for a `sinolpack'-format problem package archive.

       Recognizes a package by its single top-level folder containing
       ``in/`` and ``out/`` subfolders and processes its contents
       (config, statements, tests, checkers, model solutions).
    """
    # Dotted paths to the project classes handling this package type.
    controller_name = 'oioioi.sinolpack.controllers.SinolProblemController'
    package_backend_name = 'oioioi.sinolpack.package.SinolPackageBackend'

    def __init__(self, path, original_filename=None):
        """``path`` points at the archive on disk; ``original_filename``
           (when given) is the name the package was uploaded under and
           determines the archive type.
        """
        self.filename = original_filename or path
        # os.path.splitext would split '.tar.gz' at the wrong dot.
        lowered = self.filename.lower()
        if lowered.endswith('.tar.gz'):
            extension = '.tar.gz'
        else:
            extension = os.path.splitext(self.filename)[1]
        self.archive = Archive(path, extension)
        # The fields below are populated while the package is processed.
        self.config = None
        self.problem = None
        self.main_problem_instance = None
        self.rootdir = None
        self.short_name = None
        self.env = None
        self.package = None
        self.time_limits = None
        self.memory_limits = None
        self.statement_memory_limit = None
        self.prog_archive = None
        self.extra_compilation_args = {
            'c': C_EXTRA_ARGS,
            'cpp': C_EXTRA_ARGS,
            'pas': PAS_EXTRA_ARGS,
        }
        self.use_make = settings.USE_SINOLPACK_MAKEFILES
        self.use_sandboxes = not settings.USE_UNSAFE_EXEC

    def identify(self):
        """Return ``True`` when the archive looks like a sinol package."""
        main_folder = self._find_main_folder()
        return main_folder is not None

    def get_short_name(self):
        """Return the package's short name (its main folder name), or
           ``None`` when it cannot be determined."""
        return self._find_main_folder()

    def _find_main_folder(self):
        """Find the single top-level folder that has at least the ``in/``
           and ``out/`` subfolders, and return its name (or ``None``).

           Note that depending on the archive type, there may be or
           may not be entries for the folders themselves in
           self.archive.filenames().
        """
        # Materialize the normalized names as a real list: ``files`` is
        # iterated several times below, which would silently fail with the
        # one-shot map/filter iterators returned on Python 3.
        files = [os.path.normpath(os.path.normcase(f))
                 for f in self.archive.filenames()]
        toplevel_folders = set(f.split(os.sep)[0] for f in files)
        toplevel_folders = [f for f in toplevel_folders if slug_re.match(f)]
        problem_folders = []
        for folder in toplevel_folders:
            for required_subfolder in ('in', 'out'):
                if all(f.split(os.sep)[:2] != [folder, required_subfolder]
                       for f in files):
                    break
            else:
                problem_folders.append(folder)
        if len(problem_folders) == 1:
            return problem_folders[0]
        return None

    def _process_config_yml(self):
        """Load ``config.yml`` from the package root into the problem's
           :class:`ExtraConfig` row and cache the parsed result on
           ``self.config``.  An empty config is stored when the file is
           missing.
        """
        config_file = os.path.join(self.rootdir, 'config.yml')
        instance, created = \
                ExtraConfig.objects.get_or_create(problem=self.problem)
        if os.path.isfile(config_file):
            # Context manager ensures the file handle is closed promptly.
            with open(config_file, 'r') as f:
                instance.config = f.read()
        else:
            instance.config = ''
        instance.save()
        self.config = instance.parsed_config

    def _detect_full_name(self):
        r"""Sets the problem's full name from the ``config.yml`` (key ``title``)
           or from the ``title`` tag in the LateX source file.

           Example of how the ``title`` tag may look like:
           \title{A problem}

           (Raw docstring: previously ``\title`` rendered its ``\t`` as a
           literal TAB character.)
        """
        if 'title' in self.config:
            self.problem.name = self.config['title']
            self.problem.save()
            return

        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            # Close the statement file promptly via a context manager.
            with open(source, 'r') as statement:
                text = statement.read()
            r = re.search(r'^[^%]*\\title{(.+)}', text, re.MULTILINE)
            if r is not None:
                self.problem.name = _decode(r.group(1), text)
                self.problem.save()

    def _detect_statement_memory_limit(self):
        """Returns the memory limit in the problem statement, converted to
           KiB or ``None``.
        """
        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            text = open(source, 'r').read()
            r = re.search(r'^[^%]*\\RAM{(\d+)}', text, re.MULTILINE)
            if r is not None:
                try:
                    value = int(r.group(1))
                    # In SIO1's tradition 66000 was used instead of 65536 etc.
                    # We're trying to cope with this legacy here.
                    return (value + (value + 31) / 32) * 1000
                except ValueError:
                    pass
        return None

    def _save_prog_dir(self):
        """Zip the package's prog/ directory and upload it to filetracker;
           it is later attached as an additional compilation archive.
        """
        prog_dir = os.path.join(self.rootdir, 'prog')
        if not os.path.isdir(prog_dir):
            return
        archive_path = shutil.make_archive(
                os.path.join(self.rootdir, 'compilation-dir-archive'),
                format="zip", root_dir=prog_dir)
        ft_name = _make_filename(self.env, archive_path)
        self.prog_archive = get_client().put_file(ft_name, archive_path)

    def _find_and_save_files(self, files):
        """Save each of ``files`` (names relative to prog/) as an
           ExtraFile of the problem.
           :return: List of filenames that were not found.
        """
        missing = []
        for filename in files:
            full_path = os.path.join(self.rootdir, 'prog', filename)
            if os.path.isfile(full_path):
                extra = ExtraFile(problem=self.problem, name=filename)
                extra.file.save(filename, File(open(full_path, 'rb')))
            else:
                missing.append(filename)
        return missing

    def _process_extra_files(self):
        """Replace the problem's ExtraFile objects with the
           ``extra_compilation_files`` listed in config.yml.
           :raises: ProblemPackageError when a listed file is missing.
        """
        ExtraFile.objects.filter(problem=self.problem).delete()
        wanted = list(self.config.get('extra_compilation_files', ()))
        missing = self._find_and_save_files(wanted)
        if missing:
            raise ProblemPackageError(
                    _("Expected extra files %r not found in prog/")
                    % (missing))

    def _save_to_field(self, field, file):
        """Download ``file`` from filetracker into the Django file
           ``field``, then remove the filetracker copy.
        """
        basename = os.path.basename(filetracker_to_django_file(file).name)
        local_path = os.path.join(self.rootdir, basename)
        get_client().get_file(file, local_path)
        field.save(os.path.basename(local_path), File(open(local_path, 'rb')))
        get_client().delete_file(file)

    def _extract_makefiles(self):
        """Unpack the bundled sinol makefiles into the package root and
           create a default ``makefile.in`` when the package lacks one.
        """
        makefiles_archive = os.path.join(os.path.dirname(__file__),
                'files', 'sinol-makefiles.tgz')
        Archive(makefiles_archive).extract(to_path=self.rootdir)

        makefile_in = os.path.join(self.rootdir, 'makefile.in')
        if os.path.exists(makefile_in):
            return
        with open(makefile_in, 'w') as f:
            f.write('MODE=wer\n')
            f.write('ID=%s\n' % (self.short_name,))
            f.write('SIG=xxxx000\n')

    def _compile_docs(self, docdir):
        """Run ``make`` in ``docdir`` to build the statement, after
           providing the LaTeX helpers the sources expect.
        """
        files_dir = os.path.join(os.path.dirname(__file__), 'files')

        # fancyheadings.sty looks like a rarely available LaTeX package...
        dst_fancyheadings = os.path.join(docdir, 'fancyheadings.sty')
        if not os.path.isfile(dst_fancyheadings):
            shutil.copyfile(os.path.join(files_dir, 'fancyheadings.sty'),
                    dst_fancyheadings)

        # Extract sinol.cls and oilogo.*, but do not overwrite if they
        # already exist (-k).
        sinol_cls_tgz = os.path.join(files_dir, 'sinol-cls.tgz')
        execute(['tar', '-C', docdir, '-kzxf', sinol_cls_tgz], cwd=docdir)

        try:
            execute('make', cwd=docdir)
        except ExecuteError:
            logger.warning("%s: failed to compile statement", self.filename,
                    exc_info=True)

    def _process_statements(self):
        """Replace the problem's statements with the HTML zip and/or PDF
           found in doc/, building the PDF with make when necessary.
        """
        docdir = os.path.join(self.rootdir, 'doc')
        if not os.path.isdir(docdir):
            logger.warning("%s: no docdir", self.filename)
            return

        # pylint: disable=maybe-no-member
        self.problem.statements.all().delete()

        html_zip = os.path.join(docdir, self.short_name + 'zad.html.zip')
        if os.path.isfile(html_zip):
            ProblemStatement(problem=self.problem).content.save(
                    self.short_name + '.html.zip', File(open(html_zip, 'rb')))

        pdf = os.path.join(docdir, self.short_name + 'zad.pdf')
        if self.use_make and not os.path.isfile(pdf):
            self._compile_docs(docdir)

        if os.path.isfile(pdf):
            ProblemStatement(problem=self.problem).content.save(
                    self.short_name + '.pdf', File(open(pdf, 'rb')))
        else:
            logger.warning("%s: no problem statement", self.filename)

    def _compile(self, filename, prog_name, ext, out_name=None):
        """Compile ``filename`` (the source of ``prog_name`` in the
           language implied by ``ext``) through a sioworkers job.
           :return: The job environment, with ``compiled_file`` pointing
                    at the produced binary in filetracker.
           :raises: ProblemPackageError when compilation fails.
        """
        client = get_client()
        source_name = '%s.%s' % (prog_name, ext)
        ft_source_name = client.put_file(
                _make_filename(self.env, source_name), filename)

        if not out_name:
            out_name = _make_filename(self.env, '%s.e' % prog_name)

        lang = ext
        compiler_prefix = 'default' if self.use_sandboxes else 'system'

        job = self.env.copy()
        job['job_type'] = 'compile'
        job['source_file'] = ft_source_name
        job['out_file'] = out_name
        job['language'] = lang
        job['compiler'] = compiler_prefix + '-' + lang
        if not self.use_make and self.prog_archive:
            job['additional_archive'] = self.prog_archive

        add_extra_files(job, self.problem,
                additional_args=self.extra_compilation_args)
        new_env = run_sioworkers_job(job)
        client.delete_file(ft_source_name)

        message = new_env.get('compiler_output', '')
        result = new_env.get('result_code', 'CE')
        if result != 'OK':
            logger.warning("%s: compilation of file %s failed with code %s",
                    self.filename, filename, result)
            logger.warning("%s: compiler output: %r", self.filename, message)

            raise ProblemPackageError(_("Compilation of file %(filename)s "
                "failed. Compiler output: %(output)s") % {
                    'filename': filename, 'output': message})

        # TODO Remember about 'exec_info' when Java support is introduced.
        new_env['compiled_file'] = new_env['out_file']
        return new_env

    def _find_and_compile(self, suffix, command=None, cwd=None,
            log_on_failure=True, out_name=None):
        """Locate the helper program ``<short_name><suffix>`` and build it,
           either via make (when USE_SINOLPACK_MAKEFILES) or through a
           sioworkers compilation job.
           :return: The resulting environment, or None when no matching
                    source was found.
        """
        command = command or suffix
        renv = None
        if self.use_make:
            pattern = os.path.join(self.rootdir, 'prog',
                    '%s%s.*' % (self.short_name, suffix))
            if glob.glob(pattern):
                logger.info("%s: %s", self.filename, command)
                renv = {}
                renv['stdout'] = execute('make %s' % (command),
                        cwd=cwd or self.rootdir)
                logger.info(renv['stdout'])
        else:
            name = self.short_name + suffix
            ext_groups = (getattr(settings, 'SUBMITTABLE_EXTENSIONS', {})). \
                    values()
            lang_exts = [ext for group in ext_groups for ext in group]

            # The first extension with an existing source file wins.
            for ext in lang_exts:
                src = os.path.join(self.rootdir, 'prog',
                        '%s.%s' % (name, ext))
                if os.path.isfile(src):
                    renv = self._compile(src, name, ext, out_name)
                    logger.info("%s: %s", self.filename, command)
                    break

        if not renv and log_on_failure:
            logger.info("%s: no %s in package", self.filename, command)
        return renv

    def _make_ins(self, re_string):
        """Generate test inputs with the package's ingen program.
           :return: Mapping of collected input files, or {} when there is
                    no ingen or make is used.
        """
        env = self._find_and_compile('ingen')
        if not env or self.use_make:
            return {}

        env['job_type'] = 'ingen'
        env['exe_file'] = env['compiled_file']
        env['re_string'] = re_string
        env['use_sandboxes'] = self.use_sandboxes
        env['collected_files_path'] = _make_filename(self.env, 'in')

        renv = run_sioworkers_job(env)
        get_client().delete_file(env['compiled_file'])
        return renv['collected_files']

    def _make_outs(self, outs_to_make):
        """Generate test outputs by running the model solution on every
           (output name, test) pair in ``outs_to_make``.
           :return: Results from the workers, keyed by test name.
        """
        env = self._find_and_compile('', command='outgen')
        if not env:
            return {}

        exec_kind = 'exec' if self.use_sandboxes else 'unsafe-exec'
        jobs = {}
        for out_path, test in outs_to_make:
            job = dict(env)
            job['job_type'] = exec_kind
            job['exe_file'] = env['compiled_file']
            job['upload_out'] = True
            job['in_file'] = django_to_filetracker_path(test.input_file)
            job['out_file'] = out_path
            jobs[test.name] = job

        results = run_sioworkers_jobs(jobs)
        get_client().delete_file(env['compiled_file'])
        return results

    def _verify_ins(self, tests):
        """Run the package's inwer program on every test input.
           :raises: ProblemPackageError when inwer rejects any input.

           No-op when there is no inwer or when Makefiles are used.
        """
        env = self._find_and_compile('inwer')
        if env and not self.use_make:
            jobs = {}

            for test in tests:
                job = env.copy()
                job['job_type'] = 'inwer'
                job['exe_file'] = env['compiled_file']
                job['in_file'] = django_to_filetracker_path(test.input_file)
                job['use_sandboxes'] = self.use_sandboxes
                jobs[test.name] = job

            jobs = run_sioworkers_jobs(jobs)
            get_client().delete_file(env['compiled_file'])

            # items() instead of the Python 2-only iteritems() -- behaves
            # the same on Python 2 and keeps the code running on Python 3.
            for test_name, job in jobs.items():
                if job['result_code'] != 'OK':
                    raise ProblemPackageError(_("Inwer failed on test "
                        "%(test)s. Inwer output %(output)s") %
                        {'test': test_name, 'output': '\n'.join(job['stdout'])}
                        )

            logger.info("%s: inwer success", self.filename)

    def _assign_scores(self, scored_groups, total_score):
        """Distribute ``total_score`` equally among ``scored_groups``;
           the last few groups (in natural order) get +1 each so the
           scores sum exactly to ``total_score``.
        """
        Test.objects.filter(problem_instance=self.main_problem_instance) \
                .update(max_score=0)
        num_groups = len(scored_groups)
        # Floor division keeps the base score integral on Python 3 as well
        # (plain '/' would produce a float there and break the +1 top-up).
        group_score = total_score // num_groups
        extra_score_groups = sorted(scored_groups, key=naturalsort_key)[
                num_groups - (total_score - num_groups * group_score):]
        for group in scored_groups:
            score = group_score
            if group in extra_score_groups:
                score += 1
            Test.objects.filter(problem_instance=self.main_problem_instance,
                    group=group).update(max_score=score)

    def _process_test(self, test, order, names_re, indir, outdir,
            collected_ins, scored_groups, outs_to_make):
        """Save the test's in/out files, set its limits and assign its
           kind and group.
           :param test: Test (input file) name.
           :param order: Test number.
           :param names_re: Compiled regex extracting basename, test name,
                  group number and test type from the name.
           :param indir: Directory with test inputs.
           :param outdir: Directory with test outputs.
           :param collected_ins: Inputs produced by ingen (not files in
                  the archive).
           :param scored_groups: Accumulator for score groups.
           :param outs_to_make: Accumulator for output files to be
                  generated by the model solution.
           :return: Test instance or None if the name couldn't be matched.
        """
        match = names_re.match(test)
        if not match:
            if test.endswith('.in'):
                raise ProblemPackageError(_("Unrecognized test: %s") %
                        (test))
            return None

        # Examples for odl0ocen.in:
        basename = match.group(1)    # odl0ocen
        name = match.group(2)        # 0ocen
        group = match.group(3)       # 0
        suffix = match.group(4)      # ocen

        instance, created = Test.objects.get_or_create(
                problem_instance=self.main_problem_instance, name=name)

        inname_base = basename + '.in'
        inname = os.path.join(indir, inname_base)
        outname_base = basename + '.out'
        outname = os.path.join(outdir, outname_base)

        if test in collected_ins:
            self._save_to_field(instance.input_file, collected_ins[test])
        else:
            instance.input_file.save(inname_base, File(open(inname, 'rb')))

        if os.path.isfile(outname):
            # Bug fix: the mode string belongs to open().  Previously this
            # read ``File(open(outname), 'rb')``, which opened the file in
            # text mode and passed 'rb' to File() as the file's *name*.
            instance.output_file.save(outname_base, File(open(outname, 'rb')))
        outs_to_make.append((_make_filename(self.env,
                'out/%s' % (outname_base)), instance))

        if group == '0' or 'ocen' in suffix:
            # Example tests
            instance.kind = 'EXAMPLE'
            instance.group = name
        else:
            instance.kind = 'NORMAL'
            instance.group = group
            scored_groups.add(group)

        if created:
            instance.time_limit = self.time_limits.get(name,
                    DEFAULT_TIME_LIMIT)

        # If we find the memory limit specified anywhere in the package:
        # either in the config.yml or in the problem statement, then we
        # overwrite potential manual changes. (In the future we should
        # disallow editing memory limits if they were taken from the
        # package).
        if name in self.memory_limits:
            instance.memory_limit = self.memory_limits[name]
        elif 'memory_limit' in self.config:
            instance.memory_limit = self.config['memory_limit']
        elif self.statement_memory_limit is not None:
            instance.memory_limit = self.statement_memory_limit
        elif created:
            instance.memory_limit = DEFAULT_MEMORY_LIMIT

        instance.order = order
        instance.save()
        return instance

    def _generate_tests(self, total_score=100):
        """Create/update the Test objects of this problem instance.

           Collects inputs (from the archive's in/ directory and from the
           ingen generator), verifies them with inwer, generates missing
           outputs with the model solution, validates the resulting test
           instances, deletes tests no longer present in the package and
           finally distributes ``total_score`` among scored test groups.

           :param total_score: Total number of points split between
                  scored groups.
        """

        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')

        scored_groups = set()
        # Matches '<short_name><group><suffix>.in' and captures basename,
        # test name, group number and test type.
        re_string = r'^(%s(([0-9]+)([a-z]?[a-z0-9]*))).in$' \
                % (re.escape(self.short_name))
        names_re = re.compile(re_string)

        self.time_limits = _stringify_keys(self.config.get('time_limits', {}))
        self.memory_limits = _stringify_keys(
                self.config.get('memory_limits', {}))
        self.statement_memory_limit = self._detect_statement_memory_limit()

        outs_to_make = []
        created_tests = []
        collected_ins = self._make_ins(re_string)
        # Inputs may exist as files or be produced by ingen.
        all_items = list(set(os.listdir(indir)) | set(collected_ins.keys()))
        if self.use_make:
            self._find_and_compile('', command='outgen')

        # Find tests and create objects
        for order, test in enumerate(sorted(all_items, key=naturalsort_key)):
            instance = self._process_test(test, order, names_re, indir, outdir,
                    collected_ins, scored_groups, outs_to_make)
            if instance:
                created_tests.append(instance)

        # Check test inputs
        self._verify_ins(created_tests)

        # Generate outputs (safe upload only)
        if not self.use_make:
            outs = self._make_outs(outs_to_make)
            for instance in created_tests:
                if instance.name in outs:
                    generated_out = outs[instance.name]
                    self._save_to_field(instance.output_file,
                            generated_out['out_file'])

        # Validate tests
        for instance in created_tests:
            if not instance.output_file:
                raise ProblemPackageError(_("Missing out file for test %s") %
                        instance.name)
            try:
                instance.full_clean()
            except ValidationError as e:
                raise ProblemPackageError(e.messages[0])

        # Delete nonexistent tests
        for test in Test.objects.filter(
                problem_instance=self.main_problem_instance) \
                .exclude(id__in=[instance.id for instance in created_tests]):
            logger.info("%s: deleting test %s", self.filename, test.name)
            test.delete()

        # Assign scores
        if scored_groups:
            self._assign_scores(scored_groups, total_score)

    def _detect_library(self):
        """Finds if the problem has a library.

           Tries to read a library name (filename library should be given
           during compilation) from the ``config.yml`` (key ``library``).

           If there is no such key, assumes that a library is not needed.
        """
        has_library = bool(self.config.get('library'))
        if has_library:
            instance, _created = LibraryProblemData.objects \
                .get_or_create(problem=self.problem)
            instance.libname = self.config['library']
            instance.save()
            logger.info("Library %s needed for this problem.",
                        instance.libname)
        else:
            # No (or empty) 'library' key: drop any stale record.
            LibraryProblemData.objects.filter(problem=self.problem).delete()

    def _process_checkers(self):
        """Compiles the output checker ('<short_name>chk') and stores its
           binary on the problem's ``OutputChecker`` record; clears the
           record when no checker exists.

           Bug fix: previously, after a successful worker-side compilation
           (the ``not self.use_make and env`` branch), the local ``checker``
           flag stayed None, so the trailing cleanup reset ``exe_file``
           back to None and discarded the freshly saved binary.
        """
        checker_name = '%schk.e' % (self.short_name)
        out_name = _make_filename(self.env, checker_name)
        instance = OutputChecker.objects.get(problem=self.problem)
        env = self._find_and_compile('chk',
                command=checker_name,
                cwd=os.path.join(self.rootdir, 'prog'),
                log_on_failure=False,
                out_name=out_name)
        if not self.use_make and env:
            # Compiled by a sioworkers job -- persist the binary and stop,
            # so the "no checker" cleanup below cannot wipe it.
            self._save_to_field(instance.exe_file, env['compiled_file'])
            return

        # Makefile mode (or no compilable checker source): look for a
        # ready executable or shell script next to the sources.
        checker_prefix = os.path.join(self.rootdir, 'prog',
                self.short_name + 'chk')
        exe_candidates = [
                checker_prefix + '.e',
                checker_prefix + '.sh',
            ]
        for exe in exe_candidates:
            if os.path.isfile(exe):
                # The handle must stay open until save() copies the content.
                instance.exe_file = File(open(exe, 'rb'))
                instance.save()
                return

        # No checker at all -- clear any stale one.
        instance.exe_file = None
        instance.save()

    def _process_model_solutions(self):
        """Imports model (example) solutions found in prog/.

           Recognized files are named '<short_name>[digits][b|s][digits].<ext>'
           where <ext> is a submittable extension; 's' marks a slow solution
           and 'b' an intentionally incorrect one.

           Fix: the solution source is now opened with a context manager so
           the file handle is closed promptly instead of being leaked.
        """
        ModelSolution.objects.filter(problem=self.problem).delete()

        lang_exts_list = \
                getattr(settings, 'SUBMITTABLE_EXTENSIONS', {}).values()
        extensions = [ext for lang_exts in lang_exts_list for ext in lang_exts]
        regex = r'^%s[0-9]*([bs]?)[0-9]*\.(' + \
                '|'.join(extensions) + ')'
        names_re = re.compile(regex % (re.escape(self.short_name),))
        progdir = os.path.join(self.rootdir, 'prog')

        # (kind shortcut, filename, absolute path) for every matching file.
        progs = [(x[0].group(1), x[1], x[2]) for x in
                    ((names_re.match(name), name, os.path.join(progdir, name))
                    for name in os.listdir(progdir))
                if x[0] and os.path.isfile(x[2])]

        # Dictionary -- kind_shortcut -> (order, full_kind_name)
        kinds = {
                '': (0, 'NORMAL'),
                's': (1, 'SLOW'),
                'b': (2, 'INCORRECT'),
        }

        def modelsolutionssort_key(key):
            short_kind, name, _path = key
            return (kinds[short_kind][0],
                    naturalsort_key(name[:name.index(".")]))

        for order, (short_kind, name, path) in \
               enumerate(sorted(progs, key=modelsolutionssort_key)):
            instance = ModelSolution(problem=self.problem, name=name,
                                     order_key=order,
                                     kind=kinds[short_kind][1])

            # source_file.save() persists the ModelSolution as a side
            # effect; close the handle as soon as the content is stored.
            with open(path, 'rb') as src:
                instance.source_file.save(name, File(src))
            logger.info("%s: model solution: %s", self.filename, name)

    def _save_original_package(self):
        """Links the uploaded ProblemPackage to the problem via an
           OriginalPackage record (created on first upload)."""
        package_record, _created = OriginalPackage.objects.get_or_create(
                problem=self.problem)
        package_record.problem_package = self.package
        package_record.save()

    def process_package(self):
        """Runs all processing steps over the extracted package.

           Order matters: the config must be parsed before any step that
           reads ``self.config``.
        """
        self._process_config_yml()
        self._detect_full_name()
        self._detect_library()
        self._process_extra_files()
        if self.use_make:
            self._extract_makefiles()
        else:
            # Safe (worker-based) mode: archive prog/ for later jobs.
            self._save_prog_dir()
        self._process_statements()
        self._generate_tests()
        self._process_checkers()
        self._process_model_solutions()
        self._save_original_package()

    def unpack(self, env, package):
        """Entry point: creates or updates a Problem from the package.

           :param env: unpacking job environment (may carry an 'author'
               username)
           :param package: the ProblemPackage model instance being unpacked
           :return: the created or updated Problem
           :raises ProblemPackageError: when re-uploading under a different
               short name.
        """
        self.short_name = self._find_main_folder()
        self.env = env
        self.package = package
        existing_problem = self.package.problem
        if existing_problem:
            self.problem = existing_problem
            self.main_problem_instance = self.problem.main_problem_instance
            # Changing the short name on re-upload is forbidden for safety.
            if existing_problem.short_name != self.short_name:
                raise ProblemPackageError(_("Tried to replace problem "
                    "'%(oldname)s' with '%(newname)s'. For safety, changing "
                    "problem short name is not possible.") %
                    dict(oldname=existing_problem.short_name,
                        newname=self.short_name))
        else:
            author_username = env.get('author')
            if author_username:
                author = User.objects.get(username=author_username)
            else:
                author = None

            # A problem with no author is made public.
            self.problem = Problem.create(
                    name=self.short_name,
                    short_name=self.short_name,
                    controller_name=self.controller_name,
                    contest=self.package.contest,
                    is_public=(author is None),
                    author=author)
            problem_site = ProblemSite(problem=self.problem,
                                       url_key=generate_key())
            problem_site.save()
            self.problem.problem_site = problem_site
            self.main_problem_instance = self.problem.main_problem_instance

        self.problem.package_backend_name = self.package_backend_name
        self.problem.save()
        tmpdir = tempfile.mkdtemp()
        logger.info("%s: tmpdir is %s", self.filename, tmpdir)
        try:
            self.archive.extract(to_path=tmpdir)
            self.rootdir = os.path.join(tmpdir, self.short_name)
            self.process_package()

            return self.problem
        finally:
            # Always clean the scratch dir; also drop the filetracker copy
            # of the prog/ archive when one was uploaded.
            shutil.rmtree(tmpdir)
            if self.prog_archive:
                get_client().delete_file(self.prog_archive)
# ===== Beispiel #13 (pastebin example separator) =====
class SinolPackage(object):
    """Unpacks 'sinol' problem packages (legacy, Makefile-driven variant).

    A package is an archive with a single main folder that contains at
    least in/ and out/ subfolders; tests, checkers, statements and model
    solutions are discovered by naming conventions based on that folder
    (the problem's short name).

    NOTE(review): this class uses Python 2 idioms (reusable map/filter
    results, '/' integer division) -- see inline notes; confirm the target
    interpreter before porting.
    """

    def __init__(self, path, original_filename=None):
        # The archive format is detected from the *original* upload name,
        # since the stored path may lack a meaningful extension.
        self.filename = original_filename or path
        if self.filename.lower().endswith('.tar.gz'):
            ext = '.tar.gz'
        else:
            ext = os.path.splitext(self.filename)[1]
        self.archive = Archive(path, ext)
        self.config = None
        self.problem = None
        self.rootdir = None
        self.short_name = self._find_main_folder()

    def _find_main_folder(self):
        # Looks for the only folder which has at least the in/ and out/
        # subfolders.
        #
        # Note that depending on the archive type, there may be or
        # may not be entries for the folders themselves in
        # self.archive.filenames()
        #
        # NOTE(review): `files` is iterated both by the set comprehension
        # and inside the loop below -- fine for Python 2 lists, but on
        # Python 3 map() returns a one-shot iterator and the second pass
        # would see nothing. Confirm the target interpreter.

        files = map(os.path.normcase, self.archive.filenames())
        files = map(os.path.normpath, files)
        toplevel_folders = set(f.split(os.sep)[0] for f in files)
        toplevel_folders = filter(slug_re.match, toplevel_folders)
        problem_folders = []
        for folder in toplevel_folders:
            for required_subfolder in ('in', 'out'):
                # for/else: 'break' marks a missing required subfolder.
                if all(f.split(os.sep)[:2] != [folder, required_subfolder]
                       for f in files):
                    break
            else:
                problem_folders.append(folder)
        if len(problem_folders) == 1:
            return problem_folders[0]

    def identify(self):
        """Returns True when the archive looks like a sinol package."""
        return self._find_main_folder() is not None

    def _process_config_yml(self):
        """Reads config.yml (if present) into an ExtraConfig record and
           caches the parsed result on ``self.config``."""
        config_file = os.path.join(self.rootdir, 'config.yml')
        instance, created = \
                ExtraConfig.objects.get_or_create(problem=self.problem)
        if os.path.exists(config_file):
            # NOTE(review): the open() handle is never closed explicitly.
            instance.config = open(config_file, 'r').read()
        else:
            instance.config = ''
        instance.save()
        self.config = instance.parsed_config

    def _detect_full_name(self):
        r"""Sets the problem's full name from the ``config.yml`` (key
           ``title``) or from the ``title`` tag in the LaTeX source file.

           Example of how the ``title`` tag may look like:
           \title{A problem}
        """
        if 'title' in self.config:
           self.problem.name = self.config['title']
           self.problem.save()
           return

        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            text = open(source, 'r').read()
            r = re.search(r'\\title{(.+)}', text)
            if r is not None:
                self.problem.name = _decode(r.group(1), text)
                self.problem.save()

    def _compile_docs(self, docdir):
        """Builds the PDF statement in ``docdir`` via make/LaTeX,
           provisioning the rarely-packaged style files first."""
        # fancyheadings.sty looks like a rarely available LaTeX package...
        src_fancyheadings = os.path.join(os.path.dirname(__file__), 'files',
                'fancyheadings.sty')
        dst_fancyheadings = os.path.join(docdir, 'fancyheadings.sty')
        if not os.path.exists(dst_fancyheadings):
            shutil.copyfile(src_fancyheadings, dst_fancyheadings)

        # Extract sinol.cls and oilogo.*, but do not overwrite if they
        # already exist (-k).
        sinol_cls_tgz = os.path.join(os.path.dirname(__file__), 'files',
                'sinol-cls.tgz')
        execute(['tar', '-C', docdir, '-kzxf', sinol_cls_tgz], cwd=docdir)

        try:
            execute('make', cwd=docdir)
        except ExecuteError:
            # Best-effort: a failed build only means no PDF statement.
            logger.warning('%s: failed to compile statement', self.filename,
                    exc_info=True)

    def _process_statements(self):
        """Imports the zipped-HTML and PDF statements from doc/,
           compiling the PDF when it is missing."""
        docdir = os.path.join(self.rootdir, 'doc')
        if not os.path.isdir(docdir):
            logger.warning('%s: no docdir', self.filename)
            return

        self.problem.statements.all().delete()

        htmlzipfile = os.path.join(docdir, self.short_name + 'zad.html.zip')
        if os.path.exists(htmlzipfile):
            statement = ProblemStatement(problem=self.problem)
            statement.content.save(self.short_name + '.html.zip',
                    File(open(htmlzipfile, 'rb')))

        pdffile = os.path.join(docdir, self.short_name + 'zad.pdf')

        if not os.path.isfile(pdffile):
            self._compile_docs(docdir)
        if not os.path.isfile(pdffile):
            logger.warning('%s: no problem statement', self.filename)
            return

        statement = ProblemStatement(problem=self.problem)
        statement.content.save(self.short_name + '.pdf',
                File(open(pdffile, 'rb')))

    def _extract_makefiles(self):
        """Unpacks the bundled sinol makefiles into the package root and
           writes a default makefile.in when the package has none."""
        sinol_makefiles_tgz = os.path.join(os.path.dirname(__file__),
                'files', 'sinol-makefiles.tgz')
        Archive(sinol_makefiles_tgz).extract(to_path=self.rootdir)

        makefile_in = os.path.join(self.rootdir, 'makefile.in')
        if not os.path.exists(makefile_in):
           with open(makefile_in, 'w') as f:
                f.write('MODE=wer\n')
                f.write('ID=%s\n' % (self.short_name,))
                f.write('SIG=xxxx000\n')

    def _generate_tests(self):
        """Runs 'make ingen' (plus 'inwer'/'outgen' when applicable) to
           generate and verify the test files on disk."""
        logger.info('%s: ingen', self.filename)
        execute('make ingen', cwd=self.rootdir)

        if glob.glob(os.path.join(self.rootdir, 'prog',
                '%sinwer.*' % (self.short_name,))):
            logger.info('%s: inwer', self.filename)
            execute('make inwer', cwd=self.rootdir)
        else:
            logger.info('%s: no inwer in package', self.filename)

        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')
        for test in os.listdir(indir):
            basename = os.path.splitext(test)[0]
            # Any single missing .out triggers one full 'make outgen' run.
            if not os.path.exists(os.path.join(outdir, basename + '.out')):
                logger.info('%s: outgen', self.filename)
                execute('make outgen', cwd=self.rootdir)
                break

    def _process_tests(self, total_score=100):
        """Creates/updates Test records from in/ and out/ files and
           distributes ``total_score`` points among scored groups."""
        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')
        test_names = []
        scored_groups = set()
        names_re = re.compile(r'^(%s(([0-9]+)([a-z]?[a-z0-9]*))).in$'
                % (re.escape(self.short_name),))

        time_limits = _stringify_keys(self.config.get('time_limits', {}))
        memory_limits = _stringify_keys(self.config.get('memory_limits', {}))

        # Find tests and create objects
        for order, test in enumerate(sorted(os.listdir(indir),
                                            key=naturalsort_key)):
            match = names_re.match(test)
            if not match:
                if test.endswith('.in'):
                    raise ProblemPackageError("Unrecognized test: " + test)
                continue

            # Examples for odl0ocen.in:
            basename = match.group(1)    # odl0ocen
            name = match.group(2)        # 0ocen
            group = match.group(3)       # 0
            suffix = match.group(4)      # ocen

            instance, created = Test.objects.get_or_create(
                problem=self.problem, name=name)
            instance.input_file.save(basename + '.in',
                    File(open(os.path.join(indir, basename + '.in'), 'rb')))
            instance.output_file.save(basename + '.out',
                    File(open(os.path.join(outdir, basename + '.out'), 'rb')))
            if group == '0' or 'ocen' in suffix:
                # Example tests
                instance.kind = 'EXAMPLE'
                instance.group = name
            else:
                instance.kind = 'NORMAL'
                instance.group = group
                scored_groups.add(group)

            # Limits are only assigned on first import; re-uploads keep
            # whatever may have been tweaked in the admin since.
            if created:
                instance.time_limit = time_limits.get(name, DEFAULT_TIME_LIMIT)
                if 'memory_limit' in self.config:
                    instance.memory_limit = self.config['memory_limit']
                else:
                    instance.memory_limit = memory_limits.get(name,
                            DEFAULT_MEMORY_LIMIT)
            instance.order = order
            instance.save()
            test_names.append(name)

        # Delete nonexistent tests
        for test in Test.objects.filter(problem=self.problem) \
                .exclude(name__in=test_names):
            logger.info('%s: deleting test %s', self.filename, test.name)
            test.delete()

        # Assign scores
        # NOTE(review): '/' below is floor division on Python 2; on
        # Python 3 it would produce floats and break the remainder
        # hand-out -- confirm the target interpreter.
        if scored_groups:
            Test.objects.filter(problem=self.problem).update(max_score=0)
            num_groups = len(scored_groups)
            group_score = total_score / num_groups
            # Leftover points go one-by-one to the naturally-last groups.
            extra_score_groups = sorted(scored_groups, key=naturalsort_key)[
                    num_groups - (total_score - num_groups * group_score):]
            for group in scored_groups:
                score = group_score
                if group in extra_score_groups:
                    score += 1
                Test.objects.filter(problem=self.problem, group=group) \
                        .update(max_score=score)

    def _process_checkers(self):
        """Compiles (via make) and stores the output checker, if any."""
        checker_prefix = os.path.join(self.rootdir, 'prog',
                self.short_name + 'chk')
        checker = None

        source_candidates = [
                checker_prefix + '.cpp',
                checker_prefix + '.c',
                checker_prefix + '.pas',
            ]
        for source in source_candidates:
            if os.path.isfile(source):
                logger.info('%s: compiling checker', self.filename)
                execute(['make', self.short_name + 'chk.e'],
                        cwd=os.path.join(self.rootdir, 'prog'))
                break

        exe_candidates = [
                checker_prefix + '.e',
                checker_prefix + '.sh',
            ]
        # NOTE(review): no 'break' here, so when both candidates exist the
        # last one ('.sh') wins -- verify this is intended.
        for exe in exe_candidates:
            if os.path.isfile(exe):
                checker = exe

        instance = OutputChecker.objects.get(problem=self.problem)
        if checker:
            instance.exe_file.save(os.path.basename(checker),
                File(open(checker, 'rb')))
        else:
            instance.exe_file = None
            instance.save()

    def _process_extra_files(self):
        """Imports 'extra_compilation_files' from prog/ as ExtraFile rows."""
        ExtraFile.objects.filter(problem=self.problem).delete()
        for filename in self.config.get('extra_compilation_files', ()):
            fn = os.path.join(self.rootdir, 'prog', filename)
            if not os.path.exists(fn):
                raise ProblemPackageError(_("Expected extra file '%s' not "
                    "found in prog/") % (filename,))
            instance = ExtraFile(problem=self.problem, name=filename)
            instance.file.save(filename, File(open(fn, 'rb')))

    def _process_model_solutions(self):
        """Imports model solutions from prog/; suffix 's' marks slow and
           'b' intentionally incorrect solutions."""
        ModelSolution.objects.filter(problem=self.problem).delete()

        names_re = re.compile(r'^%s[0-9]*([bs]?)[0-9]*\.(c|cpp|pas|java)'
                % (re.escape(self.short_name),))
        progdir = os.path.join(self.rootdir, 'prog')
        for name in os.listdir(progdir):
            path = os.path.join(progdir, name)
            if not os.path.isfile(path):
                continue
            match = names_re.match(name)
            if match:
                instance = ModelSolution(problem=self.problem, name=name)
                instance.kind = {
                        '': 'NORMAL',
                        's': 'SLOW',
                        'b': 'INCORRECT',
                    }[match.group(1)]
                instance.source_file.save(name, File(open(path, 'rb')))
                logger.info('%s: model solution: %s', self.filename, name)

    def _save_original_package(self):
        """Stores the uploaded archive itself as the OriginalPackage."""
        original_package, created = \
                OriginalPackage.objects.get_or_create(problem=self.problem)
        original_package.package_file.save(os.path.basename(self.filename),
                File(open(self.archive.filename, 'rb')))

    def unpack(self, existing_problem=None):
        """Creates (or updates ``existing_problem``) from the archive and
           returns the Problem.

           :raises ProblemPackageError: when replacing a problem whose
               short name differs from the package's.
        """
        self.short_name = self._find_main_folder()

        if existing_problem:
            self.problem = existing_problem
            # Changing the short name on re-upload is forbidden for safety.
            if existing_problem.short_name != self.short_name:
                raise ProblemPackageError(_("Tried to replace problem "
                    "'%(oldname)s' with '%(newname)s'. For safety, changing "
                    "problem short name is not possible.") %
                    dict(oldname=existing_problem.short_name,
                        newname=self.short_name))
        else:
            self.problem = Problem(
                    name=self.short_name,
                    short_name=self.short_name,
                    controller_name='oioioi.sinolpack.controllers.SinolProblemController')

        self.problem.package_backend_name = \
                'oioioi.sinolpack.package.SinolPackageBackend'
        self.problem.save()

        tmpdir = tempfile.mkdtemp()
        logger.info('%s: tmpdir is %s', self.filename, tmpdir)
        try:
            self.archive.extract(to_path=tmpdir)
            self.rootdir = os.path.join(tmpdir, self.short_name)
            self._process_config_yml()
            self._detect_full_name()
            self._extract_makefiles()
            self._process_statements()
            self._generate_tests()
            self._process_tests()
            self._process_checkers()
            self._process_extra_files()
            self._process_model_solutions()
            self._save_original_package()
            return self.problem
        finally:
            shutil.rmtree(tmpdir)
# ===== Beispiel #14 (pastebin example separator) =====
class SinolPackage(object):
    controller_name = 'oioioi.sinolpack.controllers.SinolProblemController'
    package_backend_name = 'oioioi.sinolpack.package.SinolPackageBackend'

    def __init__(self, path, original_filename=None):
        """:param path: path to the package archive on disk
           :param original_filename: name under which the package was
               uploaded (used to detect the archive format); defaults
               to ``path``
        """
        self.filename = original_filename or path
        # The format comes from the *original* name; the stored path may
        # have no meaningful extension.
        if self.filename.lower().endswith('.tar.gz'):
            ext = '.tar.gz'
        else:
            ext = os.path.splitext(self.filename)[1]
        self.archive = Archive(path, ext)
        self.config = None                 # parsed config.yml contents
        self.problem = None                # Problem model instance
        self.main_problem_instance = None
        self.rootdir = None                # extracted package main dir
        self.short_name = None
        self.env = None                    # unpacking job environment
        self.package = None                # ProblemPackage being unpacked
        self.time_limits = None
        self.memory_limits = None
        self.statement_memory_limit = None
        self.prog_archive = None           # filetracker copy of prog/
        self.extra_compilation_args = \
                {'c': C_EXTRA_ARGS, 'cpp': C_EXTRA_ARGS, 'pas': PAS_EXTRA_ARGS}
        self.use_make = settings.USE_SINOLPACK_MAKEFILES
        self.use_sandboxes = not settings.USE_UNSAFE_EXEC

    def identify(self):
        """Returns True when the archive contains a sinol package (a single
           main directory with in/ and out/ subdirectories)."""
        main_dir = self._find_main_dir()
        return main_dir is not None

    def get_short_name(self):
        """Returns the problem short name (the main directory's name)."""
        main_dir = self._find_main_dir()
        return main_dir

    def _find_main_dir(self):
        """Looks for the directory which contains at least the in/ and out/
           subdirectories. Only one such directory should be found.
           Otherwise None is returned.

           Note that depending on the archive type, there may be or
           may not be entries for the dirs themselves in
           self.archive.filenames()
        """
        normalized = [os.path.normpath(os.path.normcase(f))
                      for f in self.archive.filenames()]
        split_entries = [entry.split(os.sep) for entry in normalized]

        slug_dirs = [d for d in set(parts[0] for parts in split_entries)
                     if slug_re.match(d)]

        def has_subdir(directory, subdir):
            # True when some archive entry lives under directory/subdir.
            return any(parts[:2] == [directory, subdir]
                       for parts in split_entries)

        problem_dirs = [d for d in slug_dirs
                        if has_subdir(d, 'in') and has_subdir(d, 'out')]

        if len(problem_dirs) == 1:
            return problem_dirs[0]

        return None

    def _save_to_field(self, field, file):
        """Downloads ``file`` from the filetracker into ``field`` (a Django
           file field, saved immediately), then deletes the filetracker copy.
        """
        basename = os.path.basename(filetracker_to_django_file(file).name)
        filename = os.path.join(self.rootdir, basename)
        get_client().get_file(file, filename)
        # NOTE(review): the open() handle is not closed explicitly here.
        field.save(os.path.basename(filename), File(open(filename, 'rb')))
        get_client().delete_file(file)

    def _find_and_compile(self, suffix, command=None, cwd=None,
                          log_on_failure=True, out_name=None):
        """Compiles the helper program '<short_name><suffix>' using make or
           a worker job, depending on the configured mode.

           Returns the compilation environment, or None when no matching
           program exists in the package.
        """
        command = command or suffix

        if self.use_make:
            renv = self._compile_using_make(command, cwd, suffix)
        else:
            renv = self._compile_matching_extension(command, out_name, suffix)

        if not renv and log_on_failure:
            logger.info("%s: no %s in package", self.filename, command)
        return renv

    def _compile_using_make(self, command, cwd, suffix):
        """Runs ``make <command>`` when a matching source exists in prog/.

           Returns ``{'stdout': ...}`` on success, or None when there is
           nothing to build.
        """
        pattern = os.path.join(self.rootdir, 'prog',
                               '%s%s.*' % (self.short_name, suffix))
        if not glob.glob(pattern):
            return None

        logger.info("%s: %s", self.filename, command)
        make_cwd = cwd if cwd else self.rootdir
        stdout = execute('make %s' % (command),
                         cwd=make_cwd).decode('utf-8', 'replace')
        logger.info(stdout)
        return {'stdout': stdout}

    def _compile_matching_extension(self, command, out_name, suffix):
        """Compiles '<short_name><suffix>.<ext>' for the first submittable
           extension that has an existing source file.

           Returns the compilation environment, or None when no source
           matches.
        """
        name = self.short_name + suffix
        lang_exts = [ext
                     for exts in getattr(settings, 'SUBMITTABLE_EXTENSIONS',
                                         {}).values()
                     for ext in exts]
        for ext in lang_exts:
            src = os.path.join(self.rootdir, 'prog', '%s.%s' % (name, ext))
            if os.path.isfile(src):
                renv = self._compile(src, name, ext, out_name)
                logger.info("%s: %s", self.filename, command)
                return renv
        return None

    def _compile(self, filename, prog_name, ext, out_name=None):
        """Uploads ``filename`` to the filetracker, runs a compilation job
           and returns the job environment with 'compiled_file' set.

           :raises ProblemPackageError: when compilation fails (via
               :meth:`_ensure_compilation_success`).
        """
        client = get_client()
        source_name = '%s.%s' % (prog_name, ext)
        ft_source_name = client.put_file(
            _make_filename_in_job_dir(self.env, source_name),
            filename)

        if not out_name:
            out_name = _make_filename_in_job_dir(self.env, '%s.e' % prog_name)

        new_env = self._run_compilation_job(ext, ft_source_name, out_name)
        # The uploaded source is no longer needed once the job finished.
        client.delete_file(ft_source_name)

        self._ensure_compilation_success(filename, new_env)

        # TODO Remember about 'exec_info' when Java support is introduced.
        new_env['compiled_file'] = new_env['out_file']
        return new_env

    def _run_compilation_job(self, ext, ft_source_name, out_name):
        """Builds and runs a sioworkers 'compile' job for the given source
           and returns the resulting job environment.
        """
        job = self.env.copy()
        job['job_type'] = 'compile'
        job['task_priority'] = TASK_PRIORITY
        job['source_file'] = ft_source_name
        job['out_file'] = out_name
        job['language'] = ext
        # Sandboxed workers use the 'default-*' compilers; unsafe
        # execution falls back to the 'system-*' ones.
        prefix = 'default' if self.use_sandboxes else 'system'
        job['compiler'] = prefix + '-' + ext
        if not self.use_make and self.prog_archive:
            job['additional_archive'] = self.prog_archive
        add_extra_files(job, self.problem,
                        additional_args=self.extra_compilation_args)
        return run_sioworkers_job(job)

    def _ensure_compilation_success(self, filename, new_env):
        """Raises ProblemPackageError when the compilation job failed;
           returns silently on success."""
        result = new_env.get('result_code', 'CE')
        if result == 'OK':
            return

        output = new_env.get('compiler_output', '')
        logger.warning("%s: compilation of file %s failed with code %s",
                       self.filename, filename, result)
        logger.warning("%s: compiler output: %r", self.filename, output)

        raise ProblemPackageError(_("Compilation of file %(filename)s "
                                    "failed. Compiler output: "
                                    "%(output)s") % {
                                      'filename': filename,
                                      'output': output})

    def _make_ins(self, re_string):
        """Runs the 'ingen' program in a sioworkers job (safe mode only)
           and returns the generated input files matching ``re_string``
           as a dict (name -> filetracker file).

           Returns {} when there is no ingen program or Makefiles are in
           use (then 'make ingen' handles generation elsewhere).
        """
        env = self._find_and_compile('ingen')
        if env and not self.use_make:
            # Reuse the compilation environment as the ingen job env.
            env['job_type'] = 'ingen'
            env['task_priority'] = TASK_PRIORITY
            env['exe_file'] = env['compiled_file']
            env['re_string'] = re_string
            env['use_sandboxes'] = self.use_sandboxes
            env['collected_files_path'] = \
                _make_filename_in_job_dir(self.env, 'in')

            renv = run_sioworkers_job(env)
            get_client().delete_file(env['compiled_file'])
            return renv['collected_files']

        return {}

    def unpack(self, env, package):
        """Entry point: unpacks ``package`` and returns the resulting
           Problem instance (created or updated).
        """
        self.env = env
        self.package = package
        self.short_name = self.get_short_name()

        self._create_problem_or_reuse_if_exists(self.package.problem)
        return self._extract_and_process_package()

    def _create_problem_or_reuse_if_exists(self, existing_problem):
        """Binds ``self.problem`` to ``existing_problem`` (validating its
           short name) or creates a brand-new problem with its site.
        """
        if not existing_problem:
            self.problem = self._create_problem_instance()
            site = ProblemSite(problem=self.problem,
                               url_key=generate_key())
            site.save()
            self.problem.problem_site = site
        else:
            self.problem = existing_problem
            self._ensure_short_name_equality_with(existing_problem)

        self.main_problem_instance = self.problem.main_problem_instance
        self.problem.package_backend_name = self.package_backend_name
        self.problem.save()

    def _ensure_short_name_equality_with(self, existing_problem):
        """Refuses (raises ProblemPackageError) to replace a problem under
           a different short name."""
        if existing_problem.short_name == self.short_name:
            return
        raise ProblemPackageError(_("Tried to replace problem "
            "'%(oldname)s' with '%(newname)s'. For safety, changing "
            "problem short name is not possible.") %
            dict(oldname=existing_problem.short_name,
                 newname=self.short_name))

    def _create_problem_instance(self):
        """Creates a new Problem owned by env['author']; when no author is
           given the problem is made public."""
        author_username = self.env.get('author')
        author = (User.objects.get(username=author_username)
                  if author_username else None)

        return Problem.create(
            name=self.short_name,
            short_name=self.short_name,
            controller_name=self.controller_name,
            contest=self.package.contest,
            is_public=(author is None),
            author=author)

    def _extract_and_process_package(self):
        """Extracts the archive into a temporary directory, runs the full
           processing pipeline and returns the Problem.

           The temporary directory, and the filetracker copy of the prog/
           archive (if any), are always cleaned up -- even on failure.
        """
        tmpdir = tempfile.mkdtemp()
        logger.info("%s: tmpdir is %s", self.filename, tmpdir)
        try:
            self.archive.extract(to_path=tmpdir)
            self.rootdir = os.path.join(tmpdir, self.short_name)
            self._process_package()

            return self.problem
        finally:
            shutil.rmtree(tmpdir)
            if self.prog_archive:
                get_client().delete_file(self.prog_archive)

    def _process_package(self):
        """Runs all processing steps in order.

           Order matters: the config is parsed first, since later steps
           read ``self.config``.
        """
        self._process_config_yml()
        self._detect_full_name()
        self._detect_library()
        self._process_extra_files()
        if self.use_make:
            self._extract_makefiles()
        else:
            # Safe mode: archive prog/ so worker jobs can compile later.
            self._save_prog_dir()
        self._process_statements()
        self._generate_tests()
        self._process_checkers()
        self._process_model_solutions()
        self._process_attachments()
        self._save_original_package()

    def _process_config_yml(self):
        """Parses config file from problem dir, saves its content to
           the current instance.

           Fix: the config file is now read through a context manager so
           the handle is closed promptly instead of being leaked.
        """
        config_file = os.path.join(self.rootdir, 'config.yml')
        instance, created = \
                ExtraConfig.objects.get_or_create(problem=self.problem)
        if os.path.isfile(config_file):
            with open(config_file, 'r') as f:
                instance.config = f.read()
        else:
            instance.config = ''
        instance.save()
        self.config = instance.parsed_config

    def _detect_full_name(self):
        r"""Sets the problem's full name from the ``config.yml`` (key
           ``title``) or from the ``title`` tag in the LaTeX source file.

           Example of how the ``title`` tag may look like:
           \title{A problem}

           Fixes: the docstring is now raw (the previous one contained an
           unintended ``\t`` escape), and the LaTeX source is read through
           a context manager so the handle is not leaked.
        """
        if 'title' in self.config:
            self.problem.name = self.config['title']
            self.problem.save()
            return

        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            with open(source, 'r') as f:
                text = f.read()
            # '[^%]*' keeps the match away from commented-out \title lines.
            r = re.search(r'^[^%]*\\title{(.+)}', text, re.MULTILINE)
            if r is not None:
                self.problem.name = _decode(r.group(1), text)
                self.problem.save()

    def _detect_library(self):
        """Finds if the problem has a library.

           Tries to read a library name (filename library should be given
           during compilation) from the ``config.yml`` (key ``library``).

           If there is no such key, assumes that a library is not needed.
        """
        libname = self.config.get('library')
        if not libname:
            # No (or empty) library key: remove any stale record.
            LibraryProblemData.objects.filter(problem=self.problem).delete()
            return

        instance, _created = LibraryProblemData.objects \
            .get_or_create(problem=self.problem)
        instance.libname = libname
        instance.save()
        logger.info("Library %s needed for this problem.", instance.libname)

    def _process_extra_files(self):
        """Drops previously stored extra files and saves the ones listed
           under ``extra_compilation_files`` in the config.

           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
           when any listed file is missing from ``prog/``.
        """
        ExtraFile.objects.filter(problem=self.problem).delete()
        expected = list(self.config.get('extra_compilation_files', ()))
        missing = self._find_and_save_files(expected)
        if missing:
            raise ProblemPackageError(
                    _("Expected extra files %r not found in prog/")
                    % (missing))

    def _find_and_save_files(self, files):
        """Saves files in the database.

           :param files: List of expected files (names relative to prog/).
           :return: List of files that were not found.
        """
        not_found = []
        for filename in files:
            fn = os.path.join(self.rootdir, 'prog', filename)
            if not os.path.isfile(fn):
                not_found.append(filename)
            else:
                instance = ExtraFile(problem=self.problem, name=filename)
                # Close the source handle once Django has copied the data
                # (the original leaked the open file object).
                with open(fn, 'rb') as f:
                    instance.file.save(filename, File(f))
        return not_found

    def _extract_makefiles(self):
        """Unpacks the bundled sinol makefiles into the package root and
           writes a default ``makefile.in`` when none is present.
        """
        makefiles_archive = os.path.join(os.path.dirname(__file__),
                'files', 'sinol-makefiles.tgz')
        Archive(makefiles_archive).extract(to_path=self.rootdir)

        makefile_in = os.path.join(self.rootdir, 'makefile.in')
        if os.path.exists(makefile_in):
            return
        with open(makefile_in, 'w') as f:
            # Adjacent literals concatenate before the % substitution.
            f.write('MODE=wer\n'
                    'ID=%s\n'
                    'SIG=xxxx000\n' % (self.short_name,))

    def _save_prog_dir(self):
        """Creates archive with programs directory and uploads it to the
           filetracker; stores the remote path on ``self.prog_archive``.
        """
        prog_dir = os.path.join(self.rootdir, 'prog')
        if not os.path.isdir(prog_dir):
            return
        local_archive = shutil.make_archive(
                os.path.join(self.rootdir, 'compilation-dir-archive'),
                format="zip", root_dir=prog_dir)
        remote_name = _make_filename_in_job_dir(self.env, local_archive)
        self.prog_archive = get_client().put_file(remote_name, local_archive)

    def _process_statements(self):
        """Creates problem statement from html or pdf source.

           For every configured statement language (plus the default
           language) an ``*.html.zip`` and/or ``*.pdf`` file is looked up
           in ``doc/`` and attached to the problem.
        """
        docdir = os.path.join(self.rootdir, 'doc')
        if not os.path.isdir(docdir):
            logger.warning("%s: no docdir", self.filename)
            return

        # pylint: disable=maybe-no-member
        self.problem.statements.all().delete()

        # '' means the default (prefix-less) statement file.
        lang_prefs = [''] + ['-' + l[0] for l in settings.STATEMENT_LANGUAGES]

        if self.use_make:
            self._compile_latex_docs(docdir)

        for lang in lang_prefs:
            htmlzipfile = os.path.join(
                    docdir, self.short_name + 'zad' + lang + '.html.zip')
            if os.path.isfile(htmlzipfile):
                self._force_index_encoding(htmlzipfile)
                statement = ProblemStatement(problem=self.problem,
                                             language=lang[1:])
                # with-block closes the handle after Django copies it
                # (previously the handle was leaked).
                with open(htmlzipfile, 'rb') as f:
                    statement.content.save(
                            self.short_name + lang + '.html.zip', File(f))

            pdffile = os.path.join(
                    docdir, self.short_name + 'zad' + lang + '.pdf')
            if os.path.isfile(pdffile):
                statement = ProblemStatement(problem=self.problem,
                                             language=lang[1:])
                with open(pdffile, 'rb') as f:
                    statement.content.save(
                            self.short_name + lang + '.pdf', File(f))

        if not self.problem.statements.exists():
            logger.warning("%s: no problem statement", self.filename)

    def _force_index_encoding(self, htmlzipfile):
        """Ensures index.html file is utf-8 encoded, if cannot apply
           this encoding raise
           :class:`~oioioi.problems.package.ProblemPackageError`.

           Accepted encodings are utf-8 (file left untouched) and
           iso-8859-2 (index.html is transcoded in place inside the
           zip archive).

           :param htmlzipfile: Path to the ``*.html.zip`` statement
                  archive; it is modified in place when transcoding
                  is needed.
        """
        with zipfile.ZipFile(htmlzipfile, 'r') as archive, \
                archive.open('index.html') as index:

            data = index.read()
            # First, we check if index.html is utf-8 encoded.
            # If it is - nothing to do.
            try:
                data.decode('utf8')
            # We accept iso-8859-2 encoded files, but django doesn't
            # so index.html has to be translated to utf-8.
            except UnicodeDecodeError:
                try:
                    data = data.decode('iso-8859-2').encode('utf8')
                except (UnicodeDecodeError, UnicodeEncodeError):
                    raise ProblemPackageError(
                        _("index.html has to be utf8 or iso8859-2 encoded"))
                # We have to remove index.html from the archive and
                # then add the translated file to archive because
                # zipfile module doesn't implement editing files
                # inside archive.
                _remove_from_zip(htmlzipfile, 'index.html')
                with zipfile.ZipFile(htmlzipfile, 'a') as new_archive:
                    new_archive.writestr('index.html', data)

    def _compile_latex_docs(self, docdir):
        """Compiles LaTeX statement sources in *docdir* via ``make``,
           first provisioning the helper style/class files the sinol
           packages expect.
        """
        files_dir = os.path.join(os.path.dirname(__file__), 'files')

        # fancyheadings.sty looks like a rarely available LaTeX package...
        dst_fancyheadings = os.path.join(docdir, 'fancyheadings.sty')
        if not os.path.isfile(dst_fancyheadings):
            shutil.copyfile(os.path.join(files_dir, 'fancyheadings.sty'),
                            dst_fancyheadings)

        # Extract sinol.cls and oilogo.*, but do not overwrite if they
        # already exist (-k).
        execute(['tar', '-C', docdir, '-kzxf',
                 os.path.join(files_dir, 'sinol-cls.tgz')], cwd=docdir)

        try:
            execute('make', cwd=docdir)
        except ExecuteError:
            # Best effort: a failed LaTeX build is logged, not fatal.
            logger.warning("%s: failed to compile statement",
                           self.filename, exc_info=True)

    def _generate_tests(self, total_score_if_auto=100):
        """Creates tests for the problem: reads limits from the config,
           builds/validates the test instances, generates missing output
           files and finally assigns group scores.

           :param total_score_if_auto: Total score distributed over groups
                  when the config does not specify scores explicitly.
        """
        self.time_limits = _stringify_keys(self.config.get('time_limits', {}))
        self.memory_limits = _stringify_keys(
                self.config.get('memory_limits', {}))
        self.statement_memory_limit = self._detect_statement_memory_limit()

        tests, outs_to_make, scored_groups = \
            self._create_instances_for_tests()

        self._verify_time_limits(tests)
        self._verify_inputs(tests)
        self._generate_test_outputs(tests, outs_to_make)
        self._validate_tests(tests)
        self._delete_non_existing_tests(tests)

        self._assign_scores(scored_groups, total_score_if_auto)

    def _detect_statement_memory_limit(self):
        """Returns the memory limit in the problem statement, converted to
           KiB or ``None``.
        """
        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            # Close the handle deterministically instead of leaking it.
            with open(source, 'r') as tex:
                text = tex.read()
            r = re.search(r'^[^%]*\\RAM{(\d+)}', text, re.MULTILINE)
            if r is not None:
                try:
                    value = int(r.group(1))
                    # In SIO1's tradition 66000 was used instead of 65536 etc.
                    # We're trying to cope with this legacy here.
                    # Floor division keeps the result an int on Python 3
                    # ('/' would produce a float limit).
                    return (value + (value + 31) // 32) * 1000
                except ValueError:
                    pass
        return None

    def _create_instances_for_tests(self):
        """Iterate through available test inputs.

           :return: Triple (created tests instances,
                            outs that have to be generated,
                            score groups (determined by test names))
        """
        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')

        re_string = r'^(%s(([0-9]+)([a-z]?[a-z0-9]*))).in$' \
                    % (re.escape(self.short_name))
        names_re = re.compile(re_string)

        collected_ins = self._make_ins(re_string)
        # Union of inputs present on disk and inputs produced by ingen.
        all_items = list(set(os.listdir(indir)) | set(collected_ins.keys()))

        if self.use_make:
            self._find_and_compile('', command='outgen')

        created_tests = []
        outs_to_make = []
        scored_groups = set()
        for order, test in enumerate(sorted(all_items, key=naturalsort_key)):
            instance = self._process_test(test, order, names_re, indir,
                                          outdir, collected_ins,
                                          scored_groups, outs_to_make)
            if instance is not None:
                created_tests.append(instance)

        return created_tests, outs_to_make, scored_groups

    def _verify_time_limits(self, tests):
        """Checks the sum of the time limits (in milliseconds) of all tests
           against ``settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM``.

           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
           if the sum of tests time limits exceeds the allowed maximum.
        """
        # sum() replaces the manual accumulation loop; empty input gives 0.
        time_limit_sum = sum(test.time_limit for test in tests)
        if time_limit_sum > settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM:
            # Round up to whole seconds for the user-facing message.
            time_limit_sum_rounded = (time_limit_sum + 999) / 1000.0
            limit_seconds = settings.MAX_TEST_TIME_LIMIT_PER_PROBLEM / 1000.0

            raise ProblemPackageError(_(
                "Sum of time limits for all tests is too big. It's %(sum)ds, "
                "but it shouldn't exceed %(limit)ds."
            ) % {'sum': time_limit_sum_rounded, 'limit': limit_seconds})

    def _verify_inputs(self, tests):
        """Check if correct solution exits with code 0 on all tests.

           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
           otherwise.
        """
        env = self._find_and_compile('inwer')
        # Nothing to verify without an inwer binary; make mode runs it
        # through the Makefile instead.
        if not env or self.use_make:
            return

        jobs = {}
        for test in tests:
            job = env.copy()
            job.update({
                'job_type': 'inwer',
                'task_priority': TASK_PRIORITY,
                'exe_file': env['compiled_file'],
                'in_file': django_to_filetracker_path(test.input_file),
                'use_sandboxes': self.use_sandboxes,
            })
            jobs[test.name] = job

        jobs = run_sioworkers_jobs(jobs)
        get_client().delete_file(env['compiled_file'])

        for test_name, job in six.iteritems(jobs):
            if job['result_code'] != 'OK':
                raise ProblemPackageError(
                    _("Inwer failed on test "
                      "%(test)s. Inwer output %(output)s")
                    % {'test': test_name,
                       'output': '\n'.join(job['stdout'])})

        logger.info("%s: inwer success", self.filename)

    def _generate_test_outputs(self, tests, outs_to_make):
        """Generates missing test outputs with the model solution and
           stores them on the matching test instances (no-op in make mode,
           where the Makefile handles output generation).
        """
        if self.use_make:
            return
        outs = self._make_outs(outs_to_make)
        for instance in tests:
            generated = outs.get(instance.name)
            if generated is not None:
                self._save_to_field(instance.output_file,
                                    generated['out_file'])

    def _validate_tests(self, created_tests):
        """Check if all tests have output files and that
           test instances are correct.

           :raises: :class:`~oioioi.problems.package.ProblemPackageError`
        """
        for test in created_tests:
            if not test.output_file:
                raise ProblemPackageError(
                        _("Missing out file for test %s") % test.name)
            try:
                # Run Django model validation on the instance.
                test.full_clean()
            except ValidationError as e:
                raise ProblemPackageError(e.messages[0])

    def _delete_non_existing_tests(self, created_tests):
        """Removes database tests that are not part of the freshly
           imported set.
        """
        kept_ids = [instance.id for instance in created_tests]
        stale_tests = Test.objects \
                .filter(problem_instance=self.main_problem_instance) \
                .exclude(id__in=kept_ids)
        for test in stale_tests:
            logger.info("%s: deleting test %s", self.filename, test.name)
            test.delete()

    def _process_test(self, test, order, names_re, indir, outdir,
            collected_ins, scored_groups, outs_to_make):
        """Responsible for saving test in and out files,
           setting test limits, assigning test kind and group.

           :param test: Test name.
           :param order: Test number.
           :param names_re: Compiled regex to match test details from name.
                  Should extract basename, test name,
                  group number and test type.
           :param indir: Directory with tests inputs.
           :param outdir: Directory with tests outputs.
           :param collected_ins: List of inputs that were generated,
                  not taken from archive as a file.
           :param scored_groups: Accumulator for score groups.
           :param outs_to_make: Accumulator for name of output files to
                  be generated by model solution.
           :return: Test instance or None if name couldn't be matched.
        """
        match = names_re.match(test)
        if not match:
            if test.endswith('.in'):
                raise ProblemPackageError(_("Unrecognized test: %s") %
                        (test))
            return None

        # Examples for odl0ocen.in:
        basename = match.group(1)    # odl0ocen
        name = match.group(2)        # 0ocen
        group = match.group(3)       # 0
        suffix = match.group(4)      # ocen

        instance, created = Test.objects.get_or_create(
                problem_instance=self.main_problem_instance, name=name)

        inname_base = basename + '.in'
        inname = os.path.join(indir, inname_base)
        outname_base = basename + '.out'
        outname = os.path.join(outdir, outname_base)

        if test in collected_ins:
            self._save_to_field(instance.input_file, collected_ins[test])
        else:
            with open(inname, 'rb') as in_file:
                instance.input_file.save(inname_base, File(in_file))

        if os.path.isfile(outname):
            # Bug fix: previously written as File(open(outname), 'rb'),
            # which opened the file in *text* mode and passed 'rb' as
            # File's name argument instead of open's mode.
            with open(outname, 'rb') as out_file:
                instance.output_file.save(outname_base, File(out_file))
        else:
            outs_to_make.append((_make_filename_in_job_dir(self.env,
                'out/%s' % (outname_base)), instance))

        if group == '0' or 'ocen' in suffix:
            # Example tests
            instance.kind = 'EXAMPLE'
            instance.group = name
        else:
            instance.kind = 'NORMAL'
            instance.group = group
            scored_groups.add(group)

        if created:
            instance.time_limit = self.time_limits.get(name,
                    DEFAULT_TIME_LIMIT)

        memory_limit = self._get_memory_limit(created, name)
        if memory_limit:
            instance.memory_limit = memory_limit

        instance.order = order
        instance.save()
        return instance

    def _get_memory_limit(self, created, name):
        """If we find the memory limit specified anywhere in the package:
           either in the config.yml or in the problem statement, then we
           overwrite potential manual changes. (In the future we should
           disallow editing memory limits if they were taken from the
           package).

           :return: Memory limit found in config or statement,
                    None otherwise (default for freshly created tests).
        """
        # Precedence: per-test config > global config > statement > default.
        if name in self.memory_limits:
            return self.memory_limits[name]
        if 'memory_limit' in self.config:
            return self.config['memory_limit']
        if self.statement_memory_limit is not None:
            return self.statement_memory_limit
        return DEFAULT_MEMORY_LIMIT if created else None

    def _make_outs(self, outs_to_make):
        """Run jobs to generate test outputs.

           :return: Result from workers (mapping test name -> job result),
                    or an empty dict when no output generator is present.
        """
        env = self._find_and_compile('', command='outgen')
        if not env:
            return {}

        jobs = {}
        for outname, test in outs_to_make:
            job = env.copy()
            job.update({
                'job_type': 'exec' if self.use_sandboxes else 'unsafe-exec',
                'task_priority': TASK_PRIORITY,
                'exe_file': env['compiled_file'],
                'upload_out': True,
                'in_file': django_to_filetracker_path(test.input_file),
                'out_file': outname,
            })
            if test.memory_limit:
                job['exec_mem_limit'] = test.memory_limit
            jobs[test.name] = job

        jobs = run_sioworkers_jobs(jobs)
        get_client().delete_file(env['compiled_file'])
        return jobs

    def _check_scores_from_config(self, scored_groups, config_scores):
        """Makes sure that all scored tests are present in config
           and that nothing else is there.
        """
        # Shared substitution context; group_name is filled in per error.
        context = {"scored_groups": list(scored_groups),
                   "config_groups": config_scores}

        for group in scored_groups:
            # Config keys come from YAML as ints, group names are strings.
            if int(group) not in config_scores:
                context["group_name"] = group
                raise ProblemPackageError(
                    _("Score for group '%(group_name)s' not found. "
                      "You must either provide scores for all groups "
                      "or not provide them at all "
                      "(to have them assigned automatically). "
                      "(Scored groups: %(scored_groups)s, "
                      "groups from config: %(config_groups)s)") % context)

        for group in config_scores:
            if str(group) not in scored_groups:
                context["group_name"] = group
                raise ProblemPackageError(
                    _("Score for group '%(group_name)s' "
                      "found in config, "
                      "but no such test group exists in scored groups. "
                      "You must either provide scores for all groups "
                      "or not provide them at all "
                      "(to have them assigned automatically). "
                      "(Scored groups: %(scored_groups)s, "
                      "groups from config: %(config_groups)s)") % context)

    def _compute_scores_automatically(self, scored_groups, total_score):
        """All groups get equal score, except few last groups
           that are given +1 to compensate rounding error and
           match total sum of ``total_score``.

           :return: Mapping group name -> integer score.
        """
        if not scored_groups:
            return {}

        num_groups = len(scored_groups)
        # Floor division keeps scores integral on Python 3; plain '/'
        # would yield a float here and break the slice index below.
        group_score = total_score // num_groups
        num_extra = total_score - num_groups * group_score
        # The naturally-last `num_extra` groups absorb the remainder.
        extra_score_groups = sorted(scored_groups, key=naturalsort_key)[
                num_groups - num_extra:]

        scores = {}
        for group in scored_groups:
            score = group_score
            if group in extra_score_groups:
                score += 1
            scores[group] = score

        return scores

    def _assign_scores(self, scored_groups, total_score_if_auto):
        """Checks if there's a ``scores`` entry in config
           and sets scores according to that
           or assigns them automatically otherwise.
        """
        config_scores = self.config.get('scores', {})
        if config_scores:
            self._check_scores_from_config(scored_groups, config_scores)
            scores = config_scores
        else:
            scores = self._compute_scores_automatically(
                    scored_groups, total_score_if_auto)

        # Reset all scores first, then apply the per-group values.
        Test.objects.filter(problem_instance=self.main_problem_instance) \
            .update(max_score=0)

        for group, score in six.iteritems(scores):
            Test.objects.filter(problem_instance=self.main_problem_instance,
                                group=group).update(max_score=score)

    def _process_checkers(self):
        """Compiles output checker and saves its binary.

           Falls back to a handwritten checker executable/script when
           compilation is skipped (make mode) or fails.
        """
        checker_name = '%schk.e' % (self.short_name)
        out_name = _make_filename_in_job_dir(self.env, checker_name)
        instance, _created = OutputChecker.objects.get_or_create(
                problem=self.problem)
        env = self._find_and_compile('chk',
                command=checker_name,
                cwd=os.path.join(self.rootdir, 'prog'),
                log_on_failure=False,
                out_name=out_name)
        if env and not self.use_make:
            self._save_to_field(instance.exe_file, env['compiled_file'])
        else:
            instance.exe_file = self._find_checker_exec()
            instance.save()

    def _find_checker_exec(self):
        """Looks for a prebuilt checker (``*chk.e`` or ``*chk.sh``) in
           ``prog/`` and returns it wrapped in a Django ``File``, or None.

           The handle is intentionally left open: Django reads from it
           when the field is saved by the caller.
        """
        prefix = os.path.join(self.rootdir, 'prog', self.short_name + 'chk')
        for suffix in ('.e', '.sh'):
            candidate = prefix + suffix
            if os.path.isfile(candidate):
                return File(open(candidate, 'rb'))
        return None

    def _process_model_solutions(self):
        """Save model solutions to database.

           Solutions are ordered by kind (normal, slow, incorrect) and
           then by natural sort of the base name.
        """
        ModelSolution.objects.filter(problem=self.problem).delete()

        progs = self._get_model_solutions_sources()

        # Dictionary -- kind_shortcut -> (order, full_kind_name)
        kinds = {
                '': (0, 'NORMAL'),
                's': (1, 'SLOW'),
                'b': (2, 'INCORRECT'),
        }

        def modelsolutionssort_key(key):
            short_kind, name, _path = key
            return (kinds[short_kind][0],
                    naturalsort_key(name[:name.index(".")]))

        for order, (short_kind, name, path) in \
               enumerate(sorted(progs, key=modelsolutionssort_key)):
            instance = ModelSolution(problem=self.problem, name=name,
                                     order_key=order,
                                     kind=kinds[short_kind][1])

            # Close the source handle after saving (previously leaked).
            with open(path, 'rb') as src:
                instance.source_file.save(name, File(src))
            logger.info('%s: model solution: %s', self.filename, name)

    def _get_model_solutions_sources(self):
        """:return: Sources as tuples (kind_of_solution, filename,
                                       full path to file).
        """
        lang_exts_list = \
            list(getattr(settings, 'SUBMITTABLE_EXTENSIONS', {}).values())
        extensions = [ext for lang_exts in lang_exts_list for ext in lang_exts]
        names_re = re.compile(r'^%s[0-9]*([bs]?)[0-9]*\.(%s)'
                              % (re.escape(self.short_name),
                                 '|'.join(extensions)))
        progdir = os.path.join(self.rootdir, 'prog')
        sources = []
        for name in os.listdir(progdir):
            match = names_re.match(name)
            path = os.path.join(progdir, name)
            if match and os.path.isfile(path):
                sources.append((match.group(1), name, path))
        return sources

    def _process_attachments(self):
        """Remove previously added attachments for the problem,
           and saves new ones from attachment directory.
        """
        # A queryset is never None -- delete() can be called directly
        # (the old `is not None` check was always true).
        ProblemAttachment.objects.filter(problem=self.problem).delete()

        attachments_dir = os.path.join(self.rootdir, 'attachments')
        if not os.path.isdir(attachments_dir):
            return
        attachments = [attachment for attachment in os.listdir(attachments_dir)
                if os.path.isfile(os.path.join(attachments_dir, attachment))]

        for attachment in attachments:
            path = os.path.join(attachments_dir, attachment)
            instance = ProblemAttachment(problem=self.problem,
                    description=attachment)
            # Close the handle once the data is copied into storage.
            with open(path, 'rb') as f:
                instance.content.save(attachment, File(f))
            # Log the package filename, consistent with the other log
            # messages of this class (previously logged the raw path).
            logger.info('%s: attachment: %s', self.filename, attachment)

    def _save_original_package(self):
        """Save instance of package that would be reused by other
           instances of this problem.
        """
        original_package = OriginalPackage.objects.get_or_create(
                problem=self.problem)[0]
        original_package.problem_package = self.package
        original_package.save()
# Beispiel #15 (example separator from the source paste; commented out so
# it is not a Python syntax error)
# 0
class SinolPackage(object):
    def __init__(self, path, original_filename=None):
        """Wraps a Sinol problem package archive.

           :param path: Path to the archive on disk.
           :param original_filename: Name the archive was uploaded under;
                  used for extension detection when given.
        """
        self.filename = original_filename or path
        # '.tar.gz' is a double extension that splitext cannot detect.
        ext = ('.tar.gz'
               if self.filename.lower().endswith('.tar.gz')
               else os.path.splitext(self.filename)[1])
        self.archive = Archive(path, ext)
        self.config = None
        self.problem = None
        self.rootdir = None
        self.short_name = self._find_main_folder()

    def _find_main_folder(self):
        """Looks for the only top-level folder which has at least the
           ``in/`` and ``out/`` subfolders; returns its name or None.

           Note that depending on the archive type, there may be or
           may not be entries for the folders themselves in
           self.archive.filenames().
        """
        # Materialize the list explicitly: on Python 3 ``map`` returns a
        # one-shot iterator, which would be exhausted by the first set()
        # below and make the membership scans inside the loop see nothing.
        files = [os.path.normpath(os.path.normcase(f))
                 for f in self.archive.filenames()]
        toplevel_folders = set(f.split(os.sep)[0] for f in files)
        toplevel_folders = [f for f in toplevel_folders if slug_re.match(f)]
        problem_folders = []
        for folder in toplevel_folders:
            for required_subfolder in ('in', 'out'):
                if all(
                        f.split(os.sep)[:2] != [folder, required_subfolder]
                        for f in files):
                    break
            else:
                problem_folders.append(folder)
        if len(problem_folders) == 1:
            return problem_folders[0]
        return None

    def identify(self):
        """Returns True when the archive layout matches a Sinol package."""
        main_folder = self._find_main_folder()
        return main_folder is not None

    def _process_config_yml(self):
        """Reads ``config.yml`` from the problem dir into the associated
           :class:`ExtraConfig` instance and caches the parsed result on
           ``self.config``.
        """
        config_file = os.path.join(self.rootdir, 'config.yml')
        instance, created = \
                ExtraConfig.objects.get_or_create(problem=self.problem)
        if os.path.exists(config_file):
            # Close the handle deterministically instead of leaking it.
            with open(config_file, 'r') as config:
                instance.config = config.read()
        else:
            instance.config = ''
        instance.save()
        self.config = instance.parsed_config

    def _detect_full_name(self):
        r"""Sets the problem's full name from the ``config.yml``
           (key ``title``) or from the ``title`` tag in the LaTeX source
           file.

           Example of how the ``title`` tag may look like:
           \title{A problem}

           Note: the docstring is now a raw string so ``\t`` is not
           interpreted as a tab character.
        """
        if 'title' in self.config:
            self.problem.name = self.config['title']
            self.problem.save()
            return

        source = os.path.join(self.rootdir, 'doc', self.short_name + 'zad.tex')
        if os.path.isfile(source):
            # Close the handle deterministically instead of leaking it.
            with open(source, 'r') as tex:
                text = tex.read()
            r = re.search(r'\\title{(.+)}', text)
            if r is not None:
                self.problem.name = _decode(r.group(1), text)
                self.problem.save()

    def _compile_docs(self, docdir):
        """Compiles the LaTeX statement in *docdir* via ``make``, first
           provisioning the helper style/class files it needs.
        """
        files_dir = os.path.join(os.path.dirname(__file__), 'files')

        # fancyheadings.sty looks like a rarely available LaTeX package...
        dst_fancyheadings = os.path.join(docdir, 'fancyheadings.sty')
        if not os.path.exists(dst_fancyheadings):
            shutil.copyfile(os.path.join(files_dir, 'fancyheadings.sty'),
                            dst_fancyheadings)

        # Extract sinol.cls and oilogo.*, but do not overwrite if they
        # already exist (-k).
        execute(['tar', '-C', docdir, '-kzxf',
                 os.path.join(files_dir, 'sinol-cls.tgz')], cwd=docdir)

        try:
            execute('make', cwd=docdir)
        except ExecuteError:
            # Best effort: a failed LaTeX build is logged, not fatal.
            logger.warning('%s: failed to compile statement',
                           self.filename, exc_info=True)

    def _process_statements(self):
        """Creates the problem statement from the html zip and/or pdf in
           ``doc/``, compiling the LaTeX sources when no pdf is present.
        """
        docdir = os.path.join(self.rootdir, 'doc')
        if not os.path.isdir(docdir):
            logger.warning('%s: no docdir', self.filename)
            return

        self.problem.statements.all().delete()

        htmlzipfile = os.path.join(docdir, self.short_name + 'zad.html.zip')
        if os.path.exists(htmlzipfile):
            statement = ProblemStatement(problem=self.problem)
            # with-block closes the handle after Django copies the data
            # (previously the handle was leaked).
            with open(htmlzipfile, 'rb') as f:
                statement.content.save(self.short_name + '.html.zip',
                                       File(f))

        pdffile = os.path.join(docdir, self.short_name + 'zad.pdf')

        if not os.path.isfile(pdffile):
            self._compile_docs(docdir)
        if not os.path.isfile(pdffile):
            logger.warning('%s: no problem statement', self.filename)
            return

        statement = ProblemStatement(problem=self.problem)
        with open(pdffile, 'rb') as f:
            statement.content.save(self.short_name + '.pdf', File(f))

    def _extract_makefiles(self):
        """Unpacks the bundled sinol makefiles into the package root and
           writes a default ``makefile.in`` when none is present.
        """
        makefiles_tgz = os.path.join(os.path.dirname(__file__), 'files',
                                     'sinol-makefiles.tgz')
        Archive(makefiles_tgz).extract(to_path=self.rootdir)

        makefile_in = os.path.join(self.rootdir, 'makefile.in')
        if os.path.exists(makefile_in):
            return
        with open(makefile_in, 'w') as f:
            # Adjacent literals concatenate before the % substitution.
            f.write('MODE=wer\n'
                    'ID=%s\n'
                    'SIG=xxxx000\n' % (self.short_name, ))

    def _generate_tests(self):
        """Runs the package Makefile targets: ``ingen`` always, ``inwer``
           when an inwer source exists, ``outgen`` when any output file is
           missing.
        """
        logger.info('%s: ingen', self.filename)
        execute('make ingen', cwd=self.rootdir)

        inwer_pattern = os.path.join(self.rootdir, 'prog',
                                     '%sinwer.*' % (self.short_name, ))
        if glob.glob(inwer_pattern):
            logger.info('%s: inwer', self.filename)
            execute('make inwer', cwd=self.rootdir)
        else:
            logger.info('%s: no inwer in package', self.filename)

        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')
        # Short-circuits on the first missing .out, like the old break.
        needs_outgen = any(
            not os.path.exists(os.path.join(
                outdir, os.path.splitext(test)[0] + '.out'))
            for test in os.listdir(indir))
        if needs_outgen:
            logger.info('%s: outgen', self.filename)
            execute('make outgen', cwd=self.rootdir)

    def _process_tests(self, total_score=100):
        """Create/update Test objects from the in/ and out/ directories
        and distribute *total_score* points among the scored test groups.

        Test files must be named <short_name><group><suffix>.in; group '0'
        and 'ocen' suffixes mark example tests, which are not scored.
        Tests no longer present in the package are deleted.

        Raises:
            ProblemPackageError: for a .in file not matching the naming
                scheme.
        """
        indir = os.path.join(self.rootdir, 'in')
        outdir = os.path.join(self.rootdir, 'out')
        test_names = []
        scored_groups = set()
        # Escape the dot before 'in': the original unescaped '.' matched
        # any character, accepting misnamed files like 'odl1xin'.
        names_re = re.compile(r'^(%s(([0-9]+)([a-z]?[a-z0-9]*)))\.in$' %
                              (re.escape(self.short_name), ))

        time_limits = _stringify_keys(self.config.get('time_limits', {}))
        memory_limits = _stringify_keys(self.config.get('memory_limits', {}))

        # Find tests and create objects
        for order, test in enumerate(
                sorted(os.listdir(indir), key=naturalsort_key)):
            match = names_re.match(test)
            if not match:
                if test.endswith('.in'):
                    raise ProblemPackageError("Unrecognized test: " + test)
                continue

            # Examples for odl0ocen.in:
            basename = match.group(1)  # odl0ocen
            name = match.group(2)  # 0ocen
            group = match.group(3)  # 0
            suffix = match.group(4)  # ocen

            instance, created = Test.objects.get_or_create(
                problem=self.problem, name=name)
            # Use context managers so the file handles are closed
            # deterministically instead of being leaked.
            with open(os.path.join(indir, basename + '.in'), 'rb') as in_file:
                instance.input_file.save(basename + '.in', File(in_file))
            with open(os.path.join(outdir, basename + '.out'), 'rb') \
                    as out_file:
                instance.output_file.save(basename + '.out', File(out_file))
            if group == '0' or 'ocen' in suffix:
                # Example tests
                instance.kind = 'EXAMPLE'
                instance.group = name
            else:
                instance.kind = 'NORMAL'
                instance.group = group
                scored_groups.add(group)

            if created:
                instance.time_limit = time_limits.get(name, DEFAULT_TIME_LIMIT)
                if 'memory_limit' in self.config:
                    instance.memory_limit = self.config['memory_limit']
                else:
                    instance.memory_limit = memory_limits.get(
                        name, DEFAULT_MEMORY_LIMIT)
            instance.order = order
            instance.save()
            test_names.append(name)

        # Delete nonexistent tests
        for test in Test.objects.filter(problem=self.problem) \
                .exclude(name__in=test_names):
            logger.info('%s: deleting test %s', self.filename, test.name)
            test.delete()

        # Assign scores
        if scored_groups:
            Test.objects.filter(problem=self.problem).update(max_score=0)
            num_groups = len(scored_groups)
            # Integer division: on Python 3 '/' yields a float, producing
            # fractional scores and a TypeError when the remainder is used
            # as a slice index below.
            group_score = total_score // num_groups
            extra_points = total_score - num_groups * group_score
            # The last 'extra_points' groups (in natural order) get one
            # point more so group scores sum exactly to total_score.
            extra_score_groups = sorted(
                scored_groups,
                key=naturalsort_key)[num_groups - extra_points:]
            for group in scored_groups:
                score = group_score
                if group in extra_score_groups:
                    score += 1
                Test.objects.filter(problem=self.problem, group=group) \
                        .update(max_score=score)

    def _process_checkers(self):
        """Compile and register the problem's output checker, if present.

        Looks for prog/<short_name>chk.{cpp,c,pas}; when a source is found
        it is compiled via ``make <short_name>chk.e``. The resulting
        executable (.e) or shell script (.sh) is stored as the problem's
        OutputChecker; otherwise the stored checker is cleared.
        """
        checker_prefix = os.path.join(self.rootdir, 'prog',
                                      self.short_name + 'chk')
        checker = None

        source_candidates = [
            checker_prefix + '.cpp',
            checker_prefix + '.c',
            checker_prefix + '.pas',
        ]
        for source in source_candidates:
            if os.path.isfile(source):
                logger.info('%s: compiling checker', self.filename)
                execute(['make', self.short_name + 'chk.e'],
                        cwd=os.path.join(self.rootdir, 'prog'))
                break

        exe_candidates = [
            checker_prefix + '.e',
            checker_prefix + '.sh',
        ]
        # NOTE: no break here, so the last existing candidate wins — a .sh
        # script takes precedence over a compiled .e binary.
        for exe in exe_candidates:
            if os.path.isfile(exe):
                checker = exe

        instance = OutputChecker.objects.get(problem=self.problem)
        if checker:
            # FieldFile.save() persists the model; the context manager
            # closes the handle instead of leaking it.
            with open(checker, 'rb') as checker_file:
                instance.exe_file.save(os.path.basename(checker),
                                       File(checker_file))
        else:
            instance.exe_file = None
            instance.save()

    def _process_extra_files(self):
        """Re-register the extra compilation files listed in config.yml.

        Deletes previously stored ExtraFile objects for this problem and
        saves each file named under ``extra_compilation_files`` from the
        package's prog/ directory.

        Raises:
            ProblemPackageError: if a listed file is missing from prog/.
        """
        ExtraFile.objects.filter(problem=self.problem).delete()
        for filename in self.config.get('extra_compilation_files', ()):
            fn = os.path.join(self.rootdir, 'prog', filename)
            if not os.path.exists(fn):
                raise ProblemPackageError(
                    _("Expected extra file '%s' not "
                      "found in prog/") % (filename, ))
            instance = ExtraFile(problem=self.problem, name=filename)
            # Context manager closes the handle instead of leaking it.
            with open(fn, 'rb') as extra_file:
                instance.file.save(filename, File(extra_file))

    def _process_model_solutions(self):
        """Register model solution sources found in the prog/ directory.

        Matching filenames look like
        ``<short_name><digits><kind><digits>.<ext>`` where kind is ''
        (NORMAL), 's' (SLOW) or 'b' (INCORRECT) and ext is one of
        c/cpp/pas/java. Previously registered solutions are replaced.
        """
        ModelSolution.objects.filter(problem=self.problem).delete()

        # Anchor the pattern at the end of the name: without '$' editor
        # backups such as 'abcs1.cpp~' were picked up as model solutions.
        names_re = re.compile(r'^%s[0-9]*([bs]?)[0-9]*\.(c|cpp|pas|java)$' %
                              (re.escape(self.short_name), ))
        progdir = os.path.join(self.rootdir, 'prog')
        for name in os.listdir(progdir):
            path = os.path.join(progdir, name)
            if not os.path.isfile(path):
                continue
            match = names_re.match(name)
            if match:
                instance = ModelSolution(problem=self.problem, name=name)
                instance.kind = {
                    '': 'NORMAL',
                    's': 'SLOW',
                    'b': 'INCORRECT',
                }[match.group(1)]
                # Context manager closes the source file handle promptly.
                with open(path, 'rb') as source_file:
                    instance.source_file.save(name, File(source_file))
                logger.info('%s: model solution: %s', self.filename, name)

    def _save_original_package(self):
        """Attach the original package archive to the problem.

        Stores the uploaded archive under its original basename so it can
        be re-downloaded later.
        """
        original_package, _created = \
                OriginalPackage.objects.get_or_create(problem=self.problem)
        # Context manager closes the archive handle instead of leaking it.
        with open(self.archive.filename, 'rb') as package_file:
            original_package.package_file.save(
                os.path.basename(self.filename), File(package_file))

    def unpack(self, existing_problem=None):
        """Extract the package and build (or update) a Problem from it.

        When *existing_problem* is given it is updated in place; its short
        name must match the package's main folder. Otherwise a new Problem
        is created. Returns the saved Problem.

        Raises:
            ProblemPackageError: if the existing problem's short name
                differs from the package's main folder name.
        """
        self.short_name = self._find_main_folder()

        if not existing_problem:
            self.problem = Problem(
                name=self.short_name,
                short_name=self.short_name,
                controller_name=
                'oioioi.sinolpack.controllers.SinolProblemController')
        else:
            if existing_problem.short_name != self.short_name:
                raise ProblemPackageError(
                    _("Tried to replace problem "
                      "'%(oldname)s' with '%(newname)s'. For safety, changing "
                      "problem short name is not possible.") %
                    dict(oldname=existing_problem.short_name,
                         newname=self.short_name))
            self.problem = existing_problem

        self.problem.package_backend_name = \
                'oioioi.sinolpack.package.SinolPackageBackend'
        self.problem.save()

        extraction_dir = tempfile.mkdtemp()
        logger.info('%s: tmpdir is %s', self.filename, extraction_dir)
        try:
            self.archive.extract(to_path=extraction_dir)
            self.rootdir = os.path.join(extraction_dir, self.short_name)
            # Processing pipeline; order matters (e.g. config must be read
            # before tests are generated and processed).
            for step in (self._process_config_yml,
                         self._detect_full_name,
                         self._extract_makefiles,
                         self._process_statements,
                         self._generate_tests,
                         self._process_tests,
                         self._process_checkers,
                         self._process_extra_files,
                         self._process_model_solutions,
                         self._save_original_package):
                step()
            return self.problem
        finally:
            shutil.rmtree(extraction_dir)