Example 1
class DejaGnuSetup(Checker, DejaGnu):

    test_defs = CheckerFileField(help_text=_(
        u"Das Setup benutzt den <A HREF=\"http://www.gnu.org/software/dejagnu/dejagnu.html\">DejaGnu-Testrahmen</A>, um die Programme zu testen. Die in dieser Datei enthaltenen Definitionen gelten für alle Testfälle dieser Aufgabe. Sie werden beim Testen in die DejaGnu-Datei <TT>default.exp</TT> geschrieben. (Vergl. hierzuden Abschnitt <EM>Target dependent procedures</EM> im	<A HREF=\"http://www.gnu.org/manual/dejagnu/\" TARGET=\"_blank\">DejaGnu-Handbuch</A>.) Die Variablen PROGRAM und JAVA werden mit dem Programmnamen bzw. dem Pfad zur Java-Runtime ersetzt."
    ))

    def title(self):
        return "Tests einrichten"

    @staticmethod
    def description():
        return u"Dies ist keine wirkliche Prüfung.  Sie dient nur dazu, den nachfolgenden Tests Definitionen zur Verfügung zu stellen. Diese 'Prüfung' wird immer bestanden."

    def requires(self):
        return [Builder]

    # Set up tests.
    def run(self, env):
        self.setup_dirs(env)
        create_file(os.path.join(self.lib_dir(env),
                                 env.program() + ".exp"), u"")
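        # Substitute the PROGRAM and JAVA placeholders in the uploaded test
        # definitions before writing them to DejaGnu's default.exp.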
        defs = string.replace(encoding.get_unicode(self.test_defs.read()),
                              "PROGRAM", env.program())
        #		defs = string.replace(defs, "JAVA", join(join(dirname(dirname(__file__)),"scripts"),"java"))
        defs = string.replace(defs, "JAVA", settings.JVM_SECURE)
        create_file(os.path.join(self.config_dir(env), "default.exp"), defs)

        return self.create_result(env)
Example 2
class CheckStyleChecker(Checker):

    name = models.CharField(
        max_length=100,
        default="CheckStyle",
        help_text=_("Name to be displayed on the solution detail page."))
    configuration = CheckerFileField(help_text=_(
        "XML configuration of CheckStyle. See http://checkstyle.sourceforge.net/"
    ))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return "Runs checkstyle (http://checkstyle.sourceforge.net/)."

    def run(self, env):

        # Save the check configuration
        config_path = os.path.join(env.tmpdir(), "checks.xml")
        copy_file(self.configuration.path, config_path)

        # Run the tests
        args = [
            settings.JVM, "-cp", settings.CHECKSTYLEALLJAR, "-Dbasedir=.",
            "com.puppycrawl.tools.checkstyle.Main", "-c", "checks.xml"
        ] + [name for (name, content) in env.sources()]
        [output, error, exitcode, timed_out,
         oom_ed] = execute_arglist(args, env.tmpdir())

        # Remove Praktomat-Path-Prefixes from result:
        output = re.sub(r"^" + re.escape(env.tmpdir()) + "/+",
                        "",
                        output,
                        flags=re.MULTILINE)

        result = self.create_result(env)

        log = '<pre>' + escape(output) + '</pre>'
        if timed_out:
            log = log + '<div class="error">Timeout occurred!</div>'
        if oom_ed:
            log = log + '<div class="error">Out of memory!</div>'
        result.set_log(log)

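        # Passed only if the run completed normally and Checkstyle reported a
        # clean audit: the output must start with "Starting audit..." followed
        # directly by "Audit done.", i.e. no violations were printed in between.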
        result.set_passed(
            not timed_out and not oom_ed and not exitcode
            and re.match('Starting audit...\nAudit done.', output) is not None)

        return result
Example 3
class DejaGnuTester(Checker, DejaGnu):
    """ Run a test case on the program.  Requires a previous `DejaGnuSetup'. """

    name = models.CharField(max_length=100,
                            help_text=_("The name of the Test"))
    test_case = CheckerFileField(help_text=_(
        u"In den folgenden DejaGnu-Testfällen werden typischerweise Funktionen aufgerufen, die beim vorherigen Schritt <EM>Tests einrichten</EM> definiert wurden.	 Siehe	auch den Abschnitt <EM>How to write a test case</EM> im <A TARGET=\"_blank\" HREF=\"http://www.gnu.org/manual/dejagnu/\">DejaGnu-Handbuch</A>."
    ))

    def __unicode__(self):
        return self.name

    def title(self):
        return self.name

    @staticmethod
    def description():
        return u"Diese Prüfung ist bestanden, wenn alle Testfälle zum erwarteten Ergebnis führten."

    def requires(self):
        return [DejaGnuSetup]

    # Return True if the DejaGnu output indicates that all tests passed.
    def output_ok(self, output):
        return (RXFAIL.search(output) == None
                and string.find(output, "runtest completed") >= 0
                and string.find(output, "non-expected failures") < 0
                and string.find(output, "unexpected failures") < 0)

    def htmlize_output(self, log):
        # Always kill the author's name from the log
        log = re.sub(RXRUN_BY, "Run By " + settings.SITE_NAME + " on ", log)

        # Clean the output
        log = re.sub(RXREMOVE, "", log)

        log = re.sub(re.escape(settings.JVM_SECURE),
                     os.path.basename(settings.JVM_SECURE), log)

        # HTMLize it all
        log = escape(log)

        # Every line that contains a passed message is to be enhanced.
        log = re.sub(RXPASS, r'\1 <B class="passed"> \2 </B> \3', log)
        # Every line that contains a failure message is to be enhanced.
        return "<TT><PRE>" + re.sub(RXFAIL, r'\1 <B class="error"> \2 </B> \3',
                                    log) + "</PRE></TT>"

    # Run tests.  Return a CheckerResult.
    def run(self, env):

        # Save public test cases in `tests.exp'
        tests_exp = os.path.join(self.tests_dir(env), "tests.exp")
        test_cases = string.replace(
            encoding.get_unicode(self.test_case.read()), u"PROGRAM",
            env.program())
        create_file(tests_exp, test_cases)

        testsuite = self.testsuite_dir(env)
        program_name = env.program()

        if " " in program_name:
            result = self.create_result(env)
            result.set_log(
                "<pre><b class=\"fail\">Error</b>: Path to the main() - source file contains spaces.\n\nFor Java .zip submittions, the directory hierarchy of the .zip file must excactly match the package structure.\nThe default package must correspond to the .zip root directory.</pre>"
            )
            result.set_passed(False)
            return result

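        # Invoke DejaGnu's runtest with the submitted program as the "tool":
        # it loads the definitions written to default.exp by DejaGnuSetup and
        # runs the test cases stored in tests.exp above.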
        cmd = [settings.DEJAGNU_RUNTEST, "--tool", program_name, "tests.exp"]

        environ = {}
        environ['JAVA'] = settings.JVM
        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'scripts')
        environ['POLICY'] = join(script_dir, "praktomat.policy")
        environ['USER'] = env.user().get_full_name().encode(
            sys.getdefaultencoding(), 'ignore')
        environ['HOME'] = testsuite
        environ['UPLOAD_ROOT'] = settings.UPLOAD_ROOT

        [output, error, exitcode, timed_out, oom_ed] = \
                          execute_arglist(
                              cmd,
                              testsuite,
                              environment_variables=environ,
                              timeout=settings.TEST_TIMEOUT,
                              fileseeklimit=settings.TEST_MAXFILESIZE,
                              extradirs=[env.tmpdir(), script_dir]
                              )
        output = encoding.get_unicode(output)

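        # runtest writes a summary (<tool>.sum) and a detailed log (<tool>.log)
        # into the testsuite directory; fall back to empty strings if they are
        # missing, e.g. because runtest aborted early.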
        try:
            summary = encoding.get_unicode(
                open(os.path.join(testsuite, program_name + ".sum")).read())
            log = encoding.get_unicode(
                open(os.path.join(testsuite, program_name + ".log")).read())
        except:
            summary = ""
            log = ""

        complete_output = self.htmlize_output(output + log)

        result = self.create_result(env)
        result.set_log(complete_output, timed_out=timed_out or oom_ed)
        result.set_passed(not exitcode and not timed_out and not oom_ed
                          and self.output_ok(complete_output))
        return result
Example 4
class ScriptChecker(Checker):

    name = models.CharField(
        max_length=100,
        default="Externen Tutor ausführen",
        help_text=_("Name to be displayed on the solution detail page."))
    shell_script = CheckerFileField(help_text=_(
        "A script (e.g. a shell script) to run. Its output will be displayed to the user (if public), the checker will succeed if it returns an exit code of 0. The environment will contain the variables JAVA and PROGRAM."
    ))
    remove = models.CharField(
        max_length=5000,
        blank=True,
        help_text=
        _("Regular expression describing passages to be removed from the output."
          ))
    returns_html = models.BooleanField(
        default=False,
        help_text=
        _("If the script doesn't return HTML it will be enclosed in &lt; pre &gt; tags."
          ))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn das externe Programm keinen Fehlercode liefert."

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work. 
		This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        copy_file(self.shell_script.path, env.tmpdir(), to_is_directory=True)
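        # Make the uploaded script executable: 0750 = owner rwx, group rx.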
        os.chmod(env.tmpdir() + '/' + os.path.basename(self.shell_script.name),
                 0750)

        # Run the tests -- execute the uploaded shell script with the submitted file names as arguments

        filenames = [name for (name, content) in env.sources()]
        args = [env.tmpdir() + '/' + os.path.basename(self.shell_script.name)
                ] + filenames

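        # Environment visible to the script: a sandboxed HOME and the user id,
        # plus the JAVA and PROGRAM variables promised in the help text, along
        # with JAVA_SECURE and POLICY.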
        environ = {}
        environ['USER'] = str(env.user().id)
        environ['HOME'] = env.tmpdir()
        environ['JAVA'] = settings.JVM
        environ['JAVA_SECURE'] = settings.JVM_SECURE
        environ['POLICY'] = settings.JVM_POLICY
        environ['PROGRAM'] = env.program() or ''

        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'scripts')

        [output, error, exitcode, timed_out, oom_ed] = execute_arglist(
            args,
            working_directory=env.tmpdir(),
            environment_variables=environ,
            timeout=settings.TEST_TIMEOUT,
            maxmem=settings.TEST_MAXMEM,
            fileseeklimit=settings.TEST_MAXFILESIZE,
            extradirs=[script_dir],
        )
        output = force_unicode(output, errors='replace')

        result = self.create_result(env)
        (output, truncated) = truncated_log(output)

        if self.remove:
            output = re.sub(self.remove, "", output)
        if not self.returns_html or truncated or timed_out or oom_ed:
            output = '<pre>' + escape(output) + '</pre>'

        result.set_log(output,
                       timed_out=timed_out,
                       truncated=truncated,
                       oom_ed=oom_ed)
        result.set_passed(not exitcode and not timed_out and not oom_ed
                          and not truncated)

        return result
Example 5
class DiffChecker(Checker):

    shell_script = CheckerFileField(help_text=_(
        "The shell script whose output for the given input file is compared to the given output file: The substrings JAVA and PROGRAM got replaced by Praktomat determined values."
    ))
    input_file = CheckerFileField(
        blank=True,
        help_text=_("The file containing the input for the program."))
    output_file = CheckerFileField(
        blank=True,
        help_text=_("The file containing the output for the program."))

    def clean(self):
        super(DiffChecker, self).clean()
        if (not self.shell_script or not self.input_file
                or not self.output_file):
            raise ValidationError(
                "Robert says: DiffChecker have to have an Shell script, an Inputfile and an Outputfile"
            )

    def title(self):
        """ Returns the title for this checker category. """
        return u"Ausgaben mit 'diff' prüfen."

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn erwartete und tatsächliche Ausgabe übereinstimmen."

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work. 
        This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        test_dir = env.tmpdir()
        environ = {}
        if self.input_file:
            input_path = os.path.join(test_dir,
                                      os.path.basename(self.input_file.path))
            environ['INPUTFILE'] = os.path.basename(self.input_file.path)
            copy_file(self.input_file.path, input_path)
        if self.output_file:
            output_path = os.path.join(test_dir,
                                       os.path.basename(self.output_file.path))
            environ['OUTPUTFILE'] = os.path.basename(self.output_file.path)
            copy_file(self.output_file.path, output_path)
        replace = [(u'PROGRAM', env.program())] if env.program() else []
        replace += [("JAVA", settings.JVM_SECURE)]
        #copy_file_to_directory(self.shell_script.path, test_dir, replace=replace)
        copy_file(self.shell_script.path, test_dir, to_is_directory=True)

        # Some time after 2013, Praktomat lost copy_file_to_directory's replace parameter, so the placeholder substitution is done by hand below.
        to_path = os.path.join(test_dir,
                               os.path.basename(self.shell_script.path))
        with open(to_path) as fd:
            content = encoding.get_unicode(fd.read())
            for (old, new) in replace:
                content = content.replace(old, new)
        with open(to_path, 'w') as fd:
            fd.write(encoding.get_utf8(content))

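        # The uploaded script is expected to run the program on INPUTFILE and
        # compare its output against OUTPUTFILE (e.g. with diff); only the
        # script's exit code decides whether the check passes.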
        args = ["sh", os.path.basename(self.shell_script.name)]
        #environ['USER'] = unicode(env.user().get_full_name()).encode('utf-8')
        environ['USER'] = env.user().username  # gets overwritten with the praktomat test user's name, therefore:
        environ['AUTHOR'] = env.solution().author.username  # will not be overwritten!
        environ['HOME'] = test_dir

        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'scripts')

        #[output, error, exitcode,_] = execute_arglist(args, working_directory=test_dir, environment_variables=environ)

        [output, error, exitcode, timed_out, oom_ed] = execute_arglist(
            args,
            working_directory=test_dir,
            environment_variables=environ,
            timeout=settings.TEST_TIMEOUT,
            maxmem=settings.TEST_MAXMEM,
            fileseeklimit=settings.TEST_MAXFILESIZE,
            extradirs=[script_dir],
        )
        output = force_unicode(output, errors='replace')

        result = CheckerResult(checker=self, solution=env.solution())

        result.set_log('<pre>' + escape(output) + '</pre>')

        result.set_passed(not exitcode)

        return result
Example 6
class PythonChecker(ProFormAChecker):
    name = models.CharField(
        max_length=100,
        default="Externen Tutor ausführen",
        help_text=_("Name to be displayed on the solution detail page."))
    doctest = CheckerFileField(help_text=_("The doctest script."))
    remove = models.CharField(
        max_length=5000,
        blank=True,
        help_text=
        _("Regular expression describing passages to be removed from the output."
          ))
    returns_html = models.BooleanField(
        default=False,
        help_text=
        _("If the script doesn't return HTML it will be enclosed in &lt; pre &gt; tags."
          ))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn das externe Programm keinen Fehlercode liefert."

    def output_ok(self, output):
        return RXFAIL.search(output) is None

    def output_ok_positiv(self, output):
        return RXPASS.search(output) is not None

    def checkSubmission(self, submission):
        #  if RXSECURE.search(submission) or RXCODING.search(submission) or RXSHEBANG.search(submission):
        return bool(RXSECURE.search(submission) or RXSHEBANG.search(submission))

    def removeSystemPath(self, output, env):
        output = re.sub(env.tmpdir(), "", output, flags=re.MULTILINE)
        output = re.sub("/usr/lib/python2.7/", "", output, flags=re.MULTILINE)
        return output

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work.
        This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        # copy files and unzip zip file if submission consists of just a zip file.
        self.prepare_run(env)

        test_dir = env.tmpdir()
        replace = [(u'PROGRAM', env.program())] if env.program() else []
        copy_file(self.doctest.path,
                  os.path.join(test_dir, os.path.basename(self.doctest.path)))
        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'scripts')
        # copy python interpreter with all its shared libraries into sandbox
        # since it is not available because of chroot in restrict

        # todo: make properly
        copy_file('/usr/bin/python3', test_dir + '/python3')
        self.copy_shared_objects(env)

        # todo: make properly
        # python3 instead of 3.8 and prepare outside checker
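        # Copy the system's Python 3.8 standard library into the sandbox so the
        # interpreter copied above can start inside the chroot.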
        createpathonlib = "(cd / && tar -cf - usr/lib/python3.8) | (cd " + test_dir + " && tar -xf -)"
        os.system(createpathonlib)

        # Run the tests -- execute dumped shell script 'script.sh'
        cmd = ["./python3", os.path.basename(self.doctest.name), "-v"]
        environ = dict()
        environ['USER'] = env.user().get_full_name()
        environ['HOME'] = test_dir

        for (name, content) in env.sources():
            if self.checkSubmission(content):
                result = CheckerResult(checker=self)
                result.set_passed(False)
                result.set_log(
                    "Bitte überarbeiten Sie Ihren Programmcode: "
                    "Betriebssystem kritische Befehle sind verboten")
                return result

        # (output, error, exitcode) = execute(args, working_directory=test_dir, environment_variables=environ)

        [output, error, exitcode, timed_out, oom_ed] = execute_arglist(
            cmd,
            env.tmpdir(),
            environment_variables=environ,
            # use_default_user_configuration=True,
            timeout=settings.TEST_TIMEOUT,
            fileseeklimit=settings.TEST_MAXFILESIZE,
            # extradirs=[script_dir]
        )

        # cleanup sandbox
        # todo: make properly shutil...
        try:
            os.system('rm -rf ' + test_dir + '/lib')
            os.system('rm -rf ' + test_dir + '/lib64')
            os.system('rm -rf ' + test_dir + '/usr/lib')
            os.system('rm -rf ' + test_dir + '/__pycache__')
            os.system('rm ' + test_dir + '/python3')
        except:
            logger.error('error while cleaning up python sandbox')

        logger.debug(output)
        logger.debug(error)
        result = self.create_result(env)
        (output, truncated) = truncated_log(output)

        if self.remove:
            output = re.sub(self.remove, "", output)

        # Remove Praktomat-Path-Prefixes from result:
        output = re.sub(r"^" + re.escape(env.tmpdir()) + "/+",
                        "",
                        output,
                        flags=re.MULTILINE)
        if ProFormAChecker.retrieve_subtest_results:
            # plain text output
            result.set_log(output,
                           timed_out=timed_out,
                           truncated=truncated,
                           log_format=CheckerResult.TEXT_LOG)
        else:
            if not self.returns_html:
                output = '<pre>' + output + '</pre>'
            output = '<pre>' + '\n\n======== Test Results ======\n\n</pre><br/><pre>' + \
                 escape(output) + '</pre>'
            result.set_log(output, timed_out=timed_out, truncated=truncated)
        result.set_passed(not exitcode and not timed_out
                          and self.output_ok_positiv(output) and not truncated)

        return result
Example 7
class CheckerWithFile(Checker):
    class Meta:
        abstract = True

    file = CheckerFileField(
        help_text=_("The file that is copied into the sandbox"))
    filename = models.CharField(
        max_length=500,
        blank=True,
        help_text=
        _("What the file will be named in the sandbox. If empty, we try to guess the right filename!"
          ))
    path = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("Subfolder in the sandbox which shall contain the file."))
    unpack_zipfile = models.BooleanField(
        default=False,
        help_text=
        _("Unpack the zip file into the given subfolder. (It will be an error if the file is not a zip file; the filename is ignored.)"
          ))
    is_sourcecode = models.BooleanField(
        default=False,
        help_text=
        _("The file is (or, if it is a zipfile to be unpacked: contains) source code"
          ))
    include_in_solution_download = models.BooleanField(
        default=True,
        help_text=
        _("The file is (or, if it is a zipfile to be unpacked: its content) is included in \"full\" solution download .zip files"
          ))

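    # When True, files copied into the sandbox are also registered as sources
    # in the checker environment (see add_to_environment below).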
    _add_to_environment = True

    def path_relative_to_sandbox(self):
        filename = self.filename if self.filename else self.file.path
        return os.path.join(string.lstrip(self.path, "/ "),
                            os.path.basename(filename))

    def add_to_environment(self, env, path):
        if (self._add_to_environment):
            env.add_source(path, file(os.path.join(env.tmpdir(), path)).read())

    def run_file(self, env):
        result = self.create_result(env)
        clashes = []
        cleanpath = string.lstrip(self.path, "/ ")
        if (self.unpack_zipfile):
            path = os.path.join(env.tmpdir(), cleanpath)
            unpack_zipfile_to(
                self.file.path, path,
                lambda n: clashes.append(os.path.join(cleanpath, n)),
                lambda f: self.add_to_environment(env,
                                                  os.path.join(cleanpath, f)))
        else:
            filename = self.filename if self.filename else self.file.path
            source_path = os.path.join(cleanpath, os.path.basename(filename))
            path = os.path.join(env.tmpdir(), source_path)
            overridden = os.path.exists(path)
            copy_file(self.file.path, path, binary=True)
            if overridden:
                clashes.append(
                    os.path.join(self.path, os.path.basename(filename)))
            self.add_to_environment(env, source_path)

        result.set_passed(not clashes)
        if clashes:
            result.set_log(
                "These files already existed. Do NOT include them in your submissions:<br/><ul>\n"
                +
                "\n".join(map(lambda f: "<li>%s</li>" % escape(f), clashes)) +
                "</ul>")
        return result
Example 8
class CheckerWithFile(Checker):
    class Meta:
        abstract = True

    file = CheckerFileField(
        help_text=_("The file that is copied into the sandbox"))
    filename = models.CharField(
        max_length=500,
        blank=True,
        help_text=
        _("What the file will be named in the sandbox. If empty, we try to guess the right filename!"
          ))
    path = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("Subfolder in the sandbox which shall contain the file."))
    unpack_zipfile = models.BooleanField(
        default=False,
        help_text=
        _("Unpack the zip file into the given subfolder. (It will be an error if the file is not a zip file; the filename is ignored.)"
          ))
    is_sourcecode = models.BooleanField(
        default=False,
        help_text=
        _("The file is (or, if it is a zipfile to be unpacked: contains) source code"
          ))
    include_in_solution_download = models.BooleanField(
        default=True,
        help_text=
        _("The file is (or, if it is a zipfile to be unpacked: its content) is included in \"full\" solution download .zip files"
          ))

    _add_to_environment = True

    def path_relative_to_sandbox(self):
        filename = self.filename if self.filename else self.file.path
        return os.path.join(self.path.lstrip("/ "), os.path.basename(filename))

    def add_to_environment(self, env, path):
        if (self._add_to_environment):
            with open(os.path.join(env.tmpdir(), path), 'rb') as fd:
                env.add_source(path, fd.read())

    def run_file(self, env):
        """ Tries to unpack all necessary files.
        Fails if there is a clash with files submitted by the student.
        In that case, this function creates and returns the (failed) CheckerResult.
        Otherwise (if the unpacking succeeds), this function returns None.
        """
        clashes = []
        cleanpath = self.path.lstrip("/ ")
        if (self.unpack_zipfile):
            path = os.path.join(env.tmpdir(), cleanpath)
            unpack_zipfile_to(
                self.file.path, path,
                lambda n: clashes.append(os.path.join(cleanpath, n)),
                lambda f: self.add_to_environment(env,
                                                  os.path.join(cleanpath, f)))
        else:
            filename = self.filename if self.filename else self.file.path
            source_path = os.path.join(cleanpath, os.path.basename(filename))
            path = os.path.join(env.tmpdir(), source_path)
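            # Remember whether a file of the same name already existed in the
            # sandbox (e.g. submitted by the student) before overwriting it.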
            overridden = os.path.exists(path)
            copy_file(self.file.path, path)
            if overridden:
                clashes.append(
                    os.path.join(self.path, os.path.basename(filename)))
            self.add_to_environment(env, source_path)

        if clashes:
            result = self.create_result(env)
            result.set_passed(False)
            result.set_log(
                "These files already existed. Do NOT include them in your submissions:<br/><ul>\n"
                + "\n".join(["<li>%s</li>" % escape(f)
                             for f in clashes]) + "</ul>")
            return result

        return None
Example 9
class CheckStyleChecker(ProFormAChecker):

    name = models.CharField(
        max_length=100,
        default="CheckStyle",
        help_text=_("Name to be displayed on the solution detail page."))
    configuration = CheckerFileField(help_text=_(
        "XML configuration of CheckStyle. See http://checkstyle.sourceforge.net/"
    ))

    allowedWarnings = models.IntegerField(
        default=0,
        help_text=_("How many warnings are allowed before the checker "
                    "is not passed"))
    allowedErrors = models.IntegerField(
        default=0,
        help_text=_("How many errors are allowed before the checker "
                    "is not passed"))
    regText = models.CharField(
        default=".*",
        max_length=5000,
        help_text=_("Regular expression describing files to be analysed."))

    CHECKSTYLE_CHOICES = (
        (u'check-6.2', u'Checkstyle 6.2 all'),
        (u'check-7.6', u'Checkstyle 7.6 all'),
        (u'check-8.23', u'Checkstyle 8.23 all'),
        (u'check-8.29', u'Checkstyle 8.29 all'),
        (u'check-10.1', u'Checkstyle 10.1 all'),
    )
    check_version = models.CharField(max_length=16,
                                     choices=CHECKSTYLE_CHOICES,
                                     default="check-8.29")

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return "Runs checkstyle (http://checkstyle.sourceforge.net/)."

    def run(self, env):
        self.copy_files(env)

        # Save the check configuration
        config_path = os.path.join(env.tmpdir(), "checks.xml")
        copy_file(self.configuration.path, config_path)

        # Run the tests
        # tests are run unsafely because checkstyle fails when the network is missing
        args = [settings.JVM, "-cp", settings.CHECKSTYLE_VER[self.check_version], "-Dbasedir=.",
                "com.puppycrawl.tools.checkstyle.Main", "-c", "checks.xml"] + \
               [name for (name, content) in env.sources()] # + [" > ", env.tmpdir() + "/output.txt"]
        [output, error, exitcode, timed_out,
         oom_ed] = execute_arglist(args, env.tmpdir(), unsafe=True)

        # Remove Praktomat-Path-Prefixes from result:
        output = re.sub(r"" + re.escape(env.tmpdir() + "/") + "+",
                        "",
                        output,
                        flags=re.MULTILINE)
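        # Count Checkstyle's [WARN] and [ERROR] markers so they can be compared
        # against the configured thresholds below.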
        warnings = output.count('[WARN]')
        errors = output.count('[ERROR]')

        result = self.create_result(env)
        (output, truncated) = truncated_log(output)

        # logger.debug('Exitcode is ' + str(exitcode))
        if ProFormAChecker.retrieve_subtest_results:
            # simply use plaintext
            result.set_log(output,
                           timed_out=timed_out,
                           truncated=False,
                           oom_ed=oom_ed,
                           log_format=CheckerResult.TEXT_LOG)
            regexp = '\[(?<msgtype>[A-Z]+)\]\s(?<filename>\/?(.+\/)*(.+)\.([^\s:]+)):(?<line>[0-9]+)(:(?<column>[0-9]+))?:\s(?<text>.+\.)\s\[(?<short>\w+)\]'
            result.set_regexp(regexp)
        else:
            # old handling (e.g. for LON-CAPA)
            log = '<pre>' + '\n\n======== Test Results ======\n\n</pre><br/><pre>' + \
                 escape(output) + '</pre>'
            # log = '<pre>' + escape(output) + '</pre>'
            if timed_out:
                log = log + '<div class="error">Timeout occurred!</div>'
            if oom_ed:
                log = log + '<div class="error">Out of memory!</div>'
            result.set_log(log)

        result.set_passed(not timed_out and not oom_ed and not exitcode
                          and warnings <= self.allowedWarnings
                          and errors <= self.allowedErrors and not truncated)
        # result.set_passed(not timed_out and not oom_ed and not exitcode and (not re.match('Starting audit...\nAudit done.', output) == None))

        return result
Example 10
class SetlXChecker(ProFormAChecker):

    name = models.CharField(max_length=100,
                            default="SetlXChecker",
                            help_text=_("Name to be displayed "
                                        "on the solution detail page."))
    testFile = CheckerFileField(
        help_text=_("Test File which is appended to the submission"))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Check http://randoom.org/Software/SetlX"

    def secureSubmission(self, submission):
        return RXSECURE.search(submission) is None

    def conCat(self, testdir, studentSubmission, testFile):
        #if studentSubmission.__class__.__name__ != 'unicode':
        #    raise Exception('unsupported class ' + studentSubmission.__class__.__name__)

        import codecs
        with codecs.open(os.path.join(testdir, "concat.stlx"),
                         encoding='utf-8',
                         mode='w+') as concat:
            logger.debug('studentSubmission class name is ' +
                         studentSubmission.__class__.__name__)
            f = codecs.open(self.testFile.path, encoding='utf-8')
            testfile_content = f.read()
            sequence = [studentSubmission, testfile_content]
            output = ''.join(sequence)
            concat.write(output)
            return concat

    def run(self, env):

        # Setup
        self.copy_files(env)
        test_dir = env.tmpdir()
        replace = [(u'PROGRAM', env.program())] if env.program() else []
        copy_file(self.testFile.path,
                  os.path.join(test_dir, os.path.basename(self.testFile.path)))
        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                  'scripts')

        # check: only one submission file allowed
        result = self.create_result(env)
        if len(env.sources()) > 1:
            result.set_log("Sie dürfen nur eine Datei angegeben!")
            result.set_passed(False)
            return result

        # check submission
        for (name, content) in env.sources():
            if not (self.secureSubmission(content)):
                result.set_passed(False)
                result.set_log("Bitte keine IO-Befehle verwenden")
                return result
            else:
                #concat test
                #try:
                self.conCat(test_dir, content, self.testFile)
                #except UnicodeEncodeError:
                #    result.set_passed(False)
                #    result.set_log("Special characters can pose a problem. Vermeiden Sie Umlaute im Source Code "
                #                   "und verwenden Sie kein <, > oder & in XML Dokumenten.")
                #    return result

        # complete test
        cmd = [
            settings.JVM, '-cp', settings.SETLXJAR,
            "org.randoom.setlx.pc.ui.SetlX", "concat.stlx"
        ]
        # (output, error, exitcode) = execute(args, env.tmpdir())

        environ = {}
        environ['UPLOAD_ROOT'] = settings.UPLOAD_ROOT

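        # Run the SetlX interpreter on concat.stlx, the concatenation of the
        # submission and the uploaded test file produced by conCat above.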
        [output, error, exitcode, timed_out,
         oom_ed] = execute_arglist(cmd,
                                   env.tmpdir(),
                                   environment_variables=environ,
                                   timeout=settings.TEST_TIMEOUT,
                                   fileseeklimit=settings.TEST_MAXFILESIZE,
                                   extradirs=[script_dir],
                                   unsafe=True)

        # [output, error, exitcode, timed_out] = execute_arglist(cmd, env.tmpdir(),
        #  use_default_user_configuration=True,
        #  timeout=settings.TEST_TIMEOUT,
        #  fileseeklimit=settings.TEST_MAXFILESIZE,
        #  extradirs=[script_dir])

        (output, truncated) = truncated_log(output)

        # Remove Praktomat-Path-Prefixes from result:
        # output = re.sub(r"^"+re.escape(env.tmpdir())+"/+", "", output, flags=re.MULTILINE)
        output = re.sub(r"" + re.escape(env.tmpdir() + "/") + "+",
                        "",
                        output,
                        flags=re.MULTILINE)

        passed = True
        if len(output.strip()) == 0:
            output = "no output"
            passed = False

        if ProFormAChecker.retrieve_subtest_results:
            # plain text output
            if passed and (RXFAIL.search(output) is not None or exitcode):
                # add regular expression in case of an error
                regexp = 'line\ (?<line>[0-9]+)(:(?<column>[0-9]+))?\s(?<text>.+)'
                result.set_regexp(regexp)
            result.set_log(output,
                           timed_out=timed_out,
                           truncated=truncated,
                           log_format=CheckerResult.TEXT_LOG)
        else:
            output = '<pre>' + '\n\n======== Test Results ======\n\n</pre><br/><pre>' + \
                 escape(output) + '</pre>'
            result.set_log(output, timed_out=timed_out, truncated=truncated)
        result.set_passed(passed and not exitcode and not timed_out
                          and (RXFAIL.search(output) is None)
                          and not truncated)

        return result