class DiffChecker(Checker):

    shell_script = CheckerFileField(help_text=_(
        "The shell script whose output for the given input file is compared to the given output file."))
    input_file = CheckerFileField(
        blank=True,
        help_text=_("The file containing the input for the program."))
    output_file = CheckerFileField(
        blank=True,
        help_text=_("The file containing the output for the program."))

    def title(self):
        """ Returns the title for this checker category. """
        return u"Ausgaben mit 'diff' prüfen."

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn erwartete und tatsächliche Ausgabe übereinstimmen."

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work.
        This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        test_dir = env.tmpdir()
        if self.input_file:
            copy_file(self.input_file.path, test_dir)
        if self.output_file:
            copy_file(self.output_file.path, test_dir)
        replace = [(u'PROGRAM', env.program())] if env.program() else []
        copy_file_to_directory(self.shell_script.path, test_dir, replace=replace)

        args = ["sh", os.path.basename(self.shell_script.name)]
        environ = {}
        environ['USER'] = env.user().get_full_name()
        environ['HOME'] = test_dir

        (output, error, exitcode) = execute(args, working_directory=test_dir,
                                            environment_variables=environ)

        result = CheckerResult(checker=self)
        result.set_log(output)
        result.set_passed(not error)

        return result
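
# A minimal, self-contained sketch of the comparison the DiffChecker delegates to its
# uploaded shell script: run the submitted program on the input file and compare the
# captured output against the expected output. This helper is hypothetical and not part
# of Praktomat; the names `program_cmd`, `input_path` and `expected_output_path` are
# assumptions for illustration only.
def _diff_sketch(program_cmd, input_path, expected_output_path, workdir):
    import subprocess
    # program_cmd is an argv list, e.g. ['java', 'Main']; raises CalledProcessError
    # on a non-zero exit code, which would also fail the check.
    with open(input_path) as stdin_file:
        actual = subprocess.check_output(program_cmd, stdin=stdin_file, cwd=workdir)
    with open(expected_output_path) as expected_file:
        expected = expected_file.read()
    # Passed iff expected and actual output match, mirroring `diff`'s exit code 0.
    return actual == expected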
class DejaGnuSetup(Checker, DejaGnu):

    test_defs = CheckerFileField(help_text=_(
        u"Das Setup benutzt den <A HREF=\"http://www.gnu.org/software/dejagnu/dejagnu.html\">DejaGnu-Testrahmen</A>, um die Programme zu testen. Die in dieser Datei enthaltenen Definitionen gelten für alle Testfälle dieser Aufgabe. Sie werden beim Testen in die DejaGnu-Datei <TT>default.exp</TT> geschrieben. (Vergl. hierzu den Abschnitt <EM>Target dependent procedures</EM> im <A HREF=\"http://www.gnu.org/manual/dejagnu/\" TARGET=\"_blank\">DejaGnu-Handbuch</A>.) Die Variablen PROGRAM und JAVA werden mit dem Programmnamen bzw. dem Pfad zur Java-Runtime ersetzt."))

    def title(self):
        return "Tests einrichten"

    @staticmethod
    def description():
        return u"Dies ist keine wirkliche Prüfung. Sie dient nur dazu, den nachfolgenden Tests Definitionen zur Verfügung zu stellen. Diese 'Prüfung' wird immer bestanden."

    def requires(self):
        return [Builder]

    # Set up tests.
    def run(self, env):
        self.setup_dirs(env)
        create_file(os.path.join(self.lib_dir(env), env.program() + ".exp"), u"")
        defs = string.replace(encoding.get_unicode(self.test_defs.read()), "PROGRAM", env.program())
        # defs = string.replace(defs, "JAVA", join(join(dirname(dirname(__file__)), "scripts"), "java"))
        defs = string.replace(defs, "JAVA", settings.JVM_SECURE)
        create_file(os.path.join(self.config_dir(env), "default.exp"), defs)
        return self.create_result(env)
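
# A minimal sketch of the placeholder substitution DejaGnuSetup.run() performs before
# writing default.exp. The function and its argument names are assumptions for
# illustration only; the real code uses the Python 2 string module above.
def _substitute_placeholders_sketch(defs_text, program_name, jvm_path):
    # string.replace(s, old, new) is the Python 2 module-level equivalent of s.replace(old, new).
    defs_text = defs_text.replace(u"PROGRAM", program_name)
    defs_text = defs_text.replace(u"JAVA", jvm_path)
    return defs_text

# Example (hypothetical values): _substitute_placeholders_sketch(u"spawn JAVA PROGRAM",
# u"Main", u"/usr/bin/java") yields u"spawn /usr/bin/java Main".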
class JUnit3Checker(Checker):
    """ Checker for JUnit3 unit tests. Deprecated. Please use JUnitChecker. """

    # Add fields to configure checker instances. You can use any of the Django fields. (See online documentation.)
    # The fields created, task, public, required and always will be inherited from the abstract base class Checker.
    name = models.CharField(max_length=100, help_text=_("The name of the Test"))
    test_case = CheckerFileField(help_text=_(u"Die JUnit3-Testfälle als Java .class File"))

    def title(self):
        return u"JUnit3 Checker"

    @staticmethod
    def description():
        return u"Deprecated!!! Please use the JUnitChecker"

    def requires(self):
        return [Builder]

    def run(self, env):
        """ Do whatever this checker is supposed to do. """
        copy_file(self.test_case.path, env.tmpdir(), to_is_directory=True, binary=True)
        junit_class = os.path.basename(self.test_case.path).rsplit('.', 1).pop(0)
        cmd = settings.JUNIT38 + " -text " + junit_class
        [output, error, exitcode] = execute(cmd, env.tmpdir())
        result = CheckerResult(checker=self)
        result.set_log('<pre>' + escape(output) + '</pre>')
        result.set_passed(not exitcode)
        return result
class ScriptChecker(Checker):

    name = models.CharField(
        max_length=100,
        default="Externen Tutor ausführen",
        help_text=_("Name to be displayed on the solution detail page."))
    shell_script = CheckerFileField(help_text=_(
        "The shell script to run. The check passes if it exits without an error code."))
    remove = models.CharField(
        max_length=5000,
        blank=True,
        help_text=_("Regular expression describing passages to be removed from the output."))
    returns_html = models.BooleanField(
        default=False,
        help_text=_("If the script doesn't return HTML it will be enclosed in < pre > tags."))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn das externe Programm keinen Fehlercode liefert."

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work.
        This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        test_dir = env.tmpdir()
        replace = [(u'PROGRAM', env.program())] if env.program() else []
        copy_file_to_directory(self.shell_script.path, test_dir, replace=replace)

        # Run the tests -- execute the dumped shell script
        args = ["sh", os.path.basename(self.shell_script.name)]
        environ = {}
        environ['USER'] = env.user().get_full_name()
        environ['HOME'] = test_dir

        (output, error, exitcode) = execute(args, working_directory=test_dir,
                                            environment_variables=environ)

        result = CheckerResult(checker=self)
        if self.remove:
            output = re.sub(self.remove, "", output)
        if not self.returns_html:
            output = '<pre>' + output + '</pre>'

        result.set_log(output)
        result.set_passed(not error)

        return result
class CheckStyleChecker(Checker):

    name = models.CharField(
        max_length=100,
        default="CheckStyle",
        help_text=_("Name to be displayed on the solution detail page."))
    configuration = CheckerFileField(help_text=_(
        "XML configuration of CheckStyle. See http://checkstyle.sourceforge.net/"))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Runs checkstyle (http://checkstyle.sourceforge.net/)."

    def run(self, env):
        # Save the check configuration
        config_path = os.path.join(env.tmpdir(), "checks.xml")
        copy_file(self.configuration.path, config_path)

        # Run the tests
        args = [settings.JVM, "-cp", settings.CHECKSTYLEALLJAR, "-Dbasedir=.",
                "com.puppycrawl.tools.checkstyle.Main", "-c", "checks.xml"] + \
               [name for (name, content) in env.sources()]
        [output, error, exitcode, timed_out, oom_ed] = execute_arglist(args, env.tmpdir())

        # Remove Praktomat path prefixes from the result:
        output = re.sub(r"^" + re.escape(env.tmpdir()) + "/+", "", output, flags=re.MULTILINE)

        result = self.create_result(env)

        log = '<pre>' + escape(output) + '</pre>'
        if timed_out:
            log = log + '<div class="error">Timeout occurred!</div>'
        if oom_ed:
            log = log + '<div class="error">Out of memory!</div>'
        result.set_log(log)

        result.set_passed(not timed_out
                          and not oom_ed
                          and not exitcode
                          and re.match('Starting audit...\nAudit done.', output) is not None)
        return result
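
# A hypothetical example of the kind of CheckStyle XML configuration this checker
# expects (a sketch only; the module names follow the public CheckStyle documentation,
# but the concrete rule selection is an assumption and not part of Praktomat).
EXAMPLE_CHECKSTYLE_CONFIG = u"""<?xml version="1.0"?>
<!DOCTYPE module PUBLIC
    "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
    "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
    <module name="TreeWalker">
        <module name="ConstantName"/>
        <module name="MethodLength"/>
    </module>
</module>
"""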
class CreateFileChecker(Checker):

    file = CheckerFileField(
        help_text=_("The file that is copied into the sandbox"))
    filename = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("What the file will be named in the sandbox. If empty, we try to guess the right filename!"))
    path = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("Subfolder in the sandbox which shall contain the file."))

    def title(self):
        """ Returns the title for this checker category. """
        return "Copy File"

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, falls die Zieldatei nicht schon vorhanden ist (z.B.: vom Studenten eingereicht wurde)!"

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work.
        This runs the check in the environment ENV, returning a CheckerResult. """
        filename = self.filename if self.filename else self.file.path
        path = os.path.join(os.path.join(env.tmpdir(), string.lstrip(self.path, "/ ")),
                            os.path.basename(filename))
        overridden = os.path.exists(path)
        copy_file(self.file.path, path)

        result = CheckerResult(checker=self)
        if not overridden:
            result.set_log("")
            result.set_passed(True)
        else:
            result.set_log("The file '%s' was overridden"
                           % os.path.join(self.path, os.path.basename(self.file.path)))
            result.set_passed(False)

        source_path = os.path.join(string.lstrip(self.path, "/ "), os.path.basename(filename))
        env.add_source(source_path, get_unicode(self.file.read()))
        return result

    def show_publicly(self, passed):
        return super(CreateFileChecker, self).show_publicly(passed) or (not passed)
class DejaGnuTester(Checker, DejaGnu):
    """ Run a test case on the program. Requires a previous `DejaGnuSetup'. """

    name = models.CharField(max_length=100, help_text=_("The name of the Test"))
    test_case = CheckerFileField(help_text=_(u"In den folgenden DejaGnu-Testfällen werden typischerweise Funktionen aufgerufen, die beim vorherigen Schritt <EM>Tests einrichten</EM> definiert wurden. Siehe auch den Abschnitt <EM>How to write a test case</EM> im <A TARGET=\"_blank\" HREF=\"http://www.gnu.org/manual/dejagnu/\">DejaGnu-Handbuch</A>."))

    def __unicode__(self):
        return self.name

    def title(self):
        return self.name

    @staticmethod
    def description():
        return u"Diese Prüfung ist bestanden, wenn alle Testfälle zum erwarteten Ergebnis führten."

    def requires(self):
        return [DejaGnuSetup]

    # Return 1 if the output is ok
    def output_ok(self, output):
        return (RXFAIL.search(output) == None
                and string.find(output, "runtest completed") >= 0
                and string.find(output, "non-expected failures") < 0
                and string.find(output, "unexpected failures") < 0)

    def htmlize_output(self, log):
        # Always kill the author's name from the log
        log = re.sub(RXRUN_BY, "Run By " + settings.SITE_NAME + " on ", log)
        # Clean the output
        log = re.sub(RXREMOVE, "", log)
        log = re.sub(re.escape(settings.JVM_SECURE), os.path.basename(settings.JVM_SECURE), log)
        # HTMLize it all
        log = escape(log)
        # Every line that contains a passed message is to be enhanced.
        log = re.sub(RXPASS, r'\1 <B class="passed"> \2 </B> \3', log)
        # Every line that contains a failure message is to be enhanced.
        return "<TT><PRE>" + re.sub(RXFAIL, r'\1 <B class="error"> \2 </B> \3', log) + "</PRE></TT>"

    # Run tests. Return a CheckerResult.
    def run(self, env):
        # Save public test cases in `tests.exp'
        tests_exp = os.path.join(self.tests_dir(env), "tests.exp")
        test_cases = string.replace(encoding.get_unicode(self.test_case.read()), u"PROGRAM", env.program())
        create_file(tests_exp, test_cases)

        testsuite = self.testsuite_dir(env)
        program_name = env.program()
        if " " in program_name:
            result = self.result()
            result.set_log("<pre><b class=\"fail\">Error</b>: Path to the main() source file contains spaces.\n\nFor Java .zip submissions, the directory hierarchy of the .zip file must exactly match the package structure.\nThe default package must correspond to the .zip root directory.</pre>")
            result.set_passed(False)
            return result

        cmd = [settings.DEJAGNU_RUNTEST, "--tool", program_name, "tests.exp"]
        environ = {}
        environ['JAVA'] = settings.JVM
        environ['POLICY'] = join(join(dirname(dirname(__file__)), "scripts"), "praktomat.policy")
        environ['USER'] = env.user().get_full_name().encode(sys.getdefaultencoding(), 'ignore')
        environ['HOME'] = testsuite
        environ['UPLOAD_ROOT'] = settings.UPLOAD_ROOT
        environ['USE_KILL_LOG'] = str(settings.USE_KILL_LOG)
        # Have the checker script set a file-size ulimit of 128 kB.
        # Specifically, this limits the DejaGnu .log file size,
        # and thus deals with programs that output lots of junk.
        environ['ULIMIT_FILESIZE'] = '128'

        [output, error, exitcode] = execute_arglist(cmd, testsuite, environment_variables=environ)
        output = encoding.get_unicode(output)

        try:
            summary = encoding.get_unicode(open(os.path.join(testsuite, program_name + ".sum")).read())
            log = encoding.get_unicode(open(os.path.join(testsuite, program_name + ".log")).read())
        except:
            summary = ""
            log = ""

        complete_output = output + log
        if error:
            # A timeout occurred -- or possibly something else went wrong.
            complete_output += "ERROR: Ein TimeOut ist aufgetreten!"

        result = self.result()
        result.set_log(self.htmlize_output(complete_output))
        result.set_passed((not error) and self.output_ok(complete_output))
        return result
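
# Hypothetical example of a DejaGnu test case file as described by the help text of
# DejaGnuTester.test_case (a sketch only; the helper procedure my_prog_test is assumed
# to have been defined in the preceding "Tests einrichten" step and is not part of
# Praktomat, while pass/fail are DejaGnu's built-in result procedures).
EXAMPLE_DEJAGNU_TEST_CASE = u"""
# Call a helper defined in default.exp and record the outcome.
if { [my_prog_test "2 3" "5"] } {
    pass "addition of 2 and 3"
} else {
    fail "addition of 2 and 3"
}
"""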
class ScriptChecker(Checker):

    name = models.CharField(
        max_length=100,
        default="Externen Tutor ausführen",
        help_text=_("Name to be displayed on the solution detail page."))
    shell_script = CheckerFileField(help_text=_(
        "A script (e.g. a shell script) to run. Its output will be displayed to the user (if public), the checker will succeed if it returns an exit code of 0. The environment will contain the variables JAVA and PROGRAM."))
    remove = models.CharField(
        max_length=5000,
        blank=True,
        help_text=_("Regular expression describing passages to be removed from the output."))
    returns_html = models.BooleanField(
        default=False,
        help_text=_("If the script doesn't return HTML it will be enclosed in < pre > tags."))

    def title(self):
        """ Returns the title for this checker category. """
        return self.name

    @staticmethod
    def description():
        """ Returns a description for this Checker. """
        return u"Diese Prüfung wird bestanden, wenn das externe Programm keinen Fehlercode liefert."

    def run(self, env):
        """ Runs tests in a special environment. Here's the actual work.
        This runs the check in the environment ENV, returning a CheckerResult. """

        # Setup
        copy_file(self.shell_script.path, env.tmpdir(), to_is_directory=True)
        os.chmod(env.tmpdir() + '/' + os.path.basename(self.shell_script.name), 0750)

        # Run the tests -- execute the dumped shell script
        filenames = [name for (name, content) in env.sources()]
        args = [env.tmpdir() + '/' + os.path.basename(self.shell_script.name)] + filenames
        environ = {}
        environ['USER'] = str(env.user().id)
        environ['HOME'] = env.tmpdir()
        environ['JAVA'] = settings.JVM
        environ['JAVA_SECURE'] = settings.JVM_SECURE
        environ['POLICY'] = settings.JVM_POLICY
        environ['PROGRAM'] = env.program() or ''

        script_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'scripts')

        [output, error, exitcode, timed_out, oom_ed] = execute_arglist(
            args,
            working_directory=env.tmpdir(),
            environment_variables=environ,
            timeout=settings.TEST_TIMEOUT,
            maxmem=settings.TEST_MAXMEM,
            fileseeklimit=settings.TEST_MAXFILESIZE,
            extradirs=[script_dir],
        )
        output = force_unicode(output, errors='replace')

        result = self.create_result(env)
        (output, truncated) = truncated_log(output)
        if self.remove:
            output = re.sub(self.remove, "", output)
        if not self.returns_html or truncated or timed_out or oom_ed:
            output = '<pre>' + escape(output) + '</pre>'

        result.set_log(output, timed_out=timed_out, truncated=truncated)
        result.set_passed(not exitcode and not timed_out and not oom_ed and not truncated)

        return result
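
# Hypothetical example of an "external tutor" script this checker could run (a sketch,
# not shipped with Praktomat). The submitted source filenames arrive as arguments and
# the environment provides JAVA, JAVA_SECURE, POLICY and PROGRAM, as set up in run()
# above; the concrete shell code below is an assumption for illustration only.
EXAMPLE_TUTOR_SCRIPT = u"""#!/bin/sh
# Fail the check (non-zero exit code) if any submitted file contains a TODO marker.
for f in "$@"; do
    if grep -n "TODO" "$f"; then
        echo "Please resolve the TODO markers in $f."
        exit 1
    fi
done
echo "No TODO markers found."
exit 0
"""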
class CheckerWithFile(Checker):
    class Meta:
        abstract = True

    file = CheckerFileField(
        help_text=_("The file that is copied into the sandbox"))
    filename = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("What the file will be named in the sandbox. If empty, we try to guess the right filename!"))
    path = models.CharField(
        max_length=500,
        blank=True,
        help_text=_("Subfolder in the sandbox which shall contain the file."))
    unpack_zipfile = models.BooleanField(
        default=False,
        help_text=_("Unpack the zip file into the given subfolder. (It will be an error if the file is not a zip file; the filename is ignored.)"))

    _add_to_environment = True

    def path_relative_to_sandbox(self):
        filename = self.filename if self.filename else self.file.path
        return os.path.join(string.lstrip(self.path, "/ "), os.path.basename(filename))

    def add_to_environment(self, env, path):
        if self._add_to_environment:
            env.add_source(path, file(os.path.join(env.tmpdir(), path)).read())

    def run_file(self, env):
        result = self.create_result(env)

        clashes = []
        cleanpath = string.lstrip(self.path, "/ ")
        if self.unpack_zipfile:
            path = os.path.join(env.tmpdir(), cleanpath)
            unpack_zipfile_to(
                self.file.path, path,
                lambda n: clashes.append(os.path.join(cleanpath, n)),
                lambda f: self.add_to_environment(env, os.path.join(cleanpath, f)))
        else:
            filename = self.filename if self.filename else self.file.path
            source_path = os.path.join(cleanpath, os.path.basename(filename))
            path = os.path.join(env.tmpdir(), source_path)
            overridden = os.path.exists(path)
            copy_file(self.file.path, path, binary=True)
            if overridden:
                clashes.append(os.path.join(self.path, os.path.basename(filename)))
            self.add_to_environment(env, source_path)

        result.set_passed(not clashes)
        if clashes:
            result.set_log("These files already existed. Do NOT include them in your submissions:<br/><ul>\n"
                           + "\n".join(map(lambda f: "<li>%s</li>" % escape(f), clashes))
                           + "</ul>")
        return result
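
# A minimal sketch of the contract assumed for the `unpack_zipfile_to` helper used in
# run_file() above: extract every archive member below `target_dir`, report members that
# would overwrite an existing file via `on_clash`, and announce each written file via
# `on_file`. This is an illustrative reimplementation, not Praktomat's own helper.
def _unpack_zipfile_to_sketch(zip_path, target_dir, on_clash, on_file):
    import os
    import zipfile
    with zipfile.ZipFile(zip_path) as archive:
        for member in archive.namelist():
            if member.endswith('/'):
                continue  # skip directory entries
            if os.path.exists(os.path.join(target_dir, member)):
                on_clash(member)
            archive.extract(member, target_dir)
            on_file(member)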