def create_testrun(self, request, problem_instance, form_data,
                   commit=True):
    if not is_zeus_problem(problem_instance.problem):
        return super(ZeusContestControllerMixin, self).create_testrun(
            request, problem_instance, form_data, commit)
    # Create the submission without committing so that the library
    # file can be attached before it is saved and judged.
    submission = super(ZeusContestControllerMixin, self).create_testrun(
        request, problem_instance, form_data, commit=False,
        model=ZeusTestRunProgramSubmission)
    # TODO: attach the library only if the task actually uses one
    if 'library' in form_data:
        library_file = form_data['library']
        submission.library_file.save(library_file.name, library_file)
    if commit:
        submission.save()
        submission.problem_instance.controller.judge(submission)
    return submission
def render_submission(self, request, submission):
    if submission.kind != 'TESTRUN' or \
            not is_zeus_problem(submission.problem_instance.problem):
        return super(ZeusContestControllerMixin, self) \
            .render_submission(request, submission)
    # Fetch the Zeus-specific submission model if a base instance
    # was passed in.
    if not isinstance(submission, ZeusTestRunProgramSubmission):
        submission = ZeusTestRunProgramSubmission.objects \
            .get(id=submission.id)
    return render_to_string(
        'zeus/submission_header.html',
        context_instance=RequestContext(request, {
            'submission': submission_template_context(request, submission),
            'supported_extra_args':
                self.get_supported_extra_args(submission),
        }))
def use_spliteval(self, submission):
    # Zeus submissions are judged externally, so split evaluation
    # does not apply to them.
    if is_zeus_problem(submission.problem_instance.problem):
        return False
    return super(ZeusContestControllerMixin, self) \
        .use_spliteval(submission)