def finalize_evaluation_environment(self, environ):
    """Inject suspend-judge checks into the evaluation recipe.

    After the parent controller finalizes *environ*, two
    ``check_problem_instance_state`` handlers are inserted: one before the
    ``'compile'`` recipe entry (with ``suspend_init_tests=True``) and one
    before the ``'before_final_tests'`` placeholder.  An ``IndexError``
    from either helper is ignored — presumably raised when the recipe
    lacks that entry/placeholder, in which case there is nothing to hook.
    """
    super(SuspendJudgeContestControllerMixin, self).finalize_evaluation_environment(environ)

    handler_path = 'oioioi.suspendjudge.handlers.check_problem_instance_state'

    try:
        add_before_recipe_entry(
            environ,
            'compile',
            (
                'check_problem_instance_state',
                handler_path,
                dict(suspend_init_tests=True),
            ),
        )
    except IndexError:
        pass

    try:
        add_before_placeholder(
            environ,
            'before_final_tests',
            ('check_problem_instance_state', handler_path),
        )
    except IndexError:
        pass
def fill_evaluation_environ(self, environ, submission, **kwargs):
    """Build the judging environ for *submission*.

    Starts from the base environ, extends the recipe after the
    ``'after_compile'`` placeholder with the generated test-running
    handlers, fills in default scorer/aggregator paths, attaches the
    problem's output checker (when one is set), and — when INITIAL
    reports are requested — schedules the report/score update handlers
    before the ``'after_initial_tests'`` placeholder.
    """
    self.generate_base_environ(environ, submission, **kwargs)

    # User-out submissions produce only USER_OUTS reports and must keep
    # the program outputs around for later retrieval.
    if 'USER_OUTS' in environ['submission_kind']:
        environ['report_kinds'] = ['USER_OUTS']
        environ['save_outputs'] = True

    extend_after_placeholder(
        environ,
        'after_compile',
        self.generate_recipe(environ['report_kinds']),
    )

    environ.setdefault('group_scorer', 'oioioi.programs.utils.min_group_scorer')
    environ.setdefault('score_aggregator', 'oioioi.programs.utils.sum_score_aggregator')

    # Attach the problem's custom checker executable, if one was uploaded.
    checker_exe = OutputChecker.objects.get(problem=self.problem).exe_file
    if checker_exe:
        environ['checker'] = django_to_filetracker_path(checker_exe)

    if 'INITIAL' in environ['report_kinds']:
        for entry in (
            ('update_report_statuses', 'oioioi.contests.handlers.update_report_statuses'),
            ('update_submission_score', 'oioioi.contests.handlers.update_submission_score'),
        ):
            add_before_placeholder(environ, 'after_initial_tests', entry)
def fill_evaluation_environ_post_problem(self, environ, submission):
    """Run after ProblemController.fill_evaluation_environ.

    When INITIAL reports are requested, inserts the report-status and
    submission-score update handlers before the ``'after_initial_tests'``
    placeholder; otherwise does nothing.
    """
    if 'INITIAL' not in environ['report_kinds']:
        return
    for entry in (
        ('update_report_statuses', 'oioioi.contests.handlers.update_report_statuses'),
        ('update_submission_score', 'oioioi.contests.handlers.update_submission_score'),
    ):
        add_before_placeholder(environ, 'after_initial_tests', entry)