def make_report(env, kind='NORMAL', **kwargs):
    """Builds entities for tests results in a database.

    Skips all report building when the submission did not compile.
    Both loops are idempotent: a test/group whose report id is already
    recorded in the environ is not re-created on a re-run.

    Used ``environ`` keys:
        * ``tests``
        * ``test_results``
        * ``group_results``
        * ``status``
        * ``score``
        * ``compilation_result``
        * ``compilation_message``

    Produced ``environ`` keys:
        * ``report_id``: id of the produced
          :class:`~oioioi.contests.models.SubmissionReport`
    """
    _submission, submission_report = _make_base_report(env, kind)
    if env['compilation_result'] != 'OK':
        return env
    tests = env['tests']

    test_results = env.get('test_results', {})
    for test_name, result in test_results.iteritems():
        test = tests[test_name]
        if 'report_id' in result:
            # Already persisted by a previous run of this handler.
            continue
        test_report = TestReport(submission_report=submission_report)
        test_report.test_id = test.get('id')
        test_report.test_name = test_name
        test_report.test_group = test['group']
        test_report.test_time_limit = test.get('exec_time_limit')
        test_report.test_max_score = test['max_score']
        test_report.score = result['score']
        test_report.status = result['status']
        test_report.time_used = result['time_used']
        comment = result.get('result_string', '')
        if comment.lower() == 'ok':  # Annoying
            comment = ''
        test_report.comment = slice_str(
            comment, TestReport._meta.get_field('comment').max_length)
        test_report.save()
        result['report_id'] = test_report.id

    group_results = env.get('group_results', {})
    for group_name, group_result in group_results.iteritems():
        if 'report_id' in group_result:
            continue
        group_report = GroupReport(submission_report=submission_report)
        group_report.group = group_name
        group_report.score = group_result['score']
        group_report.status = group_result['status']
        group_report.save()
        # BUG FIX: the guard above checks 'report_id', but the original
        # stored the id only under 'result_id', so every re-run of this
        # handler created a duplicate GroupReport row.  Store the id under
        # both keys: 'report_id' makes the guard effective, and the legacy
        # 'result_id' is kept for any existing consumers of the environ.
        group_result['report_id'] = group_report.id
        group_result['result_id'] = group_report.id
    return env
def make_zeus_testrun_report(env, **kwargs):
    """Builds entities for Zeus-testrun reports in a database.

    Nothing is produced when the submission failed to compile.

    Used ``environ`` keys:
        * ``tests``
        * ``test_results``
        * ``status``
        * ``score``
        * ``compilation_result``
        * ``compilation_message``
        * ``submission_id``

    Produced ``environ`` keys:
        * ``report_id``: id of the produced
          :class:`~oioioi.contests.models.SubmissionReport`
    """
    _submission, submission_report = _make_base_report(env, 'TESTRUN')
    if env['compilation_result'] != 'OK':
        return env

    # A testrun environ carries exactly one test; take its (only) key.
    test_name = next(iter(env['tests']))
    test = env['tests'][test_name]
    test_result = env['test_results'][test_name]
    zeus_result = test_result['zeus_test_result']

    comment = test_result.get('result_string', '')
    if comment.lower() == 'ok':  # Annoying
        comment = ''
    comment_limit = \
        ZeusTestRunReport._meta.get_field('comment').max_length

    report = ZeusTestRunReport(
        submission_report=submission_report,
        status=env['status'],
        comment=slice_str(comment, comment_limit),
        time_used=test_result['time_used'],
        test_time_limit=test.get('exec_time_limit'),
        full_out_size=zeus_result['stdout_size'],
        # The server to download from: submission.problem_instance.problem
        full_out_handle=zeus_result['stdout_uid'],
    )
    # Output truncated to first 10kB
    report.output_file.save('out', ContentFile(zeus_result['stdout']))
    report.save()
    return env
def make_report(env, **kwargs):
    """Builds entities for testrun reports in a database.

    Returns the environ unchanged (apart from report bookkeeping) when
    the submission failed to compile.

    Used ``environ`` keys:
        * ``tests``
        * ``test_results``
        * ``status``
        * ``score``
        * ``compilation_result``
        * ``compilation_message``

    Produced ``environ`` keys:
        * ``report_id``: id of the produced
          :class:`~oioioi.contests.models.SubmissionReport`
    """
    _submission, submission_report = _make_base_report(env, 'TESTRUN')
    if env['compilation_result'] != 'OK':
        return env

    # Testruns always use the single pseudo-test named 'test'.
    test = env['tests']['test']
    test_result = env['test_results']['test']

    comment = test_result.get('result_string', '')
    if comment.lower() == 'ok':  # Annoying
        comment = ''
    comment_limit = TestRunReport._meta.get_field('comment').max_length

    report = TestRunReport(submission_report=submission_report)
    report.status = env['status']
    report.comment = slice_str(comment, comment_limit)
    report.time_used = test_result['time_used']
    report.test_time_limit = test.get('exec_time_limit')
    report.output_file = filetracker_to_django_file(
        test_result['out_file'])
    report.save()
    return env
def make_report(env, **kwargs):
    """Builds entities for testrun reports in a database.

    Does nothing beyond the base report when the submission failed to
    compile.

    Used ``environ`` keys:
        * ``tests``
        * ``test_results``
        * ``status``
        * ``score``
        * ``compilation_result``
        * ``compilation_message``

    Produced ``environ`` keys:
        * ``report_id``: id of the produced
          :class:`~oioioi.contests.models.SubmissionReport`
    """
    # Renamed unused 'submission' to '_submission' for consistency with
    # the other testrun report handler and to mark it as intentionally
    # unused.
    _submission, submission_report = _make_base_report(env, 'TESTRUN')
    if env['compilation_result'] != 'OK':
        return env

    # Testruns always use the single pseudo-test named 'test'.
    test = env['tests']['test']
    test_result = env['test_results']['test']

    comment = test_result.get('result_string', '')
    if comment.lower() == 'ok':  # Annoying
        comment = ''

    testrun_report = TestRunReport(submission_report=submission_report)
    testrun_report.status = env['status']
    testrun_report.comment = slice_str(
        comment, TestRunReport._meta.get_field('comment').max_length)
    testrun_report.time_used = test_result['time_used']
    testrun_report.test_time_limit = test.get('exec_time_limit')
    testrun_report.output_file = filetracker_to_django_file(
        test_result['out_file'])
    testrun_report.save()
    return env
def make_report(env, kind='NORMAL', save_scores=True, **kwargs):
    """Builds entities for tests results in a database.

    When ``save_scores`` is False, per-test and per-group scores are
    stored as ``None`` (statuses are kept).  Both loops are idempotent:
    entries whose report id is already recorded in the environ are not
    re-created on a re-run.

    Used ``environ`` keys:
        * ``tests``
        * ``test_results``
        * ``group_results``
        * ``status``
        * ``score``
        * ``compilation_result``
        * ``compilation_message``
        * ``submission_id``

    Produced ``environ`` keys:
        * ``report_id``: id of the produced
          :class:`~oioioi.contests.models.SubmissionReport`
    """
    submission, submission_report = _make_base_report(env, kind)
    if env['compilation_result'] != 'OK':
        return env
    tests = env['tests']

    test_results = env.get('test_results', {})
    for test_name, result in test_results.iteritems():
        test = tests[test_name]
        if 'report_id' in result:
            # Already persisted by a previous run of this handler.
            continue
        test_report = TestReport(submission_report=submission_report)
        test_report.test_id = test.get('id')
        test_report.test_name = test_name
        test_report.test_group = test['group']
        test_report.test_time_limit = test.get('exec_time_limit')
        test_report.test_max_score = test['max_score']
        test_report.score = result['score'] if save_scores else None
        test_report.status = result['status']
        test_report.time_used = result['time_used']
        comment = result.get('result_string', '')
        if comment.lower() in ['ok', 'time limit exceeded']:  # Annoying
            comment = ''
        test_report.comment = slice_str(
            comment, TestReport._meta.get_field('comment').max_length)
        if env.get('save_outputs', False):
            test_report.output_file = filetracker_to_django_file(
                result['out_file'])
        test_report.save()
        result['report_id'] = test_report.id

    group_results = env.get('group_results', {})
    for group_name, group_result in group_results.iteritems():
        if 'report_id' in group_result:
            continue
        group_report = GroupReport(submission_report=submission_report)
        group_report.group = group_name
        group_report.score = group_result['score'] if save_scores else None
        group_report.max_score = \
            group_result['max_score'] if save_scores else None
        group_report.status = group_result['status']
        group_report.save()
        # BUG FIX: the guard above checks 'report_id', but the original
        # stored the id only under 'result_id', so every re-run of this
        # handler created a duplicate GroupReport row.  Store the id under
        # both keys: 'report_id' makes the guard effective, and the legacy
        # 'result_id' is kept for any existing consumers of the environ.
        group_result['report_id'] = group_report.id
        group_result['result_id'] = group_report.id

    if kind == 'INITIAL':
        # Notify the user only about genuinely new initial results, not
        # about rejudges, and only when there is a user to notify.
        if submission.user is not None and not env.get('is_rejudge', False):
            logger.info(
                "Submission %(submission_id)d by user %(username)s"
                " for problem %(short_name)s got initial result.",
                {'submission_id': submission.pk,
                 'username': submission.user.username,
                 'short_name': submission.problem_instance.short_name},
                extra={'notification': 'initial_results',
                       'user': submission.user,
                       'submission': submission})
    return env