def test_outputting_non_utf8():
    """Run the non-UTF-8-output program under a sandbox and verify its verdict.

    Skipped entirely (no-op) when sandboxes are disabled.
    """
    if not ENABLE_SANDBOXES:
        return
    with TemporaryCwd():
        upload_files()
        renv = compile_and_run(
            '/output-non-utf8.c',
            {
                'in_file': '/input',
                'check_output': True,
                'hint_file': '/input',
            },
            SupervisedExecutor(),
            use_sandboxes=True,
        )
        print_env(renv)
        # The checker's message must mention the answer and be non-empty.
        in_('42', renv['result_string'])
        ok_(renv['result_string'])
def test_zip():
    """Run echo.c with a zipped input and check the output equals the original."""
    with TemporaryCwd():
        upload_files()
        job_env = {
            'in_file': '/input.zip',
            'out_file': '/output',
            'exec_mem_limit': 102400,
        }
        compile_and_run("/echo.c", job_env, DetailedUnprotectedExecutor())
        # Download both the reference input and the produced output,
        # then compare them byte-for-byte.
        ft.download({'in_file': '/input'}, 'in_file', 'out.expected')
        ft.download({'out_file': '/output'}, 'out_file', 'out.real')
        ok_(filecmp.cmp(tempcwd('out.expected'), tempcwd('out.real')))
def inner(env):
    """Assert the run succeeded and every expected file was collected.

    Checks the exit status and stdout, then verifies that each collected
    file maps to a versioned filetracker path (``/<upload_dir>/<name>@<version>``)
    and that its downloaded contents match the expected ones.
    """
    eq_(env['return_code'], 0)
    eq_(env['stdout'], expected_output)
    collected = env['collected_files']
    eq_(len(expected_files), len(collected))
    for filename, path in six.iteritems(collected):
        in_(filename, expected_files)
        unversioned_path = '/%s/%s' % (upload_dir, filename)
        # Raw string: '\d' in a plain literal is an invalid escape sequence
        # (DeprecationWarning today, a SyntaxError in future Python versions);
        # this also matches the sibling helper that already uses r'...'.
        upload_re_str = r'%s@\d+' % (unversioned_path)
        upload_re = re.compile(upload_re_str)
        ok_(upload_re.match(path), 'Unexpected filetracker path')
        ft.download({'in': unversioned_path}, 'in', filename)
        # Close the file deterministically instead of relying on GC.
        with open(tempcwd(filename)) as downloaded:
            eq_(expected_files[filename], downloaded.read())
def inner(env):
    """Assert the run succeeded and every expected file was collected.

    Checks the exit status and stdout, then verifies that each collected
    file maps to a versioned filetracker path (``/<upload_dir>/<name>@<version>``)
    and that its downloaded contents match the expected ones.
    """
    eq_(env['return_code'], 0)
    eq_(env['stdout'], expected_output)
    collected = env['collected_files']
    eq_(len(expected_files), len(collected))
    for filename, path in six.iteritems(collected):
        in_(filename, expected_files)
        unversioned_path = '/%s/%s' % (upload_dir, filename)
        upload_re_str = r'%s@\d+' % (unversioned_path)
        upload_re = re.compile(upload_re_str)
        ok_(upload_re.match(path), 'Unexpected filetracker path')
        ft.download({'in': unversioned_path}, 'in', filename)
        # Close the file deterministically instead of relying on GC.
        with open(tempcwd(filename)) as downloaded:
            eq_(expected_files[filename], downloaded.read())
def test_compilation_with_additional_archive(message, compiler, source, sources, archive, unexpected_files):
    """Compile with an additional archive and ensure no stray files appear.

    After the build finishes, each path in ``unexpected_files`` must be absent
    from the working directory.
    """
    with TemporaryCwd(inner_directory='one_more_level'):
        upload_files()
        request = {
            'source_file': source,
            'additional_sources': sources,
            'additional_archive': archive,
            'compiler': compiler,
            'out_file': '/out',
        }
        compile_and_run(request, message)
        for leftover in unexpected_files:
            ok_(not os.path.exists(leftover))
def test_compilation_error_gcc_large_limit(message, compiler, source):
    """Compile a failing source with an enlarged output limit.

    With ``compilation_output_limit`` raised to 100x the default, the
    compiler output kept in the result must exceed the default cap.
    """
    # Fetch the limits dict once instead of calling _get_limits() twice.
    limits = _get_limits()
    with TemporaryCwd():
        upload_files()
        result_env = compile_fail({
            'source_file': source,
            'compiler': compiler,
            'out_file': '/out',
            'compilation_time_limit': limits['time_limit'],
            'compilation_real_time_limit': limits['time_hard_limit'],
            'compilation_output_limit': 100 * DEFAULT_COMPILER_OUTPUT_LIMIT,
        }, message)
        ok_(len(result_env['compiler_output']) > DEFAULT_COMPILER_OUTPUT_LIMIT)
def compile_fail(compiler_env, expected_in_compiler_output=None):
    """Helper function for compiling and asserting that it fails.

    Runs the compilation job, asserts a 'CE' result, checks that the captured
    compiler output respects the applicable size limit, and optionally asserts
    that a given substring appears in that output. Returns the result env.
    """
    result_env = run(compiler_env)
    print_env(result_env)
    eq_(result_env['result_code'], 'CE')
    output_size = len(result_env['compiler_output'])
    if 'compilation_output_limit' not in compiler_env:
        # No explicit limit requested: the default cap must hold.
        ok_(output_size <= DEFAULT_COMPILER_OUTPUT_LIMIT)
    else:
        requested_limit = compiler_env['compilation_output_limit']
        # An explicit None means "unlimited" — nothing to check then.
        if requested_limit is not None:
            ok_(output_size <= requested_limit)
    if expected_in_compiler_output:
        in_(expected_in_compiler_output, result_env['compiler_output'])
    return result_env
def change(env):
    """Assert success, that 'somefile' was rewritten to '13', and that the
    marker file 'not_existing' was created."""
    res_ok(env)
    # Close the file deterministically instead of relying on GC.
    with open(tempcwd('somefile')) as f:
        eq_('13', f.read().strip())
    ok_(os.path.exists(tempcwd('./not_existing')))
def lines_split(env):
    """Check that stdout was split into a list of exactly three lines."""
    stdout_lines = env['stdout']
    ok_(isinstance(stdout_lines, list))
    eq_(len(stdout_lines), 3)
def inner(env):
    """Verify the captured stdout does not exceed the configured limit."""
    captured = env['stdout']
    ok_(len(captured) <= limit)
def check_inwer_faulty(env):
    """The inwer job itself must finish OK, yet its first stdout line
    must not start with the verdict b"OK"."""
    eq_(env['result_code'], "OK")
    first_line = env['stdout'][0]
    ok_(not first_line.startswith(b"OK"))
def fail(*args, **kwargs):
    """Stub callback that unconditionally fails, no matter how it is called.

    Accepts any arguments so it can be dropped in for any callback signature.
    """
    ok_(False, "Forced fail")
def nochange(env):
    """Assert a return code of 1, that 'somefile' still holds '42', and that
    the marker file 'not_existing' was never created."""
    res_re(1)(env)
    # Close the file deterministically instead of relying on GC.
    with open(tempcwd('somefile')) as f:
        eq_('42', f.read().strip())
    ok_(not os.path.exists(tempcwd('./not_existing')))
def inner(env):
    """Expect a TLE verdict with real time used above the limit."""
    eq_('TLE', env['result_code'])
    elapsed = env['real_time_used']
    ok_(elapsed > limit)