def execute_cpp(untrusted_code, input_values, limits):
    """Compile untrusted C++ code in a sandbox and run it once per stdin value.

    :param untrusted_code: C++ source saved as main.cpp.
    :param input_values: iterable of stdin payloads, one sandbox run each.
    :param limits: epicbox resource limits applied to each run.
    :return: list of epicbox result dicts; on compilation failure, a
        one-element list holding the compile result.
    """
    results = []
    with epicbox.working_directory() as workdir:
        compile_result = epicbox.run(
            "gcc_compile",
            "g++ -pipe -O2 -static -o main main.cpp",
            files=[{
                "name": "main.cpp",
                "content": untrusted_code
            }],
            workdir=workdir,
        )
        # Check the exit code rather than stderr truthiness: g++ writes
        # *warnings* to stderr even when the build succeeds, which the old
        # stderr check misreported as a failed compilation.
        if compile_result["exit_code"] != 0:
            return [compile_result]
        for stdin_value in input_values:
            results.append(
                epicbox.run(
                    "gcc_run",
                    "./main",
                    stdin=stdin_value,
                    limits=limits,
                    workdir=workdir,
                ))
    return results
def check(self, reply, clue):
    """Run the submitted TRIK Studio save file in the sandbox and grade it.

    Optionally unpacks an attached fields archive into the working
    directory first, then delegates verdict extraction to
    _process_checker_result.
    """
    save_files = [{
        'name': 'main.qrs',
        'content': attachment_content(reply)
    }]
    with epicbox.working_directory() as workdir:
        # noinspection PyAttributeOutsideInit
        self.log = logger.bind(workdir=workdir)
        if self.source.fields_archive:
            # TODO: extract all xmls from archive and upload using one epicbox run
            archive_files = [{
                'name': 'fields.zip',
                'content': attachment_content(self.source.fields_archive[0])
            }]
            self.log.info("Uploading and unpacking fields archive")
            unpack_result = epicbox.run(
                'trik',
                command='mkdir -p fields/main && unzip fields.zip -d fields/main',
                files=archive_files,
                limits=UNARCHIVE_LIMITS,
                workdir=workdir)
            if unpack_result['exit_code'] != 0:
                raise PluginError("Failed to extract fields from the archive")
        self.log.info("Starting trik sandbox")
        checker_result = epicbox.run('trik',
                                     files=save_files,
                                     limits=EPICBOX_TRIK_LIMITS,
                                     workdir=workdir)
        return self._process_checker_result(checker_result, workdir)
def execute_java(untrusted_code, input_values, limits):
    """Compile untrusted Java code in a sandbox and run it once per stdin value.

    The public class name is recovered from the source text so the file can
    be saved under the matching <Name>.java that javac requires.

    :param untrusted_code: Java source as UTF-8 bytes.
    :param input_values: iterable of stdin payloads, one sandbox run each.
    :param limits: epicbox resource limits for both compile and run steps.
    :return: list of epicbox result dicts; on compilation failure, a
        one-element list holding the compile result.
    :raises ValueError: if no class declaration can be located.
    """
    results = []
    with epicbox.working_directory() as workdir:
        tokens = untrusted_code.decode("UTF-8").split()
        # Naive tokenization: take the token after the first "class" keyword
        # and strip a directly-attached opening brace ("Foo{" -> "Foo").
        try:
            name_of_class = tokens[tokens.index("class") + 1].replace("{", "")
        except (ValueError, IndexError):
            raise ValueError("Could not find a class declaration in the source")
        files = [{"name": f"{name_of_class}.java", "content": untrusted_code}]
        compile_result = epicbox.run(
            "java",
            f"javac {name_of_class}.java",
            files=files,
            workdir=workdir,
            limits=limits,
        )
        # javac writes warnings to stderr even on success, so test the exit
        # code instead of stderr truthiness to detect a failed build.
        if compile_result["exit_code"] != 0:
            return [compile_result]
        for stdin_value in input_values:
            results.append(
                epicbox.run(
                    "java",
                    f"java {name_of_class}",
                    files=files,
                    workdir=workdir,
                    stdin=stdin_value,
                    limits=limits,
                ))
    return results
def sandbox_asm64(code, stdins=None, callback=print):
    """Assemble, link and run x86-64 NASM code in the epicbox sandbox.

    Each stage's result dict gets a 'type' key ('compile' for the nasm and
    gcc stages, 'run' for executions) and is handed to *callback*.

    :param code: assembly source text saved as main.asm.
    :param stdins: stdin payloads, one run per entry; defaults to a single
        empty stdin. A None default replaces the shared mutable default
        list (`stdins=[""]`), which is a classic Python pitfall.
    :param callback: callable invoked with each stage's result dict.
    """
    if stdins is None:
        stdins = [""]
    files = [{'name': 'main.asm', 'content': code.encode()}]
    compile_limits = {'cputime': 3, 'memory': 128}
    run_limits = {'cputime': 1, 'memory': 64}
    with epicbox.working_directory() as workdir:
        result = epicbox.run('asm64_compile', 'nasm -f elf64 -g main.asm',
                             files=files, limits=compile_limits,
                             workdir=workdir)
        result['type'] = 'compile'
        callback(result)
        if result["exit_code"] == 0:
            # Link the object file produced by the previous stage (shared
            # workdir keeps main.o available).
            result = epicbox.run('asm64_compile',
                                 'gcc -m64 -no-pie main.o -o main',
                                 files=files, limits=compile_limits,
                                 workdir=workdir)
            result['type'] = 'compile'
            callback(result)
            if result["exit_code"] == 0:
                for stdin in stdins:
                    result = epicbox.run('asm64_run', './main', files=files,
                                         limits=run_limits, workdir=workdir,
                                         stdin=stdin)
                    result['type'] = 'run'
                    callback(result)
def propose(question, example_input, example_output, testcases, len_limit):
    """Interactively grade a user-supplied list comprehension.

    The snippet is executed twice per test case — once wrapped in list
    brackets and once bare — and both outputs must equal the expected
    answer for the case to pass.
    """
    global q_num
    print("Question:", question)
    print("Example Input:\n", example_input, sep='', end='\n\n')
    print("Example Output:\n", example_output, sep='')
    print("Input Length Limit:", len_limit)
    print("Your list comprehension code:")
    content = input()
    assert len(content) < len_limit, 'Length exceed limit'
    assert '"' not in content, 'Find invalid character "'
    assert "'" not in content, 'Find invalid character \''
    wrapped = '[\n' + content + '\n]'

    def run_snippet(source, stdin_text):
        # Execute the snippet in the sandbox with the test input on stdin.
        sandbox_files = [{
            'name': 'main.py',
            'content': source.encode('utf-8')
        }, {
            'name': 'input',
            'content': stdin_text.encode('utf-8')
        }]
        return epicbox.run('python', 'python3 main.py < input',
                           files=sandbox_files,
                           limits={'cputime': 3, 'memory': 512})

    for idx, (input_, output_) in enumerate(testcases):
        n_result = run_snippet(wrapped, input_)
        result = run_snippet(content, input_)
        for run_result in (n_result, result):
            # A missing or non-zero exit code means the snippet crashed.
            if not (run_result['exit_code'] is not None
                    and not run_result['exit_code']):
                print("Runtime Error QQ")
                exit(1)
        if not (n_result['stdout'].decode('utf-8').strip()
                == result['stdout'].decode('utf-8').strip()
                == output_.strip()):
            print("Wrong Answer QQ")
            exit(1)
        print("Pass the {} test case.".format(idx), flush=True)
    print('Congrats! You pass question {}.'.format(q_num))
    q_num += 1
def _save_source(self):
    """Upload the runner's source file into the sandbox working directory.

    Runs a no-op command ('true') in the base profile purely so that
    epicbox copies the file into the shared workdir.

    :raises RuntimeError: if no workdir exists (not used as a context manager).
    """
    if not self.workdir:
        raise RuntimeError(
            "A CodeRunner instance must be used as a context manager")
    files = [{'name': self.filename, 'content': self.source.encode()}]
    epicbox.run(BaseProfiles.BASE,
                command='true',
                files=files,
                workdir=self.workdir)
    self._is_source_saved = True
    logger.info("Source code was saved to the sandbox working directory",
                workdir=self.workdir,
                files=[entry['name'] for entry in files])
def cpp(code: str, testcase_dir: str) -> SubmissionResult:
    """Compile and judge a C++ submission against the tests in *testcase_dir*.

    :param code: C++ source saved as main.cpp.
    :param testcase_dir: directory holding numbered input/expected-output pairs.
    :return: SubmissionResult with one verdict per executed test case, or a
        single COMPILATION_ERROR verdict if the build fails.
    """
    files = [{'name': 'main.cpp', 'content': code}]
    result = SubmissionResult()
    PROFILES = {
        'gcc_compile': {
            'docker_image': 'stepik/epicbox-gcc:6.3.0',
            'user': '******',
        },
        'gcc_run': {
            'docker_image': 'stepik/epicbox-gcc:6.3.0',
            # It's safer to run untrusted code as a non-root user (even in a container)
            'user': '******',
            'read_only': True,
            'network_disabled': True,
        }
    }
    epicbox.configure(profiles=PROFILES)
    test_no = 0
    with epicbox.working_directory() as workdir:
        compilation_output = epicbox.run('gcc_compile',
                                         'g++ -std=c++14 -O2 -o main main.cpp',
                                         files=files,
                                         workdir=workdir)
        if compilation_output['exit_code'] != 0:
            result.add_testcase_verdict(test_no, False,
                                        Verdict.COMPILATION_ERROR, 0)
            result.add_log(compilation_output['stderr'])
            return result
        while test_no < MAX_NUMBER_OF_TESTCASES:
            stdin, expected = get_input_expected_output(testcase_dir, test_no)
            # PEP 8 fix: compare to None with `is`, not `==`.
            if stdin is None or expected is None:
                break
            # `limits` is presumably a module-level table keyed by
            # language — TODO confirm it defines a 'cpp' entry.
            output = epicbox.run('gcc_run', './main', stdin=stdin,
                                 limits=limits['cpp'], workdir=workdir)
            process_output(test_no, output, expected, result)
            test_no += 1
    return result
def _save_source(self):
    """Save the runner's source code into the sandbox working directory.

    Uses a no-op `true` command as a vehicle so epicbox merely copies the
    file into the shared workdir.

    :raises RuntimeError: if called outside the context manager (no workdir).
    """
    if not self.workdir:
        raise RuntimeError(
            "A CodeRunner instance must be used as a context manager")
    source_bytes = self.source.encode()
    files = [{'name': self.filename, 'content': source_bytes}]
    # The command does nothing; running it uploads `files` into workdir.
    epicbox.run(BaseProfiles.BASE,
                command='true',
                files=files,
                workdir=self.workdir)
    self._is_source_saved = True
    logger.info("Source code was saved to the sandbox working directory",
                workdir=self.workdir,
                files=[f['name'] for f in files])
def test_executor_in_docker():
    """End-to-end check: the dockerized executor applies a token transfer."""
    import epicbox
    # Minimal sandbox profile: offline Alpine Python, non-root user.
    PROFILES = {
        'python': {
            'docker_image': 'python:3.8-alpine',
            'network_disabled': True,
            'user': '******'
        }
    }
    epicbox.configure(profiles=PROFILES)
    files = [{
        'name': 'executor.py',
        'content': resource_string('aleph.vms.dockerized', 'tools/executor.py')
    }]
    limits = {'cputime': 2, 'memory': 256}
    example_code = resource_string('aleph.vms.dockerized', 'tools/example.py')
    # The executor reads a JSON job from stdin: contract code, the function
    # to call, its arguments, the calling message and the current state.
    payload = {
        'code': example_code.decode('utf-8'),
        'action': 'call',
        'function': 'transfer',
        'message': {
            'sender': 'NULSd6HgcNwprmEYbQ7pqLznGVU3EhW7Syv7W'
        },
        'state': {
            "owner": "NULSd6HgcNwprmEYbQ7pqLznGVU3EhW7Syv7W",
            "name": "Test",
            "symbol": "TST",
            "decimals": 18,
            "total_supply": 24000000000000000000000000,
            "balances": {
                "NULSd6HgcNwprmEYbQ7pqLznGVU3EhW7Syv7W":
                24000000000000000000000000
            },
            "allowed": {}
        },
        'args': ['blah', 1000 * (10**18)]
    }
    output = epicbox.run('python', 'python3 executor.py',
                         files=files,
                         limits=limits,
                         stdin=json.dumps(payload).encode('utf-8'))
    out_payload = json.loads(output['stdout'].decode('utf-8'))
    # The transfer must succeed and move exactly 1000 tokens from the
    # owner's balance to 'blah', leaving total supply unchanged.
    assert out_payload['result'] is True
    assert out_payload['state'][
        'owner'] == 'NULSd6HgcNwprmEYbQ7pqLznGVU3EhW7Syv7W'
    assert out_payload['state']['total_supply'] == 24000000 * (10**18)
    assert out_payload['state']['balances'][
        'NULSd6HgcNwprmEYbQ7pqLznGVU3EhW7Syv7W'] == (24000000 - 1000) * (10**
                                                                         18)
    assert out_payload['state']['balances']['blah'] == 1000 * (10**18)
    assert len(out_payload['state']['balances']) == 2
    assert not output.get('stderr')
def run(obj):
    """Judge one packed test case tuple and print its verdict.

    Verdicts: TLE (timeout), RTE (non-zero exit or stderr output),
    AC with duration, or WA.
    """
    container, path, stdin, expectedOut, limits, workdir, files, n = obj
    outcome = epicbox.run(container, path, stdin=stdin, limits=limits,
                          workdir=workdir, files=files)
    if outcome["timeout"]:
        print("Test " + n + ": TLE")
        return
    if outcome["exit_code"] != 0 or outcome["stderr"] != b"":
        print("Test " + n + ": RTE")
        return
    got_lines = outcome["stdout"].strip().split(b"\n")
    want_lines = str.encode(expectedOut).strip().split(b"\n")
    # Same line count, and each line equal after collapsing whitespace runs.
    matches = len(got_lines) == len(want_lines) and all(
        b" ".join(g.split()) == b" ".join(w.split())
        for g, w in zip(got_lines, want_lines))
    if matches:
        print("Test " + n + ": AC", outcome["duration"])
    else:
        print("Test " + n + ": WA")
def _process_checker_result(self, result, workdir):
    """Translate the trik checker's exit status into a grading verdict.

    Returns True on success, otherwise a (False, message) tuple explaining
    why the submission failed or why the checker itself misbehaved.
    """
    exit_code = result['exit_code']
    if exit_code == EXIT_CODE_OK:
        return True
    if exit_code == EXIT_CODE_TEST_FAILED:
        # The checker leaves a JSON report file describing the failure.
        cat_result = epicbox.run('trik',
                                 command='cat ./report',
                                 workdir=workdir)
        if cat_result['exit_code']:
            return False, "TRIK Studio save file is incorrect or corrupted"
        try:
            parsed_report = json.loads(
                cat_result['stdout'].decode(errors='replace'))
        except ValueError:
            error_msg = "Failed to parse the report file"
            self.log.exception(error_msg)
            return False, INTERNAL_ERROR.format(error_msg)
        # noinspection PyBroadException
        try:
            return False, parsed_report[0]['message']
        except Exception:
            error_msg = "Report format is incorrect"
            self.log.exception(error_msg, report=parsed_report)
            return False, INTERNAL_ERROR.format(error_msg)
    # Any other exit code means the checker itself failed.
    out_text = result['stdout'].decode(errors='replace')
    err_text = result['stderr'].decode(errors='replace')
    self.log.error("Checker failed",
                   exit_code=exit_code,
                   stdout=out_text,
                   stderr=err_text)
    return False, INTERNAL_ERROR.format(
        CHECKER_RESULT_MSG.format(exit_code=exit_code,
                                  stdout=out_text,
                                  stderr=err_text))
def _compile(file, compile_command, compilation_profile, workdir,
             other_files) -> ExecutedProcess:
    """
    Compiles the file with the provided command, profile and working directory
    """
    with open(file, 'rb') as source:
        source_bytes = source.read()
    sandbox_files = [{'name': os.path.basename(file),
                      'content': source_bytes}]
    _Executor._validate_files_list(other_files)
    if other_files is not None:
        sandbox_files.extend(other_files)
    outcome = epicbox.run(compilation_profile, compile_command,
                          files=sandbox_files, workdir=workdir)
    return ExecutedProcess(tag="compilation",
                           stdout=outcome["stdout"].decode(),
                           stderr=outcome["stderr"].decode(),
                           exit_code=outcome["exit_code"],
                           timeout=outcome["timeout"])
def python(code: str, testcase_dir: str) -> SubmissionResult:
    """Judge a Python submission against the tests in *testcase_dir*.

    :param code: Python source saved as main.py.
    :param testcase_dir: directory holding numbered input/expected-output pairs.
    :return: SubmissionResult with one verdict per executed test case.
    """
    files = [{'name': 'main.py', 'content': code}]
    result = SubmissionResult()
    epicbox.configure(
        profiles=[epicbox.Profile('python', 'python:3.6.5-alpine')])
    test_no = 0
    while test_no < MAX_NUMBER_OF_TESTCASES:
        stdin, expected = get_input_expected_output(testcase_dir, test_no)
        # PEP 8 fix: compare to None with `is`, not `==`.
        if stdin is None or expected is None:
            break
        output = epicbox.run('python', 'python3 main.py', stdin=stdin,
                             files=files, limits=limits['py'])
        process_output(test_no, output, expected, result)
        test_no += 1
    return result
def _run_dataset(self, dataset):
    """Run the compiled Main class against one dataset in the sandbox.

    :param dataset: stdin payload for the program.
    :return: epicbox result dict.
    """
    # Work on a copy of the configured limits.
    limits = dict(self.limits)
    # BUG FIX: the '{}' placeholder was never filled in, so the JVM was
    # launched with a literal '-Xmx{}k' argument. Fill it from the memory
    # limit, converting MB -> KB — assumes limits['memory'] is in MB (the
    # epicbox convention); TODO confirm against the profile configuration.
    command = 'java -Xmx{}k Main'.format(limits['memory'] * 1024)
    return epicbox.run(self.profile,
                       command=command,
                       stdin=dataset,
                       limits=limits,
                       workdir=self.workdir)
def getProcessResponse(content: bytes):
    """Execute *content* as main.py in the sandbox and wrap the outcome."""
    sandbox_result = epicbox.run(
        'python',
        'python3 main.py',
        files=[{'name': 'main.py', 'content': content}],
        limits={'cputime': 1, 'memory': 64},
    )
    return ResponseWrapper(**sandbox_result)
def run(code, lang):
    """Execute *code* in the sandbox for the given language.

    Supports 'python' and 'text/x-java'. Returns the epicbox result dict
    (the compile result when a Java build fails) or an error string for an
    unsupported language.
    """
    epicbox.configure(profiles=PROFILES)
    with epicbox.working_directory() as workdir:
        if lang == 'python':
            return epicbox.run(
                'python',
                'python3 main.py',
                files=[{'name': 'main.py', 'content': code}],
                limits=LIMITS,
                workdir=workdir,
            )
        if lang == 'text/x-java':
            java_files = [{'name': 'Main.java', 'content': code}]
            compile_result = epicbox.run(
                'java_compile',
                'javac Main.java',
                files=java_files,
                limits=LIMITS,
                workdir=workdir,
            )
            if compile_result['exit_code'] != 0:
                return compile_result
            # The compiled Main.class persists in workdir between runs.
            return epicbox.run(
                'java_run',
                'java Main',
                files=java_files,
                limits=LIMITS,
                workdir=workdir,
            )
        return 'FATAL: Language Unsupported'
def _run_dataset(self, dataset):
    """Run the compiled Main class against one dataset in the sandbox."""
    # NOTE(review): the '{}' placeholder is never substituted, so the JVM
    # receives a literal '-Xmx{}k' argument — this looks like a bug; the
    # memory limit was presumably meant to be formatted in. Confirm.
    command = 'java -Xmx{}k Main'
    # Work on a copy of the configured limits.
    limits = dict(self.limits)
    return epicbox.run(self.profile,
                       command=command,
                       stdin=dataset,
                       limits=limits,
                       workdir=self.workdir)
def test_save_source(self):
    """_save_source uploads the file so 'cat' can read it back from workdir."""
    source = "epic source code here"
    with BaseCodeRunner({}, source) as runner:
        runner._save_source()
        assert runner._is_source_saved
        cat_result = epicbox.run(BaseProfiles.BASE, 'cat epiccode',
                                 workdir=runner.workdir)
        assert cat_result['exit_code'] == 0
        assert cat_result['stdout'].decode() == source + '\n'
def yeetyeetyeet():
    """Grade submitted code: run f(a, b) on addition test cases in the sandbox.

    Expects the raw request body to be a Python module defining f(a, b).
    Returns a dict with either an error message or pass/fail counts.
    """
    if 'run' in session and session['run']:
        return {
            'error': True,
            'msg': 'You already have code running, please wait for it to finish.'
        }
    session['run'] = True
    code = request.data
    # Two fixed sanity cases plus eight random addition cases.
    tests = [(2, 3, 5), (5, 7, 12)]
    for _ in range(8):
        a, b = random.randint(1, 100), random.randint(1, 100)
        tests.append((a, b, a + b))
    cmd = 'from code import f\n'
    outputs = []
    for a, b, ans in tests:
        cmd += f'print(f({a}, {b}))\n'
        outputs.append(str(ans))
    files = [{
        'name': 'flag.txt',
        'content': flag.encode()
    }, {
        'name': 'code.py',
        'content': code
    }]
    limits = {'cputime': 1, 'memory': 16}
    result = epicbox.run('python',
                         command='python3',
                         stdin=cmd,
                         files=files,
                         limits=limits)
    if result['exit_code'] != 0:
        session['run'] = False
        return {
            'error': True,
            'msg': 'Oops! Your code has an error in it. Please try again.'
        }
    actual = result['stdout'].decode().strip().split('\n')
    passes = 0
    fails = 0
    # BUG FIX: guard the lookup into `actual` — a submission that exits 0
    # but prints fewer lines than expected used to raise IndexError here.
    # A missing line now simply counts as a failure; extra printed lines
    # are ignored, as before.
    for i, expected in enumerate(outputs):
        got = actual[i] if i < len(actual) else None
        if expected == got:
            passes += 1
        else:
            fails += 1
    session['run'] = False
    return {'error': False, 'p': passes, 'f': fails}
def sandbox_node(code, stdins=None, callback=print):
    """Run Node.js *code* in the sandbox once per stdin payload.

    :param code: JavaScript source saved as index.js.
    :param stdins: stdin payloads, one run per entry; defaults to a single
        empty stdin. A None default replaces the shared mutable default
        list (`stdins=[""]`), which is a classic Python pitfall.
    :param callback: called with each run's result dict ('type' key added).
    """
    if stdins is None:
        stdins = [""]
    files = [{'name': 'index.js', 'content': code.encode()}]
    limits = {'cputime': 1, 'memory': 64}
    for stdin in stdins:
        result = epicbox.run('node_run', 'node index.js', files=files,
                             limits=limits, stdin=stdin)
        result['type'] = 'run'
        callback(result)
def sandbox_python(code, stdins=None, callback=print):
    """Run Python *code* in the sandbox once per stdin payload.

    :param code: Python source saved as main.py.
    :param stdins: stdin payloads, one run per entry; defaults to a single
        empty stdin. A None default replaces the shared mutable default
        list (`stdins=[""]`), which is a classic Python pitfall.
    :param callback: called with each run's result dict ('type' key added).
    """
    if stdins is None:
        stdins = [""]
    files = [{'name': 'main.py', 'content': code.encode()}]
    limits = {'cputime': 1, 'memory': 64}
    for stdin in stdins:
        result = epicbox.run('python_run', 'python3 main.py', files=files,
                             limits=limits, stdin=stdin)
        result['type'] = 'run'
        callback(result)
def test_save_source(self):
    """Saving the source sets the flag and makes the file readable in workdir."""
    source = "epic source code here"
    with BaseCodeRunner({}, source) as runner:
        runner._save_source()
        assert runner._is_source_saved
        # Read the uploaded file back from the shared working directory.
        # NOTE(review): assumes the runner saves it as 'epiccode' — confirm
        # against BaseCodeRunner's filename handling.
        result = epicbox.run(BaseProfiles.BASE, 'cat epiccode',
                             workdir=runner.workdir)
        assert result['exit_code'] == 0
        assert result['stdout'].decode() == source + '\n'
def compile_code(compiler, flags, prepared_files, wd):
    """Compile the prepared source files into ./main inside the sandbox."""
    # Space-prefixed concatenation of every file name, as the command
    # string expects.
    filestr = ''.join(' ' + entry['name'] for entry in prepared_files)
    compile_str = compiler + ' ' + flags + ' ' + filestr + ' -o main'
    print("COMPILE_STR: " + compile_str)
    comp = epicbox.run('test_code', compile_str, files=prepared_files,
                       workdir=wd)
    print("COMPILED SUCCESSFULLY")
    return comp
def generate_tests(exercise, wd):
    """Run the compiled ./main and record its output as the expected answers.

    Without an inputs_path a single no-stdin run is written to
    'single_test'; otherwise one output file is written per input file,
    under the same name.
    """
    tests = exercise['tests']
    if 'inputs_path' not in tests:
        run = epicbox.run('gcc', './main', workdir=wd)
        single_path = os.path.join(tests['outputs_path'], 'single_test')
        with open(single_path, 'w+') as wfile:
            wfile.write(run['stdout'].decode())
        return
    for inpfile in os.listdir(tests['inputs_path']):
        with open(os.path.join(tests['inputs_path'], inpfile), 'r+') as inp:
            input_value = inp.read()
        with open(os.path.join(tests['outputs_path'], inpfile), 'w+') as wfile:
            run = epicbox.run('gcc', './main', stdin=input_value, workdir=wd)
            wfile.write(run['stdout'].decode())
def _run(file, run_command, run_profile, stdin, workdir,
         other_files) -> ExecutedProcess:
    """
    Executes the file by compiling it if compilation_command is not none. If compilation
    fails, an ExecutedProcess with the tag "compilation" is returned with the stderr streams.
    The code is then run using the run_command returning ExecutedProcess with run tag
    and stdout and stderr stream outputs
    The run profile is the profile of the docker images outlined above
    """
    # BUG FIX: validate run_command before using it. Previously the
    # `"python" in run_command` membership test executed first, so a None
    # run_command raised TypeError and surfaced as the generic
    # "An exception occurred executing the file" instead of this message.
    if run_command is None:
        raise HandinExecutorException("No run_command provided for execution")
    if file is not None:
        with open(file, 'rb') as f:
            code = f.read()
    else:
        file = "not-exists"
        code = b""
    file = os.path.basename(file)
    try:
        # Interpreters invoked without an explicit script argument get the
        # file name appended (e.g. "python3" -> "python3 main.py").
        if ("python" in run_command or "lua" in run_command) \
                and len(run_command.split()) < 2:
            run_command = run_command + " " + file
        files = [{
            'name': file,
            'content': code
        }]  # skip compilation and run file directly
        if other_files is not None:
            _Executor._validate_files_list(other_files)
            files.extend(other_files)
        result = epicbox.run(run_profile,
                             run_command,
                             files=files,
                             workdir=workdir,
                             stdin=stdin)
        return ExecutedProcess(tag="run",
                               stdout=result["stdout"].decode(),
                               stderr=result["stderr"].decode(),
                               exit_code=result["exit_code"],
                               timeout=result["timeout"])
    except Exception as e:
        # Re-raise our own exceptions untouched; wrap anything else.
        if isinstance(e, HandinExecutorException):
            raise e
        raise HandinExecutorException(
            "An exception occurred executing the file") from e
def build_each(exercise, compiler, wd):
    """Compile the exercise sources into ./main, then generate expected outputs."""
    prepared_files = prepare_files(exercise)
    # Space-prefixed concatenation of every file name for the command line.
    names = ''.join(' ' + entry['name'] for entry in prepared_files)
    compile_str = compiler
    if 'flags' in exercise:
        compile_str += ' ' + exercise['flags']
    compile_str += names + ' -o main'
    run = epicbox.run('gcc', compile_str, files=prepared_files, workdir=wd)
    print("COMPILED")
    print(run)
    generate_tests(exercise, wd)
def run_device(user_id, dev_id, firmware_content, q_messages=None, q_exceptions=None):
    """Run device firmware in the sandbox, killing any previous run first.

    Kills any docker container whose `docker ps` line mentions both
    user_id and dev_id, uploads the firmware plus the compiled device API,
    runs it, and reports the outcome (or any crash) through the optional
    q_messages / q_exceptions queues.
    """
    input_files = []
    try:
        # Find and kill a still-running container for this user/device pair.
        docker_ps_result = subprocess.run(DOCKER_PS_COMMAND,
                                          stdout=subprocess.PIPE).stdout.decode('utf-8')
        print("Result of ps: {}".format(docker_ps_result))
        if docker_ps_result:
            for process in docker_ps_result.splitlines():
                if (" " + user_id + " ") in process and (" " + dev_id + " ") in process:
                    docker_kill(process.split()[0])
        input_files.append({'name': 'main.py', 'content': firmware_content})
        # Ship the pre-compiled bytecode instead of the .py source
        # (alternative kept commented out below/above for reference).
        # with open("device/device_api_implementation.py", 'rb') as file:
        with open("device/__pycache__/device_api_implementation.cpython-35.pyc", 'rb') as file:
            api_content = file.read()
        # input_files.append({'name': 'api.py', 'content': api_content})
        input_files.append({'name': 'api.pyc', 'content': api_content})
        print("File device_api_implementation.py loaded! size = {}"
              .format(len(api_content)))
        # Sanity check: both the firmware and the API must be staged.
        if len(input_files) != 2:
            print(input_files)
            print("Files were not loaded!")
            exit(-1)
        result = epicbox.run('python',
                             'python3.5 main.py {} {} {} {}'
                             .format(user_id, dev_id, secure_device_id(dev_id),
                                     get_local_ip()),
                             files=input_files,
                             limits=EPICBOX_LIMITS)
        print(result)
        # Report the outcome to the caller's queues when both are supplied.
        if q_messages and q_exceptions:
            if result['exit_code'] != 0:
                q_messages.put((user_id,
                                "Firmware finished with following error:\n\t{}"
                                .format(result['stderr'].decode('ascii'))))
                q_exceptions.put((user_id, str(result)))
            else:
                q_messages.put((user_id,
                                "Firmware finished work without errors!"))
                q_exceptions.put((user_id, str(result)))
    except Exception as exc:
        # Capture the traceback text so it can travel through the queue.
        exc_buffer = io.StringIO()
        traceback.print_exc(file=exc_buffer)
        if q_messages and q_exceptions:
            q_messages.put((user_id,
                            "Firmware crashed with following exception:\n\t{}"
                            .format(exc.__class__.__name__)))
            q_exceptions.put((user_id, exc_buffer.getvalue()))
        raise exc
    finally:
        pass
def sandbox_csharp(code, stdins=None, callback=print):
    """Compile and run C# *code* under Mono in the sandbox.

    Each stage's result dict gets a 'type' key ('compile' or 'run') and is
    handed to *callback*.

    :param code: C# source saved as main.cs.
    :param stdins: stdin payloads, one run per entry; defaults to a single
        empty stdin. A None default replaces the shared mutable default
        list (`stdins=[""]`), which is a classic Python pitfall.
    :param callback: callable invoked with each stage's result dict.
    """
    if stdins is None:
        stdins = [""]
    files = [{'name': 'main.cs', 'content': code.encode()}]
    compile_limits = {'cputime': 3, 'memory': 128}
    run_limits = {'cputime': 1, 'memory': 64}
    with epicbox.working_directory() as workdir:
        result = epicbox.run('mono_compile', 'csc main.cs', files=files,
                             limits=compile_limits, workdir=workdir)
        result['type'] = 'compile'
        callback(result)
        if result["exit_code"] == 0:
            for stdin in stdins:
                result = epicbox.run('mono_run', 'mono main.exe', files=files,
                                     limits=run_limits, workdir=workdir,
                                     stdin=stdin)
                result['type'] = 'run'
                callback(result)
def execute_python(untrusted_code, input_values, limits):
    """Run untrusted Python code in a sandbox once per stdin value.

    :param untrusted_code: Python source saved as main.py.
    :param input_values: iterable of stdin payloads, one sandbox run each.
    :param limits: epicbox resource limits applied to each run.
    :return: list of epicbox result dicts, one per input value.
    """
    sandbox_files = [{"name": "main.py", "content": untrusted_code}]
    with epicbox.working_directory() as workdir:
        return [
            epicbox.run(
                "python",
                "python3 main.py",
                files=sandbox_files,
                workdir=workdir,
                stdin=stdin_value,
                limits=limits,
            ) for stdin_value in input_values
        ]
def run_programs():
    """Handle a run request: execute the posted program in the sandbox.

    Expects a JSON body with 'program' and 'language' keys; returns the
    epicbox result with its byte streams decoded for serialization.
    """
    data = request.get_json()
    program = data['program']
    language = data['language']
    commands = {'rust': 'rustc main.rs && RUST_BACKTRACE=1 ./main'}
    response = epicbox.run(
        language,
        commands[language],
        files=[{'name': 'main.rs', 'content': program.encode('utf-8')}],
        limits={'cputime': 5, 'memory': 64})
    # Decode the captured byte streams so the dict is JSON-serializable.
    for stream in ('stdout', 'stderr'):
        response[stream] = response[stream].decode('utf-8')
    return response
def docker_run(profile, command, files, limits):
    """Run *command* in the docker sandbox selected by *profile*.

    :param profile: the profile linked to the docker image
    :type profile: str
    :param command: the shell command executed in the docker
    :type command: str
    :param files: The files to copy in the sandbox
    :type files: list
    :param limits: limits to use that will rules the sandbox
    :type limits: dict
    :return: a dictionary with all the contents produce by the sandbox
    :rtype: dict
    """
    outcome = epicbox.run(profile, command, files=files, limits=limits)
    # Decode the captured byte streams for downstream consumers.
    for stream in ("stdout", "stderr"):
        outcome[stream] = outcome[stream].decode("utf-8")
    return outcome
def run(self, source_code: str, input_json: str) -> dict:
    '''
    Run *source_code* as main.py in the sandbox, feeding *input_json* on stdin.

    @return {
        'exit_code': 0,
        'stdout': b"{'test': 1}\n",
        'stderr': b'',
        'duration': 0.067299,
        'timeout': False,
        'oom_killed': False
    }
    '''
    files = [{'name': 'main.py', 'content': source_code.encode('UTF-8')}]
    limits = {'cputime': self.CPU_TIME, 'memory': self.MEMORY}
    # SECURITY FIX: input_json used to be spliced into a shell here-doc
    # ("python3 main.py <<EOS ... EOS"), so a payload containing an 'EOS'
    # line could break out of the document and inject shell commands.
    # Deliver it through epicbox's stdin parameter instead; the trailing
    # newline reproduces what the here-doc fed the program.
    result = epicbox.run('python', 'python3 main.py',
                         files=files,
                         limits=limits,
                         stdin=(input_json + '\n').encode('UTF-8'))
    return result
def graph_from_external_file(keras_code, general_code):
    """Get a graph from an external file defining the neural network.

    Arguments:
        keras_code {String} -- the keras code defining the network
        general_code {String} -- the keras code used to load the network

    Returns:
        object -- the result of this translation, can be an error

    Raises:
        Exception: if the sandboxed run printed a Python traceback on stderr.
    """
    files = [{
        'name': 'model.py',
        'content': keras_code
    }, {
        'name': 'main.py',
        'content': general_code
    }]
    # Large limits for the sandboxed model-building run.
    limits = {'cputime': 100, 'memory': 2000}
    result = epicbox.run('python', 'python3 main.py', files=files, limits=limits)
    # A traceback on stderr means the user code failed inside the sandbox.
    if b'Traceback' in result["stderr"]:
        raise Exception(result["stderr"].decode('utf-8'))
    # main.py is expected to print the model's JSON description on stdout;
    # parse it both as raw JSON and as a keras model.
    model_json = json.loads(result["stdout"])
    model_keras = keras.models.model_from_json(result["stdout"])
    layers_extracted = model_json['config']['layers']
    graph = Graph()
    graph.set_input_shape(
        layers_extracted[0]['config']['batch_input_shape'][1:])
    previous_node = ''
    for index, json_layer in enumerate(layers_extracted):
        # When the JSON lists more layers than the keras model exposes,
        # shift the keras index back by one and skip the first JSON entry.
        # NOTE(review): presumably this compensates for an input layer that
        # keras folds away — confirm.
        if len(layers_extracted) > len(model_keras.layers):
            index = index - 1
        if index >= 0:
            previous_node = add_layer_type(json_layer,
                                           model_keras.layers[index], graph,
                                           previous_node)
    graph.resolve_input_names()
    return graph
#!/usr/bin/env python3
"""Minimal epicbox demo: run print(42) inside the python sandbox profile."""
import epicbox

epicbox.configure(
    profiles=[
        epicbox.Profile('base', 'stepic/epicbox-base'),
        epicbox.Profile('python', 'stepic/epicbox-python',
                        network_disabled=False),
    ],
    selinux_enforced=False,
)

demo_files = [{'name': 'main.py', 'content': b'print(42)\n'}]
outcome = epicbox.run('python', 'python3 main.py',
                      files=demo_files,
                      limits={'cputime': 1})
print(outcome)
def _compile_source(self):
    """Compile the uploaded source in the sandbox using the compile profile.

    :return: epicbox result dict for the compilation run.
    """
    return epicbox.run(
        self.compile_profile,
        command=self.compile_command,
        limits=settings.EPICBOX_COMPILE_LIMITS,
        workdir=self.workdir,
    )
def _run_dataset(self, dataset):
    """Execute the prepared command against one dataset fed via stdin.

    :param dataset: stdin payload for the program.
    :return: epicbox result dict.
    """
    return epicbox.run(
        self.profile,
        command=self.command,
        stdin=dataset,
        limits=self.limits,
        workdir=self.workdir,
    )