def check(process_output, judge_output, precision, **kwargs):
    """Compare outputs token-by-token, allowing floats to differ by absolute epsilon.

    Non-float judge tokens must match verbatim.  A submission token that fails
    to parse as a float where a float is expected is a wrong answer, as is NaN.

    Fix: the original used bare ``except:`` clauses, which also swallow
    KeyboardInterrupt/SystemExit; narrowed to ValueError without changing the
    wrong-answer semantics.
    """
    process_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(process_output))))
    judge_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(judge_output))))
    if len(process_lines) != len(judge_lines):
        return False
    epsilon = 10 ** -int(precision)
    for process_line, judge_line in zip(process_lines, judge_lines):
        process_tokens = process_line.split()
        judge_tokens = judge_line.split()
        if len(process_tokens) != len(judge_tokens):
            return False
        for process_token, judge_token in zip(process_tokens, judge_tokens):
            try:
                judge_float = float(judge_token)
            except ValueError:
                # Non-numeric judge token: require an exact match.
                if process_token != judge_token:
                    return False
            else:
                try:
                    process_float = float(process_token)
                except ValueError:
                    # Judge expects a float here; the submission's token isn't one.
                    return False
                # since process_float can be nan, this is NOT equivalent to (process_float - judge_float) > epsilon
                # the code below will always reject nan, even if judge_float is nan
                if not abs(process_float - judge_float) <= epsilon:
                    return False
    return True
def check(process_output: bytes, judge_output: bytes, point_value: float = 1,
          point_distribution: List[int] = [1], filler_lines_required: bool = True,
          **kwargs) -> Union[CheckerResult, bool]:
    """Award per-line points according to ``point_distribution``.

    Each judge line carries the weight at the same index in
    ``point_distribution``; a submission line earns its weight only by matching
    exactly.  Returns a CheckerResult scaled to ``point_value``.

    Fix: ``sum(point_distribution)`` was computed twice; hoisted into a single
    ``total``.  (The mutable default is retained for interface compatibility;
    it is never mutated here.)

    Raises InternalError when the distribution length or total is invalid.
    """
    judge_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(judge_output))))

    if len(judge_lines) != len(point_distribution):
        raise InternalError('point distribution length must equal to judge output length')

    total = sum(point_distribution)
    if total == 0:
        raise InternalError('sum of point distribution must be positive')

    process_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(process_output))))

    # When filler lines are required, a line-count mismatch is an outright reject.
    if filler_lines_required and len(process_lines) != len(judge_lines):
        return False

    points = 0
    for process_line, judge_line, line_points in zip(process_lines, judge_lines, point_distribution):
        if process_line == judge_line:
            points += line_points

    return CheckerResult(points > 0, point_value * (points / total))
def check(process_output, judge_output, split_on='lines', **kwargs):
    """Order-insensitive comparison.

    In 'lines' mode, each line is tokenized and the multiset of token-lists
    must match; in 'whitespace' mode, the multiset of all tokens must match.
    Raises InternalError for an unknown ``split_on`` mode.
    """
    split_pattern = {
        'lines': b'[\r\n]',
        'whitespace': b'[\s]',
    }.get(split_on)

    if not split_pattern:
        raise InternalError('invalid `split_on` mode')

    process_lines = [chunk for chunk in resplit(split_pattern, utf8bytes(process_output)) if chunk]
    judge_lines = [chunk for chunk in resplit(split_pattern, utf8bytes(judge_output)) if chunk]

    if len(process_lines) != len(judge_lines):
        return False

    if split_on == 'lines':
        process_lines = [line.split() for line in process_lines]
        judge_lines = [line.split() for line in judge_lines]

    # Lengths are equal, so sorted-sequence equality is exactly pairwise equality.
    return sorted(process_lines) == sorted(judge_lines)
def launch(self, *args, **kwargs):
    """Run the program under the sandbox, returning a SecurePopen.

    Copies the setbufsize.so agent into the submission directory and
    LD_PRELOADs it so stdout/stderr buffer sizes can be tuned through the
    CPTBOX_* environment variables below.
    """
    agent = self._file('setbufsize.so')
    shutil.copyfile(setbufsize_path, agent)
    env = {
        # Forward LD_LIBRARY_PATH for systems (e.g. Android Termux) that require
        # it to find shared libraries
        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', ''),
        'LD_PRELOAD': agent,
        # May be None when the caller supplies no sizes — presumably the agent
        # falls back to its defaults then; TODO confirm against setbufsize.so.
        'CPTBOX_STDOUT_BUFFER_SIZE': kwargs.get('stdout_buffer_size'),
        'CPTBOX_STDERR_BUFFER_SIZE': kwargs.get('stderr_buffer_size'),
    }
    # Executor-specific environment takes precedence over the entries above.
    env.update(self.get_env())
    return SecurePopen(
        [utf8bytes(a) for a in self.get_cmdline() + list(args)],
        executable=utf8bytes(self.get_executable()),
        security=self.get_security(launch_kwargs=kwargs),
        address_grace=self.get_address_grace(),
        personality=self.personality,
        fds=kwargs.get('fds'),
        time=kwargs.get('time'),
        memory=kwargs.get('memory'),
        wall_time=kwargs.get('wall_time'),
        stderr=(PIPE if kwargs.get('pipe_stderr', False) else None),
        env=env,
        cwd=utf8bytes(self._dir),
        nproc=self.get_nproc())
def check(process_output: bytes, judge_output: bytes, point_value: float, feedback: bool = True,
          match: Callable[[bytes, bytes], bool] = lambda p, j: p.strip() == j.strip(),
          **kwargs) -> Union[CheckerResult, bool]:
    """Grant partial credit per judge line that the configurable ``match``
    predicate accepts; full credit requires every line to match."""
    process_lines = [line for line in resplit(b'[\r\n]', utf8bytes(process_output)) if line]
    judge_lines = [line for line in resplit(b'[\r\n]', utf8bytes(judge_output)) if line]

    if len(process_lines) > len(judge_lines):
        return False
    if not judge_lines:
        return True

    if isinstance(match, str):
        # NOTE(security): evaluates judge-side checker configuration, not submitter input.
        match = eval(match)

    cases = [verdict[0]] * len(judge_lines)
    correct = 0
    for index, (ours, expected) in enumerate(zip(process_lines, judge_lines)):
        if match(ours, expected):
            cases[index] = verdict[1]
            correct += 1

    fraction = 1.0 * correct / len(judge_lines)
    return CheckerResult(correct == len(judge_lines), point_value * fraction,
                         ''.join(cases) if feedback else "")
def check(process_output: bytes, judge_output: bytes, point_value: float,
          feedback: bool = True, **kwargs) -> Union[CheckerResult, bool]:
    """Grant partial credit per judge line matched (ignoring surrounding
    whitespace), attaching per-case verdict feedback when requested."""
    process_lines = [line for line in resplit(b'[\r\n]', utf8bytes(process_output)) if line]
    judge_lines = [line for line in resplit(b'[\r\n]', utf8bytes(judge_output)) if line]

    if len(process_lines) > len(judge_lines):
        return False
    if not judge_lines:
        return True

    cases = [verdict[0]] * len(judge_lines)
    count = 0
    for index, (ours, expected) in enumerate(zip(process_lines, judge_lines)):
        if ours.strip() == expected.strip():
            cases[index] = verdict[1]
            count += 1

    if feedback:
        extended = 'Case Feedback:\n' + ''.join(cases)
    else:
        extended = ''
    return CheckerResult(count == len(judge_lines), point_value * count / len(judge_lines),
                         extended_feedback=extended)
def check(process_output, judge_output, split_on='lines', **kwargs):
    """Order-insensitive comparison in 'lines' or 'whitespace' mode.

    'lines': compare the sorted list of per-line token lists.
    'whitespace': compare the sorted list of all tokens.
    """
    split_pattern = {
        'lines': b'[\r\n]',
        'whitespace': b'[\s]',
    }.get(split_on)

    if not split_pattern:
        raise InternalError('invalid `split_on` mode')

    def clean(output):
        # Split per the selected pattern, dropping empty chunks.
        return [chunk for chunk in resplit(split_pattern, utf8bytes(output)) if chunk]

    process_lines = clean(process_output)
    judge_lines = clean(judge_output)

    if len(process_lines) != len(judge_lines):
        return False

    if split_on == 'lines':
        process_lines = [six.binary_type.split(line) for line in process_lines]
        judge_lines = [six.binary_type.split(line) for line in judge_lines]

    for ours, theirs in zip(sorted(process_lines), sorted(judge_lines)):
        if ours != theirs:
            return False
    return True
def launch(self, *args, **kwargs):
    """Run the program under the sandbox, returning a SecurePopen.

    LD_PRELOADs the setbufsize.so agent (copied into the submission
    directory) so the CPTBOX_* variables below can tune stdio buffering.
    """
    agent = self._file('setbufsize.so')
    shutil.copyfile(setbufsize_path, agent)
    env = {
        'LD_PRELOAD': agent,
        # May be None when the caller does not pass sizes — presumably the
        # agent then uses its own defaults; TODO confirm.
        'CPTBOX_STDOUT_BUFFER_SIZE': kwargs.get('stdout_buffer_size'),
        'CPTBOX_STDERR_BUFFER_SIZE': kwargs.get('stderr_buffer_size'),
    }
    # Executor-specific environment takes precedence over the entries above.
    env.update(self.get_env())
    return SecurePopen(
        [utf8bytes(a) for a in self.get_cmdline() + list(args)],
        executable=utf8bytes(self.get_executable()),
        security=self.get_security(launch_kwargs=kwargs),
        address_grace=self.get_address_grace(),
        personality=self.personality,
        time=kwargs.get('time'),
        memory=kwargs.get('memory'),
        wall_time=kwargs.get('wall_time'),
        stderr=(PIPE if kwargs.get('pipe_stderr', False) else None),
        env=env,
        cwd=utf8bytes(self._dir),
        nproc=self.get_nproc())
def check(process_output, judge_output, pe_allowed=True, **kwargs):
    """Byte-exact comparison; when only whitespace differs and ``pe_allowed``,
    attach a presentation-error hint to the rejection."""
    if process_output == judge_output:
        return True
    feedback = None
    if pe_allowed and standard(utf8bytes(judge_output), utf8bytes(process_output)):
        # The standard (whitespace-insensitive) checker would have accepted this,
        # so the mismatch is purely presentational.
        feedback = "Presentation Error, check your whitespace"
    return CheckerResult(False, 0, feedback=feedback)
def check(process_output, judge_output, Counter=Counter, regex=re.compile(br'\s+'), **kwargs):
    """Accept iff both outputs contain the same multiset of characters,
    ignoring all whitespace and letter case."""
    def normalize(output):
        # Strip every whitespace run, then case-fold the remaining bytes.
        return regex.sub(b'', utf8bytes(output)).lower()

    return Counter(normalize(process_output)) == Counter(normalize(judge_output))
def check(process_output: bytes, judge_output: bytes, pe_allowed: bool = True,
          **kwargs) -> Union[CheckerResult, bool]:
    """Byte-exact comparison with an optional presentation-error hint when
    only whitespace differs."""
    if process_output == judge_output:
        return True
    feedback = None
    if pe_allowed and standard(utf8bytes(judge_output), utf8bytes(process_output)):
        # The standard checker would accept this output, so only whitespace differs.
        feedback = "Presentation Error, check your whitespace"
    return CheckerResult(False, 0, feedback=feedback)
def handle_compile_error(self, output):
    """Map well-known javac failures onto friendlier (Spanish) messages;
    otherwise re-raise with the raw compiler output."""
    output_bytes = utf8bytes(output)
    if b'symbol: class Scanner' in output_bytes:
        # Missing `import java.util.Scanner;` (or the rest of the imports).
        raise CompileError('Te has olvidado de importar el Scanner. Has de copiar también todos los imports \n')
    if b'is public, should be declared in a file named' in output_bytes:
        # More than one public class in a single source file.
        raise CompileError('Solo debe haber UNA clase pública. \n')
    raise CompileError(output)
def check(process_output, judge_output, pe_allowed=True, **kwargs):
    """Byte-exact comparison; flags a presentation error when the standard
    whitespace-insensitive checker would have accepted the output."""
    if process_output == judge_output:
        return True
    feedback = None
    if pe_allowed and standard(utf8bytes(judge_output), utf8bytes(process_output)):
        # Content matches modulo whitespace — presentation error, not wrong answer.
        feedback = "Presentation Error"
    return CheckerResult(False, 0, feedback=feedback)
def create_compile_process(self, args: List[str]) -> TracedPopen:
    """Spawn the compiler under the sandbox with all stdio attached to a pty.

    Returns a TracedPopen whose ``stderr`` stream yields the merged
    stdout+stderr of the compiler (both share one pty slave), wrapped so
    pty read errors surface as EOF rather than crashing communicate().
    """
    # Some languages may insist on providing certain functionality (e.g. colored highlighting of errors) if they
    # feel they are connected to a terminal. Some are more persistent than others in enforcing this, so this hack
    # aims to provide a convincing-enough lie to the runtime so that it starts singing in color.
    #
    # Emulate the streams of a process connected to a terminal: stdin, stdout, and stderr are all ptys.
    _master, _slave = pty.openpty()
    # Some runtimes *cough cough* Swift *cough cough* actually check the environment variables too.
    env = self.get_compile_env() or os.environ.copy()
    env['TERM'] = 'xterm'

    # Instruct compilers to put their temporary files into the submission directory,
    # so that we can allow it as writeable, rather than of all of /tmp.
    assert self._dir is not None
    env['TMPDIR'] = self._dir

    proc = TracedPopen(
        [utf8bytes(a) for a in args],
        **{
            'executable': utf8bytes(args[0]),
            'security': self.get_compiler_security(),
            'stderr': _slave,
            'stdout': _slave,
            'stdin': _slave,
            'cwd': utf8bytes(self._dir),
            'env': env,
            # nproc=-1 — presumably means "no process-count limit" for the
            # compiler; TODO confirm against the sandbox implementation.
            'nproc': -1,
            'fsize': self.executable_size,
            'time': self.compiler_time_limit or 0,
            'memory': 0,
            # Per-executor overrides win over the defaults above.
            **self.get_compile_popen_kwargs(),
        },
    )

    class io_error_wrapper:
        """
        Wrap pty-related IO errors so that we don't crash Popen.communicate()
        """

        def __init__(self, io: IO) -> None:
            self.io = io

        def read(self, *args, **kwargs):
            try:
                return self.io.read(*args, **kwargs)
            except (IOError, OSError):
                # Present read failures (e.g. once the child exits) as EOF.
                return b''

        def __getattr__(self, attr):
            # Delegate everything else to the wrapped file object.
            return getattr(self.io, attr)

    # Since stderr and stdout are connected to the same slave pty, proc.stderr will contain the merged stdout
    # of the process as well.
    proc.stderr = io_error_wrapper(os.fdopen(_master, 'rb'))  # type: ignore
    os.close(_slave)
    return proc
def launch(self, *args, **kwargs) -> TracedPopen:
    """Run the submission under the sandbox, returning a TracedPopen.

    Supports per-case symlinks via ``kwargs['symlinks']`` (restricted to the
    submission directory), selects the proper setbufsize agent ABI, and
    forwards resource limits and stdio handles.
    """
    assert self._dir is not None
    for src, dst in kwargs.get('symlinks', {}).items():
        src = os.path.abspath(os.path.join(self._dir, src))
        # Disallow the creation of symlinks outside the submission directory.
        if os.path.commonprefix([src, self._dir]) == self._dir:
            # If a link already exists under this name, it's probably from a
            # previous case, but might point to something different.
            if os.path.islink(src):
                os.unlink(src)
            os.symlink(dst, src)
        else:
            raise InternalError('cannot symlink outside of submission directory')

    agent = self._file('setbufsize.so')
    # Hardcode the ABIs for different executors for now
    if self.name == 'CPP17X':
        shutil.copyfile(setbufsize32_path, agent)
    elif self.name == 'TUR':
        shutil.copyfile(setbufsize86_path, agent)
    else:
        shutil.copyfile(setbufsize_path, agent)
    env = {
        # Forward LD_LIBRARY_PATH for systems (e.g. Android Termux) that require
        # it to find shared libraries
        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', ''),
        'LD_PRELOAD': agent,
        # May be None when unset — presumably the agent uses its own defaults; TODO confirm.
        'CPTBOX_STDOUT_BUFFER_SIZE': kwargs.get('stdout_buffer_size'),
        'CPTBOX_STDERR_BUFFER_SIZE': kwargs.get('stderr_buffer_size'),
    }
    # Executor-specific environment takes precedence over the entries above.
    env.update(self.get_env())

    executable = self.get_executable()
    assert executable is not None
    return TracedPopen(
        [utf8bytes(a) for a in self.get_cmdline(**kwargs) + list(args)],
        executable=utf8bytes(executable),
        security=self.get_security(launch_kwargs=kwargs),
        address_grace=self.get_address_grace(),
        data_grace=self.data_grace,
        personality=self.personality,
        time=kwargs.get('time', 0),
        memory=kwargs.get('memory', 0),
        wall_time=kwargs.get('wall_time'),
        stdin=kwargs.get('stdin'),
        stdout=kwargs.get('stdout'),
        stderr=kwargs.get('stderr'),
        env=env,
        cwd=utf8bytes(self._dir),
        nproc=self.get_nproc(),
        fsize=self.fsize,
    )
def check(process_output, judge_output, precision=6, error_mode='default', **kwargs):
    """Float-aware comparison with a selectable error mode.

    ``error_mode`` selects absolute, relative, or default tolerance checking;
    non-float judge tokens must match verbatim.  Raises InternalError for an
    unknown mode.

    Fix: the original's bare ``except:`` clauses also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ValueError while preserving the
    wrong-answer semantics for unparseable submission tokens.
    """
    # Discount empty lines
    process_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(process_output))))
    judge_lines = list(filter(None, resplit(b'[\r\n]', utf8bytes(judge_output))))

    if len(process_lines) != len(judge_lines):
        return False

    verify_float = {
        'absolute': verify_absolute,
        'relative': verify_relative,
        'default': verify_default,
    }.get(error_mode)
    if not verify_float:
        raise InternalError('invalid `error_mode` value')

    epsilon = 10 ** -int(precision)

    for process_line, judge_line in zip(process_lines, judge_lines):
        process_tokens = process_line.split()
        judge_tokens = judge_line.split()

        if len(process_tokens) != len(judge_tokens):
            return False

        for process_token, judge_token in zip(process_tokens, judge_tokens):
            # Allow mixed tokens, for lines like "abc 0.68 def 0.70"
            try:
                judge_float = float(judge_token)
            except ValueError:
                # If it's not a float the token must match exactly
                if process_token != judge_token:
                    return False
            else:
                try:
                    process_float = float(process_token)
                except ValueError:
                    # A float is expected here, but the submission token isn't one.
                    return False
                if not verify_float(process_float, judge_float, epsilon):
                    return False
    return True
def check(process_output, judge_output, **kwargs):
    """Line-by-line comparison ignoring trailing whitespace; when the
    ``filter_new_line`` option is truthy, blank lines are dropped first."""
    process_lines = resplit(b'[\r\n]', utf8bytes(process_output))
    judge_lines = resplit(b'[\r\n]', utf8bytes(judge_output))

    if kwargs.get('filter_new_line'):
        process_lines = [line for line in process_lines if line]
        judge_lines = [line for line in judge_lines if line]

    if len(process_lines) != len(judge_lines):
        return False

    return all(ours.rstrip() == expected.rstrip()
               for ours, expected in zip(process_lines, judge_lines))
def handle_compile_error(self, output):
    """Replace javac's public-class/filename complaint with a clearer message;
    otherwise re-raise with the raw compiler output."""
    if b'is public, should be declared in a file named' in utf8bytes(output):
        raise CompileError('Public Class Error - Please use exactly one public class in your submission.\n')
    raise CompileError(output)
def create_files(self, problem_id: str, source_code: bytes, *args, **kwargs) -> None:
    """Write the submission source to its conventionally named file and
    remember the path in ``self._code``."""
    filename = self.source_filename_format.format(problem_id=problem_id, ext=self.ext)
    self._code = self._file(filename)
    with open(self._code, 'wb') as source_file:
        source_file.write(utf8bytes(source_code))
def __call__(self, *args, **kwargs):
    """Construct the executor, reusing a previously compiled binary when the
    ``cached`` kwarg is set and the cache entry is still valid."""
    is_cached = kwargs.get('cached')
    if is_cached:
        kwargs['dest_dir'] = env.compiled_binary_cache_dir

    # Finish running all constructors before compiling.
    obj = super(CompiledExecutorMeta, self).__call__(*args, **kwargs)
    obj.is_cached = is_cached

    # Before writing sources to disk, check if we have this executor in our cache.
    if is_cached:
        key_material = obj.__class__.__name__ + obj.__module__ + obj.get_binary_cache_key()
        cache_key = hashlib.sha384(utf8bytes(key_material)).hexdigest()
        if cache_key in self.compiled_binary_cache:
            cached_executor = self.compiled_binary_cache[cache_key]
            # Minimal sanity checking: is the file still there? If not, we'll just recompile.
            if os.path.isfile(cached_executor._executable):
                obj._executable = cached_executor._executable
                obj._dir = cached_executor._dir
                return obj

    obj.create_files(*args, **kwargs)
    obj.compile()

    if is_cached:
        self.compiled_binary_cache[cache_key] = obj

    return obj
def oom_score_adj(score: int, to: Optional[int] = None) -> None:
    """Set the OOM-killer score adjustment for a process (self when ``to`` is None).

    Fix: previously raised a bare, message-less ``OSError()`` on a bad score,
    which gave no hint about what went wrong; the exception type is unchanged
    for callers, but it now carries context.
    """
    if not (OOM_SCORE_ADJ_MIN <= score <= OOM_SCORE_ADJ_MAX):
        raise OSError(f'oom_score_adj must be in [{OOM_SCORE_ADJ_MIN}, {OOM_SCORE_ADJ_MAX}], got {score}')
    with open(f'/proc/{"self" if to is None else to}/oom_score_adj', 'wb') as f:
        f.write(utf8bytes(str(score)))
def _grade_cases(self) -> Generator[Tuple[IPC, tuple], None, None]:
    """Yield IPC messages for grading the current submission.

    On compilation failure, yields COMPILE_ERROR and stops; otherwise yields
    an optional COMPILE_MESSAGE (compiler warnings) and GRADING_BEGIN, then
    flattens batched cases into (batch_number, case) pairs for grading.
    """
    problem = Problem(self.submission.problem_id, self.submission.time_limit,
                      self.submission.memory_limit, self.submission.meta)

    try:
        self.grader = problem.grader_class(self, problem, self.submission.language,
                                           utf8bytes(self.submission.source))
    except CompileError as compilation_error:
        # args[0] may be empty/None — fall back to a generic message.
        error = compilation_error.args[0] or b'compiler exited abnormally'
        yield IPC.COMPILE_ERROR, (error,)
        return
    else:
        binary = self.grader.binary
        # Surface compiler warnings, when the executor captured any.
        if hasattr(binary, 'warning') and binary.warning is not None:
            yield IPC.COMPILE_MESSAGE, (binary.warning,)

    yield IPC.GRADING_BEGIN, (self.grader.is_pretested,)

    flattened_cases: List[Tuple[Optional[int], Union[TestCase, BatchedTestCase]]] = []
    batch_number = 0
    for case in self.grader.cases():
        if isinstance(case, BatchedTestCase):
            # All cases inside a batch share the same 1-based batch number.
            batch_number += 1
            for batched_case in case.batched_cases:
                flattened_cases.append((batch_number, batched_case))
        else:
            # Standalone cases carry no batch number.
            flattened_cases.append((None, case))
def __call__(self, *args, **kwargs) -> 'CompiledExecutor':
    """Create a CompiledExecutor, short-circuiting compilation through the
    binary cache when ``cached=True`` was requested."""
    is_cached: bool = kwargs.pop('cached', False)
    if is_cached:
        kwargs['dest_dir'] = env.compiled_binary_cache_dir

    # All constructors must finish before we attempt compilation.
    obj: 'CompiledExecutor' = super().__call__(*args, **kwargs)
    obj.is_cached = is_cached

    # Before writing sources to disk, consult the compiled-binary cache.
    if is_cached:
        digest_input = utf8bytes(obj.__class__.__name__ + obj.__module__) + obj.get_binary_cache_key()
        cache_key = hashlib.sha384(digest_input).hexdigest()
        if cache_key in self.compiled_binary_cache:
            cached_executor = self.compiled_binary_cache[cache_key]
            assert cached_executor._executable is not None
            # Minimal sanity check: if the cached binary vanished, just recompile.
            if os.path.isfile(cached_executor._executable):
                obj._executable = cached_executor._executable
                obj._dir = cached_executor._dir
                return obj

    obj.create_files(*args, **kwargs)
    obj.compile()

    if is_cached:
        self.compiled_binary_cache[cache_key] = obj

    return obj
def run_self_test(cls, sandbox=True, output=True, error_callback=None):
    """Smoke-test this executor by running its built-in echo test program.

    Returns True when the runtime echoed the test message with no stderr.
    Anomalies are reported via ``error_callback`` when provided; any
    exception during the test is treated as failure.
    """
    if not cls.test_program:
        # Nothing to test for this executor: trivially pass.
        return True

    if output:
        print(ansi_style("%-39s%s" % ('Self-testing #ansi[%s](|underline):' % cls.get_executor_name(), '')), end=' ')
    try:
        executor = cls(cls.test_name, utf8bytes(cls.test_program))
        proc = executor.launch(time=cls.test_time, memory=cls.test_memory) if sandbox else executor.launch_unsafe()
        test_message = b'echo: Hello, World!'
        stdout, stderr = proc.communicate(test_message + b'\n')
        res = stdout.strip() == test_message and not stderr
        if output:
            # Cache the versions now, so that the handshake packet doesn't take ages to generate
            cls.get_runtime_versions()
            print(ansi_style(['#ansi[Failed](red|bold)', '#ansi[Success](green|bold)'][res]))
        if stdout.strip() != test_message and error_callback:
            error_callback('Got unexpected stdout output:\n' + utf8text(stdout))
        if stderr:
            if error_callback:
                error_callback('Got unexpected stderr output:\n' + utf8text(stderr))
            else:
                print(stderr, file=sys.stderr)
        if hasattr(proc, 'protection_fault') and proc.protection_fault:
            print_protection_fault(proc.protection_fault)
        return res
    except Exception:
        if output:
            print(ansi_style('#ansi[Failed](red|bold)'))
            traceback.print_exc()
        if error_callback:
            error_callback(traceback.format_exc())
        return False
def check(process_output, judge_output, **kwargs):
    """Line-by-line comparison ignoring trailing whitespace; optionally drops
    blank lines when the ``filter_new_line`` option is enabled.

    Fix: the original tested ``'filter_new_line' in kwargs``, so an explicit
    ``filter_new_line=False`` still enabled filtering; the option's value now
    decides (matching the sibling checker that uses ``kwargs.get``).
    """
    process_lines = resplit(b'[\r\n]', utf8bytes(process_output))
    judge_lines = resplit(b'[\r\n]', utf8bytes(judge_output))

    if kwargs.get('filter_new_line'):
        process_lines = list(filter(None, process_lines))
        judge_lines = list(filter(None, judge_lines))

    if len(process_lines) != len(judge_lines):
        return False

    for process_line, judge_line in zip(process_lines, judge_lines):
        if process_line.rstrip() != judge_line.rstrip():
            return False

    return True
def get_binary_cache_key(self) -> bytes:
    """Return bytes that uniquely identify this compilation for the binary
    cache: problem id, command, march flag, all compile/link flags, and the
    raw source contents."""
    command = self.get_command()
    assert command is not None
    parts = [self.problem, command, self.get_march_flag()]
    parts += self.get_defines()
    parts += self.get_flags()
    parts += self.get_ldflags()
    return utf8bytes(''.join(parts)) + b''.join(self.source_dict.values())
def run_self_test(
        cls, output: bool = True,
        error_callback: Optional[Callable[[Any], Any]] = None) -> bool:
    """Smoke-test this executor by running its built-in echo test program.

    Returns True when the runtime echoed the test message with no stderr
    and within the time/memory limits.  Anomalies are reported through
    ``error_callback`` when provided; any exception counts as failure.
    """
    if not cls.test_program:
        # Nothing to test for this executor: trivially pass.
        return True

    if output:
        print_ansi(f'Self-testing #ansi[{cls.get_executor_name()}](|underline):'.ljust(39), end=' ')
    try:
        executor = cls(cls.test_name, utf8bytes(cls.test_program))
        proc = executor.launch(time=cls.test_time, memory=cls.test_memory,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        test_message = b'echo: Hello, World!'
        stdout, stderr = proc.communicate(test_message + b'\n')

        if proc.is_tle:
            print_ansi('#ansi[Time Limit Exceeded](red|bold)')
            return False
        if proc.is_mle:
            print_ansi('#ansi[Memory Limit Exceeded](red|bold)')
            return False

        res = stdout.strip() == test_message and not stderr
        if output:
            # Cache the versions now, so that the handshake packet doesn't take ages to generate
            cls.get_runtime_versions()
            usage = f'[{proc.execution_time:.3f}s, {proc.max_memory} KB]'
            print_ansi(f'{["#ansi[Failed](red|bold) ", "#ansi[Success](green|bold)"][res]} {usage:<19}', end=' ')
            print_ansi(', '.join([
                f'#ansi[{runtime}](cyan|bold) {".".join(map(str, version))}'
                for runtime, version in cls.get_runtime_versions()
            ]))
        if stdout.strip() != test_message and error_callback:
            error_callback('Got unexpected stdout output:\n' + utf8text(stdout))
        if stderr:
            if error_callback:
                error_callback('Got unexpected stderr output:\n' + utf8text(stderr))
            else:
                print(stderr, file=sys.stderr)
        if proc.protection_fault:
            print_protection_fault(proc.protection_fault)
        return res
    except Exception:
        if output:
            print_ansi('#ansi[Failed](red|bold)')
            traceback.print_exc()
        if error_callback:
            error_callback(traceback.format_exc())
        return False
def _generate_binary(self):
    """Compile the submission together with the signature-grader harness.

    Raises InternalError when the submission language has no signature-grading
    runtime.
    """
    siggraders = ('C', 'C11', 'CPP03', 'CPP11', 'CPP14', 'CPP17', 'CLANG', 'CLANGX')

    if self.language not in siggraders:
        raise InternalError('no valid runtime for signature grading %s found' % self.language)

    handler_data = self.problem.config['signature_grader']
    entry_point = self.problem.problem_data[handler_data['entry']]
    header = self.problem.problem_data[handler_data['header']]

    submission_prefix = '#include "%s"\n' % handler_data['header']
    if not handler_data.get('allow_main', False):
        # Hide the user's main() behind a unique name so the grader's main() wins.
        submission_prefix += '#define main main_%s\n' % uuid.uuid4().hex

    aux_sources = {
        self.problem.id + '_submission': utf8bytes(submission_prefix) + self.source,
        handler_data['header']: header,
    }

    return executors[self.language].Executor(
        self.problem.id,
        entry_point,
        aux_sources=aux_sources,
        defines=['-DSIGNATURE_GRADER'],
    )
def handle_compile_error(self, output: bytes):
    """Replace javac's public-class/filename complaint with the judge's
    signature message; otherwise re-raise with the raw compiler output."""
    if b'is public, should be declared in a file named' in utf8bytes(output):
        raise CompileError('You are a troll. Trolls are not welcome. As a judge, I sentence your code to death.\n')
    raise CompileError(output)
def check(process_output, judge_output, **kwargs):
    """Order-insensitive comparison: the multiset of per-line token lists
    must match between the two outputs."""
    process_lines = [line for line in utf8bytes(process_output).split(b'\n') if line]
    judge_lines = [line for line in utf8bytes(judge_output).split(b'\n') if line]

    if len(process_lines) != len(judge_lines):
        return False

    process_tokenized = sorted(six.binary_type.split(line) for line in process_lines)
    judge_tokenized = sorted(six.binary_type.split(line) for line in judge_lines)

    # Lengths are equal, so sorted-sequence equality is exactly pairwise equality.
    return process_tokenized == judge_tokenized
def create_files(self, problem_id, main_source, **kwargs):
    """Write each auxiliary source to disk, appending the executor's extension
    to extensionless names, and record every written path."""
    self.source_paths = []
    for name, source in self.source_dict.items():
        # Names without an extension get the executor's default one.
        filename = name if '.' in name else name + self.ext
        with open(self._file(filename), 'wb') as source_file:
            source_file.write(utf8bytes(source))
        self.source_paths.append(filename)
def __init__(self, judge, problem, language, source):
    """Base grader setup: store the submission context and build its binary.

    Binary generation happens here — callers catch CompileError raised by
    the constructor (see the grader-creation call sites).
    """
    self.source = utf8bytes(source)
    self.language = language
    self.problem = problem
    self.judge = judge
    # May raise CompileError, which propagates out of __init__.
    self.binary = self._generate_binary()
    # Grading-control state, mutated during the grading lifecycle.
    self._terminate_grading = False
    self._current_proc = None
def create_files(self, problem_id: str, source_code: bytes, *args, **kwargs) -> None:
    """Write every auxiliary source to disk, adding '.<ext>' to extensionless
    names, and record the paths for compilation."""
    self.source_paths = []
    for name, source in self.source_dict.items():
        # Names without an extension get '.' plus the executor's default one.
        filename = name if '.' in name else name + '.' + self.ext
        with open(self._file(filename), 'wb') as source_file:
            source_file.write(utf8bytes(source))
        self.source_paths.append(filename)
def create_files(self, problem_id, source_code):
    """Write the submission (as UTF-8 with a BOM) plus the loader shim that
    actually launches it."""
    self._loader = self._file('-loader.py')
    with open(self._code, 'wb') as code_file, open(self._loader, 'w') as loader_file:
        # We want source code to be UTF-8, but the normal (Python 2) way of having
        # "# -*- coding: utf-8 -*-" in header changes line numbers, so we write
        # UTF-8 BOM instead.
        code_file.write(b'\xef\xbb\xbf')
        code_file.write(utf8bytes(source_code))

        script = self.unbuffered_loader_script if self.unbuffered else self.loader_script
        loader_file.write(script)
def judge_proc(need_monitor):
    """Configure logging, process title, signal handlers, and the optional
    monitor/API server, then run the judge loop until shutdown."""
    from dmoj import judgeenv

    logfile = judgeenv.log_file
    try:
        # The configured log path may contain a %s placeholder for the judge id.
        logfile = logfile % env['id']
    except TypeError:
        pass
    logging.basicConfig(filename=logfile, level=logging.INFO,
                        format='%(levelname)s %(asctime)s %(process)d %(module)s %(message)s')

    proctitle = 'DMOJ Judge: %s on %s' % (env['id'], make_host_port(judgeenv))
    # Python 2's setproctitle wants bytes; Python 3's wants text.
    if six.PY2:
        setproctitle(utf8bytes(proctitle))
    else:
        setproctitle(proctitle)

    judge = ClassicJudge(judgeenv.server_host, judgeenv.server_port,
                         secure=judgeenv.secure, no_cert_check=judgeenv.no_cert_check,
                         cert_store=judgeenv.cert_store)

    if need_monitor:
        monitor = Monitor()
        monitor.callback = judge.update_problems
    else:
        monitor = DummyMonitor()

    # SIGUSR2 (where available) triggers a problem-list refresh.
    if hasattr(signal, 'SIGUSR2'):
        def update_problem_signal(signum, frame):
            judge.update_problems()

        signal.signal(signal.SIGUSR2, update_problem_signal)

    if need_monitor and judgeenv.api_listen:
        judge_instance = judge

        class Handler(JudgeControlRequestHandler):
            judge = judge_instance

        api_server = HTTPServer(judgeenv.api_listen, Handler)
        # Daemon thread so the API server never blocks process exit.
        thread = threading.Thread(target=api_server.serve_forever)
        thread.daemon = True
        thread.start()
    else:
        api_server = None

    print()
    with monitor, judge:
        try:
            judge.listen()
        except Exception:
            traceback.print_exc()
        finally:
            judge.murder()
            if api_server:
                api_server.shutdown()
def check(process_output, judge_output, precision=6, error_mode='default', **kwargs):
    """Float-aware comparison with a selectable error mode ('absolute',
    'relative', or 'default'); non-float judge tokens must match verbatim.
    Raises InternalError for an unknown mode."""
    # Blank lines are ignored on both sides.
    process_lines = [line for line in resplit(b'[\r\n]', utf8bytes(process_output)) if line]
    judge_lines = [line for line in resplit(b'[\r\n]', utf8bytes(judge_output)) if line]

    if len(process_lines) != len(judge_lines):
        return False

    verifiers = {
        'absolute': verify_absolute,
        'relative': verify_relative,
        'default': verify_default,
    }
    verify_float = verifiers.get(error_mode)
    if not verify_float:
        raise InternalError('invalid `error_mode` value')

    epsilon = 10 ** -int(precision)

    try:
        for process_line, judge_line in zip(process_lines, judge_lines):
            process_tokens = process_line.split()
            judge_tokens = judge_line.split()

            if len(process_tokens) != len(judge_tokens):
                return False

            # Tokens may mix text and numbers, e.g. "abc 0.68 def 0.70".
            for process_token, judge_token in zip(process_tokens, judge_tokens):
                try:
                    judge_float = float(judge_token)
                except:  # noqa: E722 -- non-float judge tokens must match verbatim
                    if process_token != judge_token:
                        return False
                else:
                    if not verify_float(float(process_token), judge_float, epsilon):
                        return False
    except:  # noqa: E722 -- any parse failure in the submission means wrong answer
        return False
    return True
def _find_exe(path):
    """Resolve *path* to an executable path as UTF-8 bytes.

    Absolute paths pass through, relative paths containing a separator are
    made absolute, and bare names are searched for on PATH.

    Fixes: the absolute/relative branches previously returned the input
    unencoded while the PATH branch returned bytes — all branches now return
    utf8bytes consistently; the OSError now carries a message instead of
    being raised bare.
    """
    if os.path.isabs(path):
        return utf8bytes(path)
    if os.sep in path:
        return utf8bytes(os.path.abspath(path))
    for directory in os.environ.get('PATH', os.defpath).split(os.pathsep):
        candidate = os.path.join(directory, path)
        if os.access(candidate, os.X_OK):
            return utf8bytes(candidate)
    raise OSError('executable not found on PATH: %s' % path)
def __init__(self, debugger, _, args, executable=None, security=None, time=0, memory=0,
             stdin=PIPE, stdout=PIPE, stderr=None, env=None, nproc=0, address_grace=4096,
             data_grace=0, personality=0, cwd='', fds=None, wall_time=None):
    """Configure resource limits and the syscall policy, then start the
    traced child in a worker thread.

    A ``security`` of None disables syscall tracing entirely; otherwise each
    syscall is either whitelisted (int handlers) or routed to a Python
    callback.  When a time limit is set, a "shocker" thread enforces it.
    """
    self._debugger_type = debugger
    self._syscall_index = index = _SYSCALL_INDICIES[debugger]
    self._executable = executable or _find_exe(args[0])
    self._args = args
    self._chdir = cwd
    # Entries with a None value are dropped, so None can mask inherited vars.
    self._env = [utf8bytes('%s=%s' % (arg, val))
                 for arg, val in six.iteritems(env if env is not None else os.environ)
                 if val is not None]
    self._time = time
    # Wall-clock limit defaults to 3x the CPU limit when not given explicitly.
    self._wall_time = time * 3 if wall_time is None else wall_time
    self._cpu_time = time + 5 if time else 0
    self._memory = memory
    self._child_personality = personality
    # Memory figures appear to be KiB scaled to bytes here — TODO confirm units.
    self._child_memory = memory * 1024 + data_grace * 1024
    self._child_address = memory * 1024 + address_grace * 1024 if memory else 0
    self._nproc = nproc
    self._tle = False
    self._fds = fds
    self.__init_streams(stdin, stdout, stderr)
    self.protection_fault = None

    self.debugger._syscall_index = index
    self.debugger.address_bits = 64 if debugger in (DEBUGGER_X64, DEBUGGER_ARM64) else 32

    self._security = security
    self._callbacks = [None] * MAX_SYSCALL_NUMBER
    self._syscall_whitelist = [False] * MAX_SYSCALL_NUMBER
    if security is None:
        # No policy supplied: run the child without syscall tracing.
        self._trace_syscalls = False
    else:
        for i in range(SYSCALL_COUNT):
            handler = security.get(i, DISALLOW)
            # One abstract syscall may map to several platform numbers.
            for call in translator[i][index]:
                if call is None:
                    continue
                if isinstance(handler, int):
                    self._syscall_whitelist[call] = handler == ALLOW
                else:
                    if not callable(handler):
                        raise ValueError('Handler not callable: ' + handler)
                    self._callbacks[call] = handler
                    handler = _CALLBACK
                self._handler(call, handler)

    self._started = threading.Event()
    self._died = threading.Event()
    if time:
        # Spawn thread to kill process after it times out
        self._shocker = threading.Thread(target=self._shocker_thread)
        self._shocker.start()
    self._worker = threading.Thread(target=self._run_process)
    self._worker.start()
def get_grader_from_source(self, grader_class, problem, language, source, report=print):
    """Instantiate a grader for the submission.

    Returns the grader, or None on a compile error (after reporting it), or
    the result of ``internal_error()`` when grader construction itself blew up.

    Fix: the original used a bare ``except:``, which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ``except Exception``.
    """
    try:
        grader = grader_class(self, problem, language, utf8bytes(source))
    except CompileError as ce:
        report(ansi_style('#ansi[Failed compiling submission!](red|bold)'))
        report(ce.args[0].rstrip())  # don't print extra newline
        grader = None
    except Exception:
        # if custom grader failed to initialize, report it to the site
        return self.internal_error()

    return grader
def check(process_output, judge_output, point_value, feedback=True,
          match=lambda p, j: p.strip() == j.strip(), **kwargs):
    """Grant partial credit per judge line accepted by the configurable
    ``match`` predicate; full credit requires every line to match."""
    process_lines = [line for line in resplit(b'[\r\n]', utf8bytes(process_output)) if line]
    judge_lines = [line for line in resplit(b'[\r\n]', utf8bytes(judge_output)) if line]

    if len(process_lines) > len(judge_lines):
        return False
    if not judge_lines:
        return True

    if isinstance(match, six.string_types):
        # NOTE(security): evaluates judge-side checker configuration, not submitter input.
        match = eval(match)

    cases = [verdict[0]] * len(judge_lines)
    correct = 0
    for index, (ours, expected) in enumerate(zip(process_lines, judge_lines)):
        if match(ours, expected):
            cases[index] = verdict[1]
            correct += 1

    fraction = 1.0 * correct / len(judge_lines)
    return CheckerResult(correct == len(judge_lines), point_value * fraction,
                         ''.join(cases) if feedback else "")
def _send_packet(self, packet, rewrite=True):
    """Serialize, compress, and write one packet to the site, adapting the
    id key and sanitizing byte values first."""
    if rewrite and 'submission-id' in packet and self.judge.get_process_type() != 'submission':
        # Rename submission-id to match the actual process type (e.g. invocation-id).
        packet['%s-id' % self.judge.get_process_type()] = packet['submission-id']
        del packet['submission-id']

    for key, value in packet.items():
        if isinstance(value, six.binary_type):
            # Make sure we don't have any garbage utf-8 from e.g. weird compilers
            # *cough* fpc *cough* that could cause this routine to crash
            # We cannot use utf8text because it may not be text.
            packet[key] = value.decode('utf-8', 'replace')

    raw = zlib.compress(utf8bytes(json.dumps(packet)))
    with self._lock:
        self.output.writelines((PacketManager.SIZE_PACK.pack(len(raw)), raw))
def create_files(self, problem_id, source_code, *args, **kwargs): super(JavacExecutor, self).create_files(problem_id, source_code, *args, **kwargs) # This step is necessary because of Unicode classnames try: source_code = utf8text(source_code) except UnicodeDecodeError: raise CompileError('Your UTF-8 is bad, and you should feel bad') class_name = find_class(source_code) self._code = self._file('%s.java' % class_name.group(1)) try: with open(self._code, 'wb') as fo: fo.write(utf8bytes(source_code)) except IOError as e: if e.errno in (errno.ENAMETOOLONG, errno.ENOENT, errno.EINVAL): raise CompileError('Why do you need a class name so long? ' 'As a judge, I sentence your code to death.\n') raise self._class_name = class_name.group(1)
def run_self_test(cls, sandbox=True, output=True, error_callback=None):
    """Smoke-test this executor by running its built-in echo test program.

    Returns True when the runtime echoed the test message with no stderr
    and within the time/memory limits; anomalies go to ``error_callback``
    when provided, and any exception counts as failure.
    """
    if not cls.test_program:
        # Nothing to test for this executor: trivially pass.
        return True

    if output:
        print(ansi_style("%-39s%s" % ('Self-testing #ansi[%s](|underline):' % cls.get_executor_name(), '')), end=' ')
    try:
        executor = cls(cls.test_name, utf8bytes(cls.test_program))
        proc = executor.launch(time=cls.test_time, memory=cls.test_memory) if sandbox else executor.launch_unsafe()
        test_message = b'echo: Hello, World!'
        stdout, stderr = proc.communicate(test_message + b'\n')

        if proc.tle:
            print(ansi_style('#ansi[Time Limit Exceeded](red|bold)'))
            return False
        if proc.mle:
            print(ansi_style('#ansi[Memory Limit Exceeded](red|bold)'))
            return False

        res = stdout.strip() == test_message and not stderr
        if output:
            # Cache the versions now, so that the handshake packet doesn't take ages to generate
            cls.get_runtime_versions()
            usage = '[%.3fs, %d KB]' % (proc.execution_time, proc.max_memory)
            print(ansi_style(['#ansi[Failed](red|bold)', '#ansi[Success](green|bold)'][res]), usage)
        if stdout.strip() != test_message and error_callback:
            error_callback('Got unexpected stdout output:\n' + utf8text(stdout))
        if stderr:
            if error_callback:
                error_callback('Got unexpected stderr output:\n' + utf8text(stderr))
            else:
                print(stderr, file=sys.stderr)
        if hasattr(proc, 'protection_fault') and proc.protection_fault:
            print_protection_fault(proc.protection_fault)
        return res
    except Exception:
        if output:
            print(ansi_style('#ansi[Failed](red|bold)'))
            traceback.print_exc()
        if error_callback:
            error_callback(traceback.format_exc())
        return False
def _generate_binary(self):
    """Compile the submission with the signature-grader handler, sending a
    compile-error packet to the site on failure."""
    siggraders = ('C', 'CPP03', 'CPP0X', 'CPP11', 'CPP14', 'CPP17')

    # Pick the most recent signature-grading executor that is installed.
    for i in reversed(siggraders):
        if i in executors:
            siggrader = i
            break
    else:
        raise CompileError(b"can't signature grade, why did I get this submission?")

    if self.language in siggraders:
        aux_sources = {}
        handler_data = self.problem.config['signature_grader']
        entry_point = self.problem.problem_data[handler_data['entry']]
        header = self.problem.problem_data[handler_data['header']]
        # Rename the user's main() so the handler's main() is the real entry point.
        submission_prefix = (
            '#include "%s"\n'
            '#define main main_%s\n'
        ) % (handler_data['header'], str(uuid.uuid4()).replace('-', ''))
        aux_sources[self.problem.id + '_submission'] = utf8bytes(submission_prefix) + self.source
        aux_sources[handler_data['header']] = header
        entry = entry_point
        # Compile as CPP regardless of what the submission language is
        try:
            # NOTE(review): handler_data['writable'] and handler_data['fds'] raise
            # KeyError when absent — presumably the config always supplies them
            # (cf. .get() used for 'allow_main' elsewhere); confirm.
            return executors[siggrader].Executor(self.problem.id, entry, aux_sources=aux_sources,
                                                 writable=handler_data['writable'] or (1, 2),
                                                 fds=handler_data['fds'],
                                                 defines=['-DSIGNATURE_GRADER'])
        except CompileError as compilation_error:
            self.judge.packet_manager.compile_error_packet(ansi.format_ansi(
                compilation_error.args[0] or 'compiler exited abnormally'
            ))
            # Compile error is fatal
            raise

    self.judge.packet_manager.compile_error_packet('no valid handler compiler exists')
def handle_compile_error(self, output):
    """Replace javac's public-class/filename complaint with the judge's
    signature message; otherwise re-raise with the raw compiler output."""
    if b'is public, should be declared in a file named' in utf8bytes(output):
        raise CompileError('You are a troll. Trolls are not welcome. '
                           'As a judge, I sentence your code to death.\n')
    raise CompileError(output)
def create_files(self, problem_id, source_code, *args, **kwargs):
    """Write the submission to '<problem_id><ext>' and remember the path in
    ``self._code``."""
    self._code = self._file(problem_id + self.ext)
    with open(self._code, 'wb') as source_file:
        source_file.write(utf8bytes(source_code))
def create_files(self, problem_id, source_code):
    """Persist the raw submission bytes to the source path.

    Note: ``self._code`` is expected to be set elsewhere (presumably by a
    subclass or an earlier hook) — TODO confirm.
    """
    with open(self._code, 'wb') as source_file:
        source_file.write(utf8bytes(source_code))