def send(fpath, conn):
    """send the file at @fpath over snc connection @conn, return the
    speed in kb/s

    OFTPError may be raised"""
    # Thin wrappers over the message layer for this connection.
    def _write_msg(m):
        msg.write_msg(conn, m)
    def _read_msg():
        return msg.read_msg(conn)
    def _check_msg(m):
        # Expect the peer's next message to be exactly @m.
        if m != _read_msg():
            log.warning("message check error.")
            raise OFTPError
    try:
        time_start = datetime.datetime.now()
        sha_ctx = hashlib.sha1()
        fsize = os.path.getsize(fpath)
        with open(fpath, "rb") as fptr:
            # Handshake: exchange OFTP_BEGIN, then protocol versions.
            _write_msg(msg.OFTP_BEGIN)
            _check_msg(msg.OFTP_BEGIN)
            _write_msg(_OFTP_VERSION)
            if conn.read_uint32() != _OFTP_VERSION:
                log.warning("version check error.")
                raise OFTPError
            conn.write_uint32(fsize)
            _check_msg(msg.OFTP_TRANS_BEGIN)
            # Stream the file in _PACKET_SIZE chunks, hashing as we go;
            # the last chunk is shrunk so exactly fsize bytes are sent.
            s = 0
            while s < fsize:
                psize = _PACKET_SIZE
                s += psize
                if s > fsize:
                    psize -= s - fsize
                buf = fptr.read(psize)
                sha_ctx.update(buf)
                conn.write(buf)
            _write_msg(msg.OFTP_END)
            _check_msg(msg.OFTP_END)
            # Compare the receiver's SHA-1 digest with ours and report
            # the outcome.  NOTE(review): on a digest mismatch this still
            # falls through and returns a speed instead of raising.
            if conn.read(sha_ctx.digest_size) != sha_ctx.digest():
                _write_msg(msg.OFTP_CHECK_FAIL)
            else:
                _write_msg(msg.OFTP_CHECK_OK)
            return fsize / 1024.0 / _td2seconds(
                datetime.datetime.now() - time_start)
    except EnvironmentError as e:
        # Local I/O failure (open/read/getsize): tell the peer, then fail.
        log.error("error while sending file [errno {0}] [filename {1!r}]: {2}"
                . format(e.errno, e.filename, e.strerror))
        conn.write_uint32(msg.OFTP_SYSTEM_ERROR)
        raise OFTPError
    except snc.Error:
        log.warning("failed to transfer file because of network error.")
        raise OFTPError
def _check_msg(m):
    """Read messages until a non-keepalive arrives and verify it is @m.

    TELL_ONLINE keepalive messages are silently skipped; any other
    message that differs from @m is logged and raises Error."""
    received = _read_msg()
    while received == msg.TELL_ONLINE:
        received = _read_msg()
    if received != m:
        log.warning(
            "message check error: expecting {0}, got {1}".format(
                m, received))
        raise Error
def _clean(self):
    """Release everything this judge connection holds.

    Puts any in-flight task back onto the global queue, frees the
    judge's id from the global id set, and unregisters the judge from
    the website if it was registered there."""
    global _task_queue, _judge_id_set, _judge_id_set_lock
    pending = self._cur_task
    if pending:
        # Re-queue the unfinished task so another judge can pick it up.
        _task_queue.put(pending)
        self._cur_task = None
    judge = self._judge
    if judge.id:
        with _judge_id_set_lock:
            _judge_id_set.remove(judge.id)
    if self._web_registered:
        try:
            web.remove_judge(judge)
        except web.Error:
            log.warning(
                "[judge {0!r}] failed to unregister on website".format(
                    judge.id))
    log.info("[judge {0!r}] disconnected".format(judge.id))
def run(self, var_dict, stdin=None, stdout=None, stderr=None):
    """run the limiter under variables defined in @var_dict

    Note: @var_dict may be changed

    execution result can be accessed via self.exe_status,
    self.exe_time (in microseconds), self.exe_mem (in kb) and
    self.exe_extra_info

    if @stdout and/or @stderr is SAVE_OUTPUT, stdout and/or stderr
    will be stored in self.stdout and self.stderr
    """
    self.stdout = None
    self.stderr = None
    # File-based limiters report results through a temp file whose name
    # is passed via the FILENAME variable; socket-based ones use the
    # limiter's UNIX socket name via SOCKNAME.
    if self._type == _LIMITER_FILE:
        try:
            ftmp = tempfile.mkstemp()
            var_dict["FILENAME"] = ftmp[1]
        except Exception as e:
            log.error(
                "[limiter {0!r}] failed to create temporary file: {1}".
                format(self._name, e))
            raise SysError("limiter communication error")
    else:
        var_dict["SOCKNAME"] = self._socket_name
    try:
        args = eval_arg_list(self._args, var_dict)
    except Exception as e:
        log.error(
            "[limiter {0!r}] failed to evaluate argument: {1}".format(
                self._name, e))
        raise SysError("limiter configuration error")
    log.debug("executing command: {0!r}".format(args))
    try:
        # Map the SAVE_OUTPUT sentinel to subprocess.PIPE so the output
        # can be collected via communicate() below.
        stdout_ = stdout
        if stdout_ is SAVE_OUTPUT:
            stdout_ = subprocess.PIPE
        stderr_ = stderr
        if stderr_ is SAVE_OUTPUT:
            stderr_ = subprocess.PIPE
        p = subprocess.Popen(args, stdin=stdin, stdout=stdout_,
                stderr=stderr_)
    except OSError as e:
        log.error("error while calling Popen [errno {0}] "
                "[filename {1!r}]: {2}".format(e.errno, e.filename,
                    e.strerror))
        raise SysError("failed to execute limiter")
    except Exception as e:
        log.error("error while calling Popen: {0}".format(e))
        raise SysError("failed to execute limiter")
    if self._type == _LIMITER_SOCKET:
        try:
            # The limiter connects back and sends four uint32s
            # (status, time, memory, extra-info length) followed by the
            # extra info itself.
            s = self._socket
            s.settimeout(1)
            (conn, addr) = s.accept()
            s.settimeout(None)
            (self.exe_status, self.exe_time, self.exe_mem, info_len) = \
                    struct.unpack("IIII", conn.recv(16))
            if info_len:
                self.exe_extra_info = conn.recv(info_len)
            else:
                self.exe_extra_info = ''
        except socket.timeout:
            log.error("[limiter {0!r}] socket timed out".format(
                self._name))
            raise SysError("limiter socket error")
        except Exception as e:
            log.error(
                "[limiter {0!r}] failed to retrieve data through socket: {1}"
                .format(self._name, e))
            raise SysError("limiter socket error")
    if stdout is SAVE_OUTPUT or stderr is SAVE_OUTPUT:
        (self.stdout, self.stderr) = p.communicate()
    else:
        p.wait()
    log.debug('the command above now finished')
    if self._type == _LIMITER_FILE:
        try:
            # Same 16-byte header + extra info, read from the temp file.
            with open(ftmp[1], 'rb') as f:
                (self.exe_status, self.exe_time, self.exe_mem, info_len) = \
                        struct.unpack("IIII", f.read(16))
                if info_len:
                    self.exe_extra_info = f.read(info_len)
                else:
                    self.exe_extra_info = ''
            os.close(ftmp[0])
            os.remove(ftmp[1])
        except Exception as e:
            log.error(
                "[limiter {0!r}] failed to retrieve data through file: {1}"
                .format(self._name, e))
            raise SysError("limiter file error")
    if self._type == _LIMITER_SOCKET:
        try:
            conn.close()
        except Exception as e:
            log.warning("failed to close socket connection: {0}".format(e))
def __del__(self):
    """Destructor: close this limiter's listening socket, if any.

    Bug fix: the method was misspelled ``__del_`` (single trailing
    underscore), so the interpreter never invoked it and socket-type
    limiters leaked their socket; renamed to the real ``__del__`` hook.
    """
    if self._type == _LIMITER_SOCKET:
        try:
            self._socket.close()
        except Exception as e:
            # Best-effort cleanup: never raise from a destructor.
            log.warning("failed to close socket: {0}".format(e))
def _check_msg(m):
    """Read the next message and verify it equals @m; raise on mismatch.

    Bug fix: the warning's format string was missing the closing ']'
    after the judge id ("[judge {0!r} ..."); now matches the identical
    helper used elsewhere in this file ("[judge {0!r}] ...").
    """
    if m != _read_msg():
        log.warning("[judge {0!r}] message check error".format(
            judge.id))
        raise _internal_error
def _solve_task(self):
    """Fetch one task from the global queue and drive it through the
    judge protocol: data sync, compile, per-case results, final report.

    Raises _internal_error on protocol violations; web progress is
    reported through a dedicated reporter thread."""
    judge = self._judge
    # Thin wrappers over the snc connection for this judge.
    def _write_msg(m):
        msg.write_msg(self._snc, m)
    def _write_str(s):
        self._snc.write_str(s)
    def _write_uint32(v):
        self._snc.write_uint32(v)
    def _read_msg():
        return msg.read_msg(self._snc)
    def _read_str():
        return self._snc.read_str()
    def _read_uint32():
        return self._snc.read_uint32()
    def _check_msg(m):
        if m != _read_msg():
            # NOTE(review): format string is missing the closing ']'
            # after the judge id.
            log.warning("[judge {0!r} message check error".format(
                judge.id))
            raise _internal_error
    def _stop_web_report(tell_online=True):
        # Stop the reporter thread; while waiting for it to die, keep
        # the judge connection alive unless @tell_online is False.
        th_report.stop()
        if tell_online:
            while th_report.is_alive():
                th_report.join(msg.TELL_ONLINE_INTERVAL)
                _write_msg(msg.TELL_ONLINE)
        else:
            th_report.join()
    global _task_queue
    task = _task_queue.get(self._lang_id_set)
    if task is None:
        # Nothing to do: send a keepalive and back off.
        _write_msg(msg.TELL_ONLINE)
        time.sleep(msg.TELL_ONLINE_INTERVAL)
        return
    log.info("[judge {0!r}] received task #{1} for problem {2!r}".format(
        judge.id, task.id, task.prob))
    self._cur_task = task
    th_report = _thread_web_communicate()
    th_report.start()
    if not os.path.isdir(task.prob):
        # Problem data directory is missing locally; drop the task.
        self._cur_task = None
        log.error("No data for problem {0!r}, task #{1} discarded".format(
            task.prob, task.id))
        th_report.report(web.report_no_data, [task])
        _stop_web_report()
        return
    # Synchronize the problem data directory to the judge.
    th_report.report(web.report_sync_data, [task, judge])
    _write_msg(msg.PREPARE_DATA)
    _write_str(task.prob)
    speed = sync_dir.send(task.prob, self._snc)
    if speed:
        log.info("[judge {0!r}] file transfer speed: {1!r} kb/s".format(
            judge.id, speed))
    m = _read_msg()
    if m == msg.DATA_ERROR:
        # The judge rejected the problem data (e.g. bad config).
        self._cur_task = None
        reason = _read_str()
        log.error(
            "[judge {0!r}] [task #{1}] [prob: {2!r}] data error:\n{3}".
            format(judge.id, task.id, task.prob, reason))
        th_report.report(web.report_error, [task, "data error"])
        _stop_web_report()
        return
    elif m != msg.DATA_OK:
        log.warning("[judge {0!r}] message check error".format(judge.id))
        th_report.report(web.report_error, [task, "message check error"])
        _stop_web_report(False)
        raise _internal_error
    ncase = _read_uint32()
    # Hand the judge the submission details.
    _write_msg(msg.START_JUDGE)
    _write_str(task.lang)
    _write_str(task.src)
    _write_str(task.input)
    _write_str(task.output)
    # Wait until the judge is ready (it may ask us to wait).
    while True:
        m = _read_msg()
        if m == msg.START_JUDGE_OK:
            break
        if m != msg.START_JUDGE_WAIT:
            log.warning("[judge {0!r}] message check error".format(
                judge.id))
            th_report.report(web.report_error, [task, "message check error"])
            _stop_web_report(False)
            raise _internal_error
    th_report.report(web.report_compiling, [task])
    # Wait for the compile result, skipping keepalives.
    while True:
        m = _read_msg()
        if m == msg.TELL_ONLINE:
            continue
        if m == msg.COMPILE_SUCCEED:
            th_report.report(web.report_compile_success, [task, ncase])
            break
        else:
            if m != msg.COMPILE_FAIL:
                th_report.report(web.report_error,
                        [task, "message check error"])
                log.warning("[judge {0!r}] message check error".format(
                    judge.id))
                _stop_web_report(False)
                raise _internal_error
            # Compile failed: the judge sends the compiler output.
            self._cur_task = None
            th_report.report(web.report_compile_failure,
                    [task, _read_str()])
            _stop_web_report()
            return
    # Collect one case_result per test case, forwarding lazy progress
    # updates to the website.
    prob_res = list()
    for i in range(ncase):
        th_report.lazy_report(web.report_judge_progress, [task, i])
        while True:
            m = _read_msg()
            if m == msg.REPORT_CASE:
                break
            if m != msg.TELL_ONLINE:
                log.warning("[judge {0!r}] message check error".format(
                    judge.id))
                th_report.report(web.report_error,
                        [task, "message check error"])
                _stop_web_report(False)
                raise _internal_error
        result = structures.case_result()
        result.read(self._snc)
        prob_res.append(result)
    th_report.clean_lazy()
    th_report.report(web.report_prob_result, [task, prob_res])
    _check_msg(msg.REPORT_JUDGE_FINISH)
    self._cur_task = None
    _stop_web_report()
    if th_report.check_error():
        log.warning(
            "[judge {0!r}] error while reporting judge results for task #{1}"
            .format(judge.id, task.id))
    else:
        log.info("[judge {0!r}] finished task #{1} normally".format(
            judge.id, task.id))
def run(self):
    """Thread body for one judge connection.

    Performs the handshake (id, protocol version, supported languages,
    website info queries), then loops solving tasks until termination
    is requested.  All failure paths funnel through self._clean()."""
    judge = self._judge
    # Thin wrappers over the snc connection for this judge.
    def _write_msg(m):
        msg.write_msg(self._snc, m)
    def _write_str(s):
        self._snc.write_str(s)
    def _read_msg():
        return msg.read_msg(self._snc)
    def _read_str():
        return self._snc.read_str()
    def _read_uint32():
        return self._snc.read_uint32()
    def _check_msg(m):
        if m != _read_msg():
            log.warning("[judge {0!r}] message check error".format(
                judge.id))
            raise _internal_error
    global _id_max_len, _judge_id_set, _judge_id_set_lock
    try:
        self._snc = snc.snc(self._sock, True)
        _check_msg(msg.HELLO)
        judge_id = _read_str()
        if len(judge_id) > _id_max_len:
            _write_msg(msg.ID_TOO_LONG)
            raise _internal_error
        # Register the id under the lock so two judges cannot claim the
        # same id concurrently.
        with _judge_id_set_lock:
            if judge_id in _judge_id_set:
                _write_msg(msg.DUPLICATED_ID)
                log.warning(
                    "another judge declares duplicated id {0!r}".format(
                        judge_id))
                raise _internal_error
            _judge_id_set.add(judge_id)
        judge.id = judge_id
        del judge_id
        if _read_uint32() != msg.PROTOCOL_VERSION:
            log.warning("[judge {0!r}] version check error".format(
                judge.id))
            _write_msg(msg.ERROR)
            raise _internal_error
        # Read the judge's supported language list.
        cnt = _read_uint32()
        while cnt:
            cnt -= 1
            lang = _read_str()
            judge.lang_supported.add(lang)
            self._lang_id_set.add(_get_lang_id(lang))
        _write_msg(msg.CONNECT_OK)
        # Ask the judge for each info field the website wants.
        query_ans = dict()
        for i in web.get_query_list():
            _write_msg(msg.QUERY_INFO)
            _write_str(i)
            _check_msg(msg.ANS_QUERY)
            query_ans[i] = _read_str()
        web.register_new_judge(judge, query_ans)
        self._web_registered = True
        log.info("[judge {0!r}] successfully connected".format(judge.id))
        # Main loop: keep solving tasks until shutdown is requested.
        while not control.test_termination_flag():
            self._solve_task()
        self._snc.close()
        self._sock.close()
    except snc.Error:
        log.warning("[judge {0!r}] failed because of network error".format(
            judge.id))
        self._clean()
    except _internal_error:
        self._clean()
    except web.Error:
        log.warning(
            "[judge {0!r}] failed because of error while communicating with website"
            .format(judge.id))
        _write_msg(msg.ERROR)
        self._clean()
    except sync_dir.Error:
        log.warning(
            "[judge {0!r}] failed to synchronize data directory".format(
                judge.id))
        self._clean()
    except Exception as e:
        log.warning("[judge {0!r}] error happens: {1}".format(judge.id, e))
        log.debug(traceback.format_exc())
        self._clean()
def recv(path, conn):
    """save the directory to @path via snc connection @conn,
    return the speed in kb/s, or None if no file transferred

    Protocol: receive the remote file list with checksums, delete stale
    local files, request the ones we need, then receive and extract a
    tar archive containing them.  Raises Error on any failure.

    Fixes: the received tar archive is now opened in binary mode
    ('rb', it is binary data written by filetrans.recv; text mode
    corrupts it on platforms with newline translation), and
    tempfile.mkstemp() was moved out of the try whose finally clause
    references its result (a mkstemp failure previously caused a
    NameError in that finally block)."""
    def _write_msg(m):
        msg.write_msg(conn, m)
    def _write_str(s):
        conn.write_str(s)
    def _write_uint32(v):
        conn.write_uint32(v)
    def _read_msg():
        return msg.read_msg(conn)
    def _read_str():
        return conn.read_str()
    def _read_uint32():
        return conn.read_uint32()
    def _check_msg(m):
        # Expect @m, skipping TELL_ONLINE keepalives.
        while True:
            m1 = _read_msg()
            if m1 == msg.TELL_ONLINE:
                continue
            if m1 != m:
                log.warning(
                    "message check error: expecting {0}, got {1}".format(
                        m, m1))
                raise Error
            return
    try:
        if os.path.isdir(path):
            # Hash the existing local files in a worker thread, sending
            # keepalives while it runs.
            th_hash = _thread_get_file_list(path, False)
            th_hash.start()
            while th_hash.is_alive():
                th_hash.join(msg.TELL_ONLINE_INTERVAL)
                _write_msg(msg.TELL_ONLINE)
            flist_local = th_hash.result
        else:
            if os.path.exists(path):
                os.remove(path)
            os.mkdir(path)
            flist_local = dict()
        if flist_local is None:
            raise Error
        flist_needed = list()
        _check_msg(msg.SYNCDIR_BEGIN)
        # Compare the remote list (index, name, checksum) against local
        # files; request files that are missing or differ.
        for i in range(_read_uint32()):
            fname = _read_str()
            checksum = _read_str()
            try:
                if checksum != flist_local[fname]:
                    os.remove(os.path.join(path, fname))
                    flist_needed.append(i)
                # Must run for matching files too, so the leftover sweep
                # below only removes files absent from the remote list.
                del flist_local[fname]
            except KeyError:
                flist_needed.append(i)
        # Remove local files not present on the remote side.
        for i in flist_local:
            os.remove(os.path.join(path, i))
        _write_msg(msg.SYNCDIR_FILELIST)
        _write_uint32(len(flist_needed))
        if len(flist_needed) == 0:
            _write_msg(msg.SYNCDIR_DONE)
            return None
        for i in flist_needed:
            _write_uint32(i)
        _check_msg(msg.SYNCDIR_FTRANS)
        # Create the temp file before the try so the finally clause can
        # always refer to it.
        ftar = tempfile.mkstemp('orzoj')
        try:
            speed = filetrans.recv(ftar[1], conn)
            # 'rb': the tar archive is binary data.
            with open(ftar[1], 'rb') as f:
                th_extar = _thread_extract_tar(f, path)
                th_extar.start()
                while th_extar.is_alive():
                    th_extar.join(msg.TELL_ONLINE_INTERVAL)
                    _write_msg(msg.TELL_ONLINE)
                if th_extar.error:
                    raise Error
            _write_msg(msg.SYNCDIR_DONE)
            return speed
        finally:
            os.close(ftar[0])
            os.remove(ftar[1])
    except Error as e:
        raise e
    except snc.Error:
        log.warning("network error while synchronizing directory")
        raise Error
    except filetrans.OFTPError:
        log.warning("failed to transfer file while synchronizing directory")
        raise Error
    except Exception as e:
        log.error("failed to synchronize directory: {0}".format(e))
        log.debug(traceback.format_exc())
        raise Error
def send(path, conn):
    """send the directory at @path via snc connection @conn,
    return the speed in kb/s, or None if no file transferred

    Protocol (mirror of recv): send the local file list with checksums,
    receive the indices the peer needs, pack those files into a tar
    archive and send it via filetrans.  Raises Error on failure."""
    def _write_msg(m):
        msg.write_msg(conn, m)
    def _write_str(s):
        conn.write_str(s)
    def _write_uint32(v):
        conn.write_uint32(v)
    def _read_msg():
        return msg.read_msg(conn)
    def _read_str():
        return conn.read_str()
    def _read_uint32():
        return conn.read_uint32()
    def _check_msg(m):
        # Expect @m, skipping TELL_ONLINE keepalives.
        while True:
            m1 = _read_msg()
            if m1 == msg.TELL_ONLINE:
                continue
            if m1 != m:
                log.warning(
                    "message check error: expecting {0}, got {1}".format(
                        m, m1))
                raise Error
            return
    # Hash the directory contents in a worker thread, sending keepalives
    # while it runs.
    flist = _thread_get_file_list(path)
    flist.start()
    while flist.is_alive():
        flist.join(msg.TELL_ONLINE_INTERVAL)
        _write_msg(msg.TELL_ONLINE)
    flist = flist.result
    if flist is None:
        raise Error
    try:
        # Send (name, checksum) for every file.
        _write_msg(msg.SYNCDIR_BEGIN)
        _write_uint32(len(flist))
        for i in flist:
            _write_str(i[0])
            _write_str(i[1])
        _check_msg(msg.SYNCDIR_FILELIST)
        nfile = _read_uint32()
        if nfile == 0:
            _check_msg(msg.SYNCDIR_DONE)
            return None
        # The peer replies with the indices of the files it wants.
        flist_req = list()
        while nfile:
            nfile -= 1
            flist_req.append(flist[_read_uint32()][0])
        ftar = tempfile.mkstemp('orzoj')
        try:
            # Pack the requested files into a tar archive in a worker
            # thread, keeping the connection alive meanwhile.
            with open(ftar[1], 'wb') as f:
                th_mktar = _thread_make_tar(f, path, flist_req)
                th_mktar.start()
                while th_mktar.is_alive():
                    th_mktar.join(msg.TELL_ONLINE_INTERVAL)
                    _write_msg(msg.TELL_ONLINE)
                if th_mktar.error:
                    raise Error
            _write_msg(msg.SYNCDIR_FTRANS)
            speed = filetrans.send(ftar[1], conn)
            _check_msg(msg.SYNCDIR_DONE)
            return speed
        finally:
            os.close(ftar[0])
            os.remove(ftar[1])
    except Error as e:
        raise e
    except snc.Error:
        log.warning("network error while synchronizing directory")
        raise Error
    except filetrans.OFTPError:
        log.warning("failed to transfer file while synchronizing directory")
        raise Error
    except Exception as e:
        log.error("failed to synchronize directory: {0}".format(e))
        log.debug(traceback.format_exc())
        raise Error
def connect(sock):
    """connect to orzoj-server via socket @sock and serve requests
    until termination

    Handshake: send id, protocol version and supported languages, then
    loop answering info queries and judging tasks the server sends.

    may raise Error

    Fix: removed a stray trailing backtick from the "an error happens
    there" log message (the analogous message later in this function
    has none)."""
    def _write_msg(m):
        msg.write_msg(conn, m)
    def _write_str(s):
        conn.write_str(s)
    def _write_uint32(v):
        conn.write_uint32(v)
    def _read_msg(timeout = 0):
        return msg.read_msg(conn, timeout)
    def _read_str():
        return conn.read_str()
    def _read_uint32():
        return conn.read_uint32()
    def _check_msg(m):
        if m != _read_msg():
            log.error("message check error.")
            raise Error
    try:
        conn = snc.snc(sock)
        _write_msg(msg.HELLO)
        global _judge_id
        _write_str(_judge_id)
        _write_uint32(msg.PROTOCOL_VERSION)
        _write_uint32(len(core.lang_dict))
        for i in core.lang_dict:
            _write_str(i)
        m = _read_msg()
        if m == msg.ERROR:
            log.warning("failed to connect: orzoj-server says an error happens there")
            raise Error
        if m == msg.DUPLICATED_ID:
            log.error("failed to connect: duplicated id: {0!r}" .
                    format(_judge_id))
            raise Error
        if m == msg.ID_TOO_LONG:
            log.error("failed to connect: id {0!r} is too long for the orzoj-server" .
                    format(_judge_id))
            raise Error
        if m != msg.CONNECT_OK:
            log.error("unexpected message from orzoj-server: {0}" . format(m))
            raise Error
        log.info('connection established')
        # Main loop: answer keepalives, info queries and judge tasks.
        while not control.test_termination_flag():
            m = _read_msg()
            if m == msg.TELL_ONLINE:
                continue
            if m == msg.ERROR:
                log.warning("failed to work: orzoj-server says an error happens there")
                raise Error
            if m == msg.QUERY_INFO:
                global _info_dict
                q = _read_str()
                _write_msg(msg.ANS_QUERY)
                try:
                    _write_str(_info_dict[q])
                except KeyError:
                    _write_str("unknown")
                continue
            if m != msg.PREPARE_DATA:
                log.error("unexpected message from orzoj-server: {0}" . format(m))
                raise Error
            pcode = _read_str()
            log.info("received task for problem {0!r}" . format(pcode))
            # Synchronize the problem data directory from the server.
            try:
                speed = sync_dir.recv(pcode, conn)
                if speed:
                    log.info("file transfer speed: {0!r}" . format(speed))
            except sync_dir.Error:
                log.error("failed to synchronize data for problem {0!r}" .
                        format(pcode))
                raise Error
            # Parse the problem configuration; report DATA_ERROR and
            # continue serving on failure.
            try:
                pconf = probconf.Prob_conf(pcode)
            except Exception as e:
                errmsg = "failed to parse problem configuration: {0}" . format(e)
                _write_msg(msg.DATA_ERROR)
                _write_str(errmsg)
                log.error(errmsg)
                log.debug(traceback.format_exc())
                continue
            _write_msg(msg.DATA_OK)
            _write_uint32(len(pconf.case))
            _check_msg(msg.START_JUDGE)
            lang = _read_str()
            src = _read_str()
            input = _read_str()
            output = _read_str()
            core.lang_dict[lang].judge(conn, pcode, pconf, src, input, output)
    except snc.Error:
        log.error("failed to communicate with orzoj-server because of network error")
        control.set_termination_flag()
        raise Error
    except core.Error:
        control.set_termination_flag()
        raise Error
def _read(data, maxlen=None):
    """if @maxlen is not None, data should be of dict type and is sent
    via GET method and without checksum and the data read is returned;
    otherwise @data will dumped by phpserialize and sent via POST
    method and the data read is returned

    Note: @maxlen is not None iff now trying to login
    """
    global _retry_cnt, _web_addr, _thread_req_id, _lock_thread_req_id, _passwd
    def make_data():
        """return a tuple (checksum_base, data_sent)"""
        # Each thread keeps its own monotonically increasing request id;
        # the checksum base binds thread id, request id and the shared
        # password.
        thread_id = threading.current_thread().ident
        with _lock_thread_req_id:
            try:
                req_id = _thread_req_id[thread_id]
            except KeyError:
                req_id = 0
            _thread_req_id[thread_id] = req_id + 1
        checksum_base = str(thread_id) + '$' + str(req_id) + '$' + _passwd
        data_sent = urllib.urlencode({
            "data": phpserialize.dumps({
                "thread_id": thread_id,
                "data": data,
                "checksum": _sha1sum(checksum_base + data)
            })
        })
        return (checksum_base, data_sent)
    if maxlen:
        url = _web_addr + "?" + urllib.urlencode(data)
    else:
        data = phpserialize.dumps(data)
        (checksum_base, data_sent) = make_data()
    cnt = _retry_cnt
    while cnt:
        if control.test_termination_flag():
            raise Error
        cnt -= 1
        try:
            ret = None
            if maxlen:
                # Login path: plain GET, no checksum handling.
                return urllib2.urlopen(url, None, _timeout).read(maxlen)
            ret = urllib2.urlopen(_web_addr, data_sent, _timeout).read()
            if ret == 'relogin':
                # The website dropped our session.  Exactly one thread
                # performs the relogin (non-blocking acquire succeeds);
                # the others block on the lock until it finishes, then
                # everyone rebuilds its request with a fresh id.
                if _lock_relogin.acquire(False):
                    log.warning("website requests relogin")
                    _login()
                    with _lock_thread_req_id:
                        _thread_req_id.clear()
                    _lock_relogin.release()
                else:
                    _lock_relogin.acquire()  # wait until relogin finishes
                    _lock_relogin.release()
                (checksum_base, data_sent) = make_data()
                cnt = _retry_cnt
                continue
            ret = phpserialize.loads(ret)
            ret_status = ret["status"]
            ret_data = ret["data"]
            # Verify the response checksum before trusting status/data.
            if ret["checksum"] != _sha1sum(checksum_base +
                    str(ret_status) + ret_data):
                raise _internal_error("website checksum error")
            if int(ret_status):
                raise _internal_error(
                    "website says an error happens there: {0}".format(
                        ret_data))
            return phpserialize.loads(ret_data)
        except Exception as e:
            log.error(
                "website communication error [left retries: {0}]: {1}".format(
                    cnt, e))
            sys.stderr.write(
                "orzoj-server: website communication error. See the log for details.\n"
            )
            log.debug("raw data from server: {0!r}".format(ret))
            time.sleep(_retry_wait)
            continue
    # All retries exhausted.
    raise Error
def judge(self, conn, pcode, pconf, src, input, output):
    """@pcode: problem code
    @pconf: problem configuration (defined in probconf.py)

    may raise Error or snc.Error

    Compiles the submitted source (if this language has a compiler),
    runs it on every test case under the executor, verifies each
    output, and streams case results back through @conn."""
    def _write_msg(m):
        msg.write_msg(conn, m)
    def _write_str(s):
        conn.write_str(s)
    def _write_uint32(v):
        conn.write_uint32(v)
    locked = False
    global _lock_file_fd
    # Serialize judging across processes via an advisory file lock;
    # while another process holds it, tell the server to wait.
    if _lock_file_fd:
        while True:
            try:
                fcntl.flock(_lock_file_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except IOError as e:
                if e.errno == errno.EACCES or e.errno == errno.EAGAIN:
                    _write_msg(msg.START_JUDGE_WAIT)
                    time.sleep(msg.TELL_ONLINE_INTERVAL)
                    continue
                else:
                    log.error("failed to lock file: {0}".format(e))
                    _write_msg(msg.ERROR)
                    raise Error
            except Exception as e:
                log.error("failed to lock file: {0}".format(e))
                _write_msg(msg.ERROR)
                raise Error
            locked = True
            break  # successfully locked
    try:
        _write_msg(msg.START_JUDGE_OK)
        _clean_temp()
        global _prog_path_abs, _cmd_vars
        if self._compiler:
            # Write the submitted source and run the compiler, sending
            # keepalives from a helper thread meanwhile.
            with open(_prog_path_abs + self._src_ext, "w") as f:
                f.write(src)
            _cmd_vars["MEMORY"] = 0
            _cmd_vars["DATADIR"] = os.path.abspath(pcode)
            th_tell_online = _thread_tell_online(conn)
            th_tell_online.start()
            # Per-problem compiler options override the defaults.
            if pconf.compiler and self._name in pconf.compiler:
                (ok, info) = self._compiler.run_as_compiler(
                        _prog_path_abs, pconf.compiler[self._name])
            else:
                (ok, info) = self._compiler.run_as_compiler(_prog_path_abs)
            th_tell_online.stop()
            th_tell_online.join()
            if not ok:
                _write_msg(msg.COMPILE_FAIL)
                _write_str(info)
                return
        _write_msg(msg.COMPILE_SUCCEED)
        global _dir_temp_abs
        # Make the produced executable world-readable/executable.
        os.chmod(
            _prog_path_abs + self._exe_ext,
            stat.S_IRUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP
            | stat.S_IROTH | stat.S_IXOTH)
        global _prog_path
        th_report_case = _thread_report_case_result(conn, len(pconf.case))
        th_report_case.start()
        for case in pconf.case:
            try:
                # Stage per-case extra input files into the temp dir.
                if pconf.extra_input:
                    for i in pconf.extra_input:
                        shutil.copy(_join_path(pcode, i), _dir_temp_abs)
                stdin_path = _join_path(pcode, case.stdin)
                if not input:
                    # use stdin
                    prog_fin = open(stdin_path)
                else:
                    # Program reads a named input file: copy the case
                    # input there and give the program a null stdin.
                    tpath = _join_path(_dir_temp_abs, input)
                    shutil.copy(stdin_path, tpath)
                    os.chmod(tpath,
                            stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
                    prog_fin = limiter.get_null_dev(False)
                if not output:
                    # use stdout
                    prog_fout_path = _join_path(
                        _dir_temp_abs, "output.{0}".format(time.time()))
                    prog_fout = open(prog_fout_path, "w")
                else:
                    # Program writes a named output file; its stdout is
                    # discarded.
                    prog_fout_path = _join_path(_dir_temp_abs, output)
                    prog_fout = limiter.get_null_dev()
            except Exception as e:
                # NOTE(review): format() receives two arguments but the
                # message has a single placeholder, so the exception @e
                # is silently dropped from the log line.
                log.error("failed to open data file: {0}".format(
                    stdin_path, e))
                case_result = structures.case_result()
                case_result.exe_status = structures.EXESTS_SYSTEM_ERROR
                case_result.score = 0
                case_result.full_score = 0
                case_result.time = 0
                case_result.memory = 0
                case_result.extra_info = "failed to open data file"
            else:
                # Run the program under the executor limiter.
                _cmd_vars["TIME"] = case.time
                _cmd_vars["MEMORY"] = case.mem
                umask_prev = os.umask(0)
                case_result = self._executor.run(_prog_path,
                        stdin=prog_fin, stdout=prog_fout)
                case_result.full_score = case.score
                os.umask(umask_prev)
                if prog_fin:
                    prog_fin.close()
                if prog_fout:
                    prog_fout.close()
                if case_result.exe_status == structures.EXESTS_NORMAL:
                    # Refuse symlinked/missing output, then verify it.
                    if (not os.path.isfile(prog_fout_path)
                            ) or os.path.islink(prog_fout_path):
                        (case_result.score, case_result.extra_info) = (
                            0, "output file not found")
                    else:
                        (case_result.score,
                         case_result.extra_info) = pconf.verify_func(
                            case.score, stdin_path,
                            _join_path(pcode, case.stdout), prog_fout_path)
                        if case_result.score is None:
                            case_result.score = 0
                            case_result.exe_status = \
                                    structures.EXESTS_SYSTEM_ERROR
                # Best-effort cleanup of per-case files.
                if input:
                    try:
                        os.unlink(tpath)
                    except Exception as e:
                        log.warning(
                            "failed to remove program input file: {0}".
                            format(e))
                try:
                    os.unlink(prog_fout_path)
                except Exception as e:
                    log.warning(
                        "failed to remove program output file: {0}".format(
                            e))
            th_report_case.add(case_result)
        th_report_case.join()
        th_report_case.check_error()
        _write_msg(msg.REPORT_JUDGE_FINISH)
        if locked:
            fcntl.flock(_lock_file_fd, fcntl.LOCK_UN)
    except Error:
        if locked:
            fcntl.flock(_lock_file_fd, fcntl.LOCK_UN)
        raise
    except snc.Error:
        if locked:
            fcntl.flock(_lock_file_fd, fcntl.LOCK_UN)
        raise Error
    except Exception as e:
        if locked:
            fcntl.flock(_lock_file_fd, fcntl.LOCK_UN)
        log.error("[lang {0!r}] failed to judge: {1}".format(
            self._name, e))
        log.debug(traceback.format_exc())
        _write_msg(msg.ERROR)
        raise Error
def _parse_1d0(self, root):
    # for version 1.0
    """Parse a version-1.0 problem configuration XML tree.

    Recognized section tags: "compiler" (per-language compiler options),
    "verifier" (standard verifier or a compiled custom one), "extra"
    (extra input files) and "case" (one test case).  Any other tag, or
    a missing verifier, raises _Parse_error."""
    global _verifier_cache
    for section in root:
        try:
            if section.tag == "compiler":
                # Accumulate compiler options per language name.
                if self.compiler is None:
                    self.compiler = dict()
                name = section.attrib["name"]
                opt = _parse_compiler_opt(section.attrib["opt"])
                if name not in self.compiler:
                    self.compiler[name] = list()
                self.compiler[name].extend(opt)
                continue
            if section.tag == "verifier":
                if self.verify_func:
                    raise _Parse_error("duplicated tag: verifier");
                if "standard" in section.attrib:
                    self.verify_func = _std_verifier
                else:
                    # Try each <source> child until one compiles with a
                    # supported language.
                    ok = False
                    for i in section:
                        if i.tag != "source":
                            raise _Parse_error("unknown tag {0!r} in 'verifier'" . format(i.tag));
                        lang = i.attrib["lang"]
                        try:
                            lang = core.lang_dict[lang]
                        except KeyError:
                            log.warning("language {0!r} for verifier not supported" . format(lang))
                            continue
                        # time/mem/opt are optional attributes; note that
                        # the local name 'time' shadows the time module
                        # within this loop.
                        time = 0
                        mem = 0
                        opt = None
                        try:
                            opt = _parse_compiler_opt(i.attrib["opt"])
                            time = int(i.attrib["time"])
                            mem = int(i.attrib["mem"])
                        except KeyError:
                            pass
                        vf_path = os.path.abspath(os.path.join(
                            _verifier_cache, self._pcode))
                        # Verifier source comes from a file or inline text.
                        if "file" in i.attrib:
                            with open(os.path.join(self._pcode,
                                    i.attrib["file"]), "r") as f:
                                src = f.read()
                        else:
                            src = i.text
                        if not src:
                            raise _Parse_error("no verifier source")
                        try:
                            ret = lang.verifier_compile(self._pcode,
                                    vf_path, src, opt)
                        except core.Error:
                            raise _Parse_error("failed to compile verifier")
                        if not ret[0]:
                            raise _Parse_error("failed to compile verifier: {0}" . format(ret[1]))
                        self.verify_func = _build_verifier(self._pcode,
                                lang, time, mem, vf_path)
                        ok = True
                        break
                    if not ok:
                        raise _Parse_error("no usable verifier")
                continue
            if section.tag == "extra":
                if self.extra_input is None:
                    self.extra_input = list()
                self.extra_input.append(section.attrib["file"])
                continue
            if section.tag == "case":
                case = Case_conf()
                case.stdin = section.attrib["input"]
                case.stdout = section.attrib["output"]
                case.time = int(section.attrib["time"])
                case.mem = int(section.attrib["mem"])
                case.score = int(section.attrib["score"])
                self.case.append(case)
                continue
        except _Parse_error:
            raise
        except Exception as e:
            raise _Parse_error("error while parsing section {0!r}: {1}" . format(section.tag, e))
        # Reached only when no branch above matched (each known tag
        # ends in 'continue').
        raise _Parse_error("unknown tag: {0!r}" . format(section.tag))
    if self.verify_func is None:
        raise _Parse_error("no verifier specified")
def _check_msg(m):
    """Verify that the next incoming message equals @m.

    Logs a warning and raises OFTPError on mismatch."""
    received = _read_msg()
    if received == m:
        return
    log.warning("message check error.")
    raise OFTPError