def compile(self, sandbox):
    """Compile the current task inside *sandbox* and return the compiler result."""
    logger().info('Compiling task {id}'.format(id=self.task.task_id))
    # Let the sandbox lay down the compiler configuration before we start.
    sandbox.write_compile_config()
    self.report_status(Status.COMPILING)
    return CompilerMaster().compile(self.task)
def clean(self):
    """Remove the task's working directory and restore the previous state.

    The directory is kept when running in debug mode so it can be inspected.
    """
    if self.path and not is_debug():
        logger().info("Clean working dir {path}".format(path=self.path))
        if os.path.exists(self.path):
            shutil.rmtree(self.path)
        else:
            logger().warning("path is not exist!")
    # NOTE(review): restore() is assumed to run unconditionally (i.e. even in
    # debug mode), presumably undoing what backup() saved in __init__ —
    # confirm against backup()/restore() definitions.
    self.restore()
def get_error(self):
    """Read back the program's captured stderr (``user.err``).

    Returns:
        Up to MAX_USER_OUT characters of ``user.err``, or None when the
        file does not exist — in that case the case result is marked as
        a runtime error with zeroed time/memory cost.
    """
    try:
        # `with` fixes the original's leaked file handle (open() never closed).
        with open("user.err") as err_file:
            return err_file.read(MAX_USER_OUT)
    except FileNotFoundError:
        logger().error('user.err not found')
        self.result = Status.RUNTIME_ERROR
        self.time_cost = 0
        self.memory_cost = 0
        return None
def __init__(self, task):
    """Prepare an isolated working directory for *task*.

    Writes the task's source code and its case configuration into the
    freshly created directory.
    """
    self.task = task
    # presumably snapshots the current state (e.g. cwd) so clean()/restore()
    # can undo it later — TODO confirm in backup()
    self.backup()
    self.path = self.prepare_working_dir()
    logger().info('Task {sid} dir is {path}'.format(sid=task.task_id,
                                                    path=self.path))
    self.write_code(task.language_type.source_name, task.code)
    # write config
    CaseConfig(task).write_to_file()
def get_data(self, pid):
    """Return parsed problem data for *pid*, preferring the local cache.

    On a cache miss the data is fetched from the remote API and written
    back to the cache before being returned.
    """
    if self.is_cached(pid):
        return json.loads(self.read_data(pid))
    logger().info(
        'Data of {pid} is not cached, will fetch from remote'.format(pid=pid))
    response = self._remote.get_data(pid)
    self.write_data(pid, response)
    return json.loads(response.to_data())
def process(self, task: Task, data: TaskData):
    """Run the full judge pipeline for one task: prepare, compile, execute.

    Starts from an optimistic ACCEPTED result and lets each stage's
    exception map to the corresponding terminal status report.
    """
    self.task = task
    self.cases = data
    # Optimistic default; downgraded by running()/exceptions below.
    self.result = CaseResult.make(Status.ACCEPTED, task.task_id)
    try:
        sandbox = self.prepare()
        self.task.working_dir = sandbox.path
        self.compile(sandbox)
        self.running(sandbox)
        # Only reached on success — a failing stage skips cleanup via its
        # exception (the sandbox is then left for inspection, presumably).
        sandbox.clean()
    except CompileException as e:
        logger().warning('Task {id} Compile failed: {e}'.format(
            id=self.task.task_id, e=e))
        self.report_status(Status.COMPILE_ERROR)
    except TimeLimitException:
        logger().error('Task {id} time limit in runner!!'.format(
            id=self.task.task_id))
        self.report_status(Status.TIME_LIMIT)
    except ExecuteException as e:
        logger().error('Task {id} Execute failed: {e}'.format(
            id=self.task.task_id, e=e))
        self.report_status(Status.RUNTIME_ERROR)
    except RuntimeError as e:
        # NOTE(review): unlike the branches above, no status is reported
        # here — confirm this is intentional.
        logger().error('Catch Runtime Error: %s', e)
def running(self, sandbox: Environment):
    """Execute every test case in order, stopping at the first failure,
    then report the aggregated result."""
    logger().info('Executing {id} @ {path}'.format(id=self.task.task_id,
                                                   path=sandbox.path))
    total = self.cases.count
    for index in range(total):
        logger().info('Task %d, case %d, total %d',
                      self.task.task_id, index, total)
        self.result.update_by_case(self.execute(sandbox, index))
        if not self.result.is_ok():
            # First non-OK case decides the verdict; skip the rest.
            break
    # report final result
    self.reporter.report(self.result)
def execute(self, sandbox: Environment, index):
    """Run test case *index* in *sandbox* and return its CaseResult.

    A successfully executed case is then graded by comparing its output
    against the expected output file.
    """
    standard_input = self.cases.get_input(index)
    standard_output = self.cases.get_output(index)
    try:
        execution = Execution(sandbox)
        case_result = execution.execute(self.task, standard_input)
        logger().info('Task %d, case %d Execute success',
                      self.task.task_id, index)
        if case_result.is_ok():
            # Grade only cases that ran cleanly; failed runs keep their
            # runner-assigned status.
            case_result.result = ResultFiles.compare_output(standard_output)
        return case_result
    except ExecuteException as e:
        logger().error(
            'Task %d, case %d failed: code: %d, out: %s, err: %s.',
            self.task.task_id, index, e.code, e.user_out, e.user_err)
        # NOTE(review): this path falls through and returns None, which the
        # caller feeds to result.update_by_case() — confirm that handles
        # None, or consider re-raising instead.
def execute(self, task, standard_input):
    """Run a single test case for *task* inside the sandbox.

    Places *standard_input* into the sandbox, runs the program, and
    returns a CaseResult parsed from the runner's stdout.

    Raises:
        ExecuteException: when the runner reports a non-OK status.
        SystemExit: when the Docker daemon itself fails (unrecoverable
            for this worker).
    """
    self.sandbox.prepare_for_next()
    self.sandbox.place_input(standard_input)
    try:
        runner = Runner()
        runner.execute(task.working_dir)
        logger().info('Executor: Task %d finished, result: %s',
                      task.task_id, runner.get_status())
        if not runner.is_ok():
            raise ExecuteException(runner.get_status(),
                                   runner.get_stdout(),
                                   runner.get_stderr())
        return CaseResult.parse_runner(runner.get_stdout())
    except DockerException as err:
        # Fix: the original logged ('Docker Exception:', err) with no %s
        # placeholder, so the exception text was dropped by logging.
        logger().error('Docker Exception: %s', err)
        # Equivalent to exit(1) but does not depend on the site module.
        raise SystemExit(1)
def get_data(self, pid) -> DataResponse:
    """Fetch problem data for *pid* from the remote API.

    Raises:
        FetchDataFailed: when the server answers with a non-200 status.
    """
    log = logger()
    log.info('Fetch data of {pid}'.format(pid=pid))
    r = self._client.get(self.url_manager.data, params={'pid': pid})
    if r.status_code != 200:
        log.error('fetch data failed: {r}'.format(r=r.content))
        raise FetchDataFailed()
    # Log only a prefix of the payload to keep the log readable.
    log.info('fetch data of {pid}: {content}'.format(pid=pid,
                                                     content=r.content[:200]))
    return DataResponse(r.content)
def _compile(self):
    """Invoke the compiler as a subprocess with a hard time limit.

    Raises:
        CompileException: when the compiler exits non-zero, or when it
            exceeds MAX_COMPILE_TIME seconds.
    """
    args = self.language_type.full_compile_command()
    logger().debug('Compile task use {args}'.format(args=args))
    p = subprocess.Popen(args, shell=False,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    try:
        # communicate(timeout=...) replaces the original SIGALRM dance:
        # it is thread-safe and portable, and the old code's own comment
        # ("python 3.3 add timeout") already pointed at it.
        (stdoutdata, stderrdata) = p.communicate(timeout=MAX_COMPILE_TIME)
    except subprocess.TimeoutExpired:
        p.kill()
        p.communicate()  # reap the killed child to avoid a zombie
        raise CompileException('Exceed Compile Time Limit')
    if stderrdata or stdoutdata:
        logger().warning("Compile alert: out => %s, err => %s",
                         stdoutdata, stderrdata)
    if p.returncode != 0:
        raise CompileException(stderrdata)
def compile(self, task):
    """Compile *task* inside the Docker-based compiler container.

    Best-effort: a DockerException is logged rather than propagated.

    :param judge.task.Task task:
    :return:
    """
    try:
        compiler = Compiler()
        compiler.execute(task.working_dir)
        logger().info('Compiler: Task %d finished, result: %s',
                      task.task_id, compiler.get_status())
        logger().info('Compiler stdout: %s', compiler.get_stdout())
        # todo: check target is ok
        # self.check_result(task.language_type.target_name)
    except DockerException as err:
        # Fix: the original logged ('Docker Exception:', err) with no %s
        # placeholder, so the exception text was dropped by logging.
        logger().error('Docker Exception: %s', err)
#!/bin/env python # coding: utf8 from time import sleep, time from judge.config import Config from judge.data import TaskData, new_data_manager from judge.language import load_languages from judge.libs.graceful import GracefulKiller from judge.remote import new_api from judge.task import TaskCentre, Task from judge.utils.log import logger from judge.worker import Worker logger = logger() class Judged(object): cfg = ... duration = 0.2 idle_from = 0 def __init__(self, cfg: Config): self.cfg = cfg self.killer = GracefulKiller() self.api = new_api(cfg.api) self.dataProvider = new_data_manager(cfg.judged['data_cache'], self.api) load_languages() def run(self): dispatcher = TaskCentre(self.cfg.message_queue)
def exit_gracefully(self, signum, frame):
    """Signal handler: flag the main loop to stop instead of dying mid-task."""
    logger().info('Judged receive signal, graceful exit...')
    # Polled by the owner of this killer object (presumably the main run
    # loop) — the handler itself does not terminate anything.
    self.stop = True
def __init__(self):
    """Register SIGINT/SIGTERM handlers that request a graceful shutdown."""
    logger().info('Signal register...')
    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, self.exit_gracefully)
def prepare_for_next(self):
    """Delete the per-case scratch files so the next case starts clean."""
    logger().info("Clear working dir for next case")
    for name in ('user.in', 'user.out', 'user.err'):
        try:
            os.unlink(name)
        except FileNotFoundError:
            # Fix: the original exists()-then-unlink() check raced with
            # concurrent deletion; EAFP removes the TOCTOU window.
            pass
def get_language(self, language_id) -> LanguageType:
    """Look up the registered language for *language_id*.

    Raises:
        LanguageNotExist: when the id is not registered.
    """
    try:
        return self._languages[language_id]
    except KeyError:
        logger().info('Language id not exist: {id}'.format(id=language_id))
        raise LanguageNotExist()
def prepare(self) -> Environment:
    """Create a fresh execution environment for the current task."""
    logger().info('Prepare environment for task %d', self.task.task_id)
    return Environment(self.task)
def get_data(self, pid):
    """Return the cached data file contents for problem *pid*."""
    data_path = self._get_data_path(pid)
    logger().info('get data of %d, %s', pid, data_path)
    return get_file_content(data_path)
def report(self, result):
    """POST *result* to the report endpoint; accepts a CaseResult or a dict."""
    payload = result.as_dict() if isinstance(result, CaseResult) else result
    logger().info('Report Status %s', json.dumps(payload))
    return self._client.post(self.url_manager.report, data=payload)
def save_data(self, pid, content):
    # type: (int, bytes) -> None
    """Persist raw problem-data bytes into the cache file for *pid*."""
    target = self._get_data_path(pid)
    logger().info('write {path} data'.format(path=target))
    # write_file takes text, so decode the byte payload first.
    write_file(target, content.decode())