Example #1
    def _process_nsjail_log(self, file):
        jail_report = {}

        for ln in file:
            mat = re.match(
                r'\[S\]\[\d+?\] __STAT__:0 (?:\d+?:)?([\w]+)\s+=\s+(.*)', ln)
            if mat is None:
                # TODO: triage the message to separate file
                # self.logger.debug('SANDBOX >>> %s', ln[:-1])
                continue
            jail_report[mat.group(1)] = mat.group(2)

        self.logger.debug('captured stat dict:\n%s', pformat(jail_report))

        mandatory_keys = [
            'cgroup_memory_failcnt', 'cgroup_memory_max_usage',
            'exit_normally', 'time'
        ]

        for k in mandatory_keys:
            if k not in jail_report:
                raise IrukaInternalError(
                    'Mandatory key "{}" is missing from the nsjail log'
                    .format(k))

        return jail_report
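For reference, here is a minimal, self-contained sketch of the stat-line format the regex above is written against; the sample line is hypothetical and the real format depends on the nsjail version and configuration.

import re

# Hypothetical nsjail stat line; real output depends on the jail config.
sample = '[S][1234] __STAT__:0 1:cgroup_memory_max_usage = 524288'
mat = re.match(
    r'\[S\]\[\d+?\] __STAT__:0 (?:\d+?:)?([\w]+)\s+=\s+(.*)', sample)
print(mat.group(1), mat.group(2))  # cgroup_memory_max_usage 524288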
Example #2
    def __init__(self,
                 spec,
                 config,
                 *,
                 logger=None,
                 log1=None,
                 log2=None,
                 nsjail_cfg_path):
        self.spec = spec
        self.nsjail_path = config.nsjail_path
        self.nsjail_cfg_path = nsjail_cfg_path

        self.cwd_build = Path('/run/shm')

        self.BUILD_OUT_LIM = 128 * 1024
        self.BUILD_MEM_LIM = 256 * 1024 * 1024
        self.RUN_OUT_LIM = 64 * 1024 * 1024
        self.USEROUT_PATH = '/run/shm/judge/out'

        # self.current_group = None
        # self.current_group_index = -1
        # self.current_subtask = None
        # self.current_subtask_index = -1

        self.logger = logger
        if logger is None:
            self.logger = logging.getLogger(type(self).__name__)

        # populate some structures
        # group numbers (no) are 1-indexed; no=0 is reserved for special
        # semantics (usually none)

        _tasks = [(0, spec.samples)]
        st_iter = iter(spec.subtasks)
        for i, (count, _) in enumerate(spec.task_groups):
            _tasks.append((i + 1, [next(st_iter) for _ in range(count)]))
        # _tasks.extend([(i+1, s) for i, s in enumerate()])
        self.tasks = _tasks

        self.logger.debug('tasks %s', pformat(_tasks))

        # prepare logging facilities
        self.logfile_stdout = log1
        self.logfile_stderr = log2
        self.journals = Journals(self.logfile_stdout, self.logfile_stderr)

        self.user_temp = None
        self._reset_state()
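The grouping loop above is slightly dense, so here is a small illustration of the `self.tasks` layout it produces; the spec shapes (samples, subtasks, task_groups) are assumptions for the sketch, not the real spec class.

# Assumed shapes, for illustration only: task_groups pairs a count with
# per-group metadata, and subtasks is a flat list consumed in order.
samples = ['sample1', 'sample2']
subtasks = ['t1', 't2', 't3']
task_groups = [(2, None), (1, None)]

_tasks = [(0, samples)]
st_iter = iter(subtasks)
for i, (count, _) in enumerate(task_groups):
    _tasks.append((i + 1, [next(st_iter) for _ in range(count)]))

print(_tasks)
# [(0, ['sample1', 'sample2']), (1, ['t1', 't2']), (2, ['t3'])]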
Example #3
def submit_job(servicer, submission_id, callback):
    resultEnum = iruka_admin_pb2.ExecResult

    with models.connection_context():
        try:
            subm = models.Submission.get_by_id(submission_id)
        except peewee.DoesNotExist:
            # logger.error('Submission of id {} does not exist'.format(submission_id))
            return resultEnum.RECORD_NOT_FOUND

        prob = subm.problem
        pid = prob.problem_id

    logger.debug('submission: %s', pformat(subm.debug_view))
    logger.debug('problem: %d - %s', pid, prob.problem_title)

    # transform 2D list to protobuf
    prob_spec = [
        common_pb2.Int64Array(value=row) for row in prob.problem_testdata
    ]

    prob_type_enum = iruka_rpc_pb2.SubmissionRequest.HojProblemType
    prob_type = prob_type_enum.REGULAR
    map_extra_files = {}

    if prob.problem_special:
        prob_type = prob_type_enum.SPECIAL_JUDGE
        map_extra_files['checker.cpp'] = prob.problem_check.encode()
    elif prob.problem_id in [23, 118, 119, 120, 121, 122, 257, 290, 363]:
        # TODO
        prob_type = prob_type_enum.INTERACTIVE

    job = Job(
        submission_id,
        iruka_rpc_pb2.ServerEvent(
            type=iruka_rpc_pb2.ServerEvent.REQUEST_JUDGE))

    if subm.submission_status != 0:
        logger.warning(
            'Submission %d has a resolved status. The result will not be updated.',
            subm.submission_id)
        job.dry_run = True

    # backward compatibility
    preset = subm.submission_preset
    if preset is None:
        preset = 'cpp'

    submObj = iruka_rpc_pb2.SubmissionRequest(
        id=job.id,
        submission_id=submission_id,
        submission=iruka_rpc_pb2.Submission(
            problem_id=pid,
            code=subm.submission_code,
            build_preset=preset,
            files=map_extra_files),
        hoj_spec=prob_spec,
        hoj_type=prob_type)

    job.content.submission_req.CopyFrom(submObj)

    job.callback = partial(callback, job, subm)

    if not servicer._submit_job(job):
        return resultEnum.ALREADY_IN_QUEUE

    logger.debug('Job %s submitted', job)
    return resultEnum.SUCCESS
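A hedged usage sketch of submit_job; the servicer object, the extra arguments the callback receives, and the submission id are assumptions, since only the call sites in this module define them.

# Hypothetical caller; `servicer` must expose _submit_job(), and the callback
# receives (job, subm) first because submit_job wraps it with functools.partial.
def on_job_done(job, subm, *extra):
    logger.debug('job %s for submission %d finished', job, subm.submission_id)

result = submit_job(servicer, submission_id=42, callback=on_job_done)
if result != iruka_admin_pb2.ExecResult.SUCCESS:
    logger.warning('submit_job returned %s', result)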
Example #4
def _writeback_subm_result(job, submission, pb_event, finalize=False):
    orig_status = submission.submission_status
    is_resolving = any((pb_event.HasField(x) for x in ['result', 'exception']))

    # NOTE that partial_stat is converted in server.py as a workaround!
    # When the workaround is removed, the logic here should be modified as well.

    if pb_event.HasField('ack') and pb_event.ack.reject_reason:
        # somehow, client fails
        submission.submission_status = common_pb2.OTHER
        submission.submission_error = 'Client rejected this job: {} :('.format(
            iruka_rpc_pb2.SubmissionAck.RejectReason.Name(
                pb_event.ack.reject_reason))

    elif pb_event.HasField('result'):
        result = pb_event.result
        logger.debug('Submission result: %s', pformat_pb(result, max_level=1))

        if result.pipeline_success:
            _update_subm(submission, pb_event)
        else:
            final_verdict = result.final_stat.verdict

            arr = []
            for _ in job.content.submission_req.hoj_spec:
                # Fill in all CEs, for example,
                # but how many rows should I fill in?
                arr.append([final_verdict, 0, 0])

            # actually, N/A
            submission.submission_time = 0
            submission.submission_mem = 0
            submission.submission_score = 0
            submission.submission_result = arr

        _finalize_subm(submission, pb_event.result)

        logger.debug('submission now: %s', pformat(submission.debug_view))

    elif pb_event.HasField('exception'):
        submission.submission_status = common_pb2.SERR
        # hmm should backtrace be shown to the user?

        # err_buf = io.StringIO()
        # err_buf.write(pb_event.exception.message + '\n')
        # err_buf.write(pb_event.exception.backtrace)

        # err_buf.seek(0)
        # submission.submission_error = ''.join(['> ' + ln for ln in err_buf.readlines()])

        submission.submission_error = '*** Please contact the staff ***'
        if hasattr(submission, 'submission_verbose'):
            submission.submission_verbose = '{}\n{}\n'.format(
                pb_event.exception.message, pb_event.exception.backtrace)
        else:
            logger.info('Submission has no `submission_verbose` field. '
                        'The verbose info. is lost.')

        # FIXME: the values should be computed for partial stats before
        # the exception

        arr = []
        for _ in job.content.submission_req.hoj_spec:
            arr.append([common_pb2.SERR, 0, 0])

        submission.submission_time = 0
        submission.submission_mem = 0
        submission.submission_score = 0
        submission.submission_result = arr

    if is_resolving and not job.dry_run:
        with models.connection_context():
            submission.save()

        logger.info('Updated submission %d in database.',
                    submission.submission_id)
Example #5
    def ReportSubmission(self, request, context):
        # prioritize operating on scheduler's methods to mutate the state

        logger.debug('Start submission report')

        try:
            event = next(request)
        except StopIteration:
            logger.error('Client call ReportSubmission with an empty payload!')
            context.abort(grpc.StatusCode.INVALID_ARGUMENT, 'Empty report')

        if not event.HasField('ack'):
            return iruka_rpc_pb2.GeneralResponse(
                ok=0, msg='The first reported event should be an ACK!')

        event_ack = event.ack
        logger.debug('Received ACK')
        job_obj = self.server_job_list.get(event_ack.id, None)
        if job_obj is None:
            logger.warning('Client sent an invalid job id %d', event_ack.id)
            return iruka_rpc_pb2.GeneralResponse(ok=0, msg='invalid job id!')

        client = self.scheduler._clients.get(context.peer(), None)
        if client is None:
            context.abort(grpc.StatusCode.UNAUTHENTICATED, 'Unrecognized peer')
            return

        if event_ack.reject_reason:
            reason_str = iruka_rpc_pb2.SubmissionAck.RejectReason.Name(
                event_ack.reject_reason)
            logger.warning('Submission is rejected by client %s: %s.',
                           client.peer, reason_str)
            self.scheduler.job_reject(client, job_obj, event)
            return iruka_rpc_pb2.GeneralResponse(ok=1, msg='rejection acked')

        self.scheduler.job_accept(client, job_obj)
        self.scheduler.job_progress(job_obj, event)

        # FIXME: collect partial_stat events and pretend we have a complete
        # result all at once; see below with care
        stat_buffer = []
        has_exceptions = False

        for event in request:
            if event.HasField('partial_stat'):
                for stctx in event.partial_stat.values:
                    # FIXME: ignore number & label, assume in order
                    stat = stctx.stat
                    stat_buffer.append(stat)
            elif event.HasField('result'):
                if event.result.subtasks:
                    logger.warning(
                        'Client reports a non-empty result.subtasks, '
                        'which is unsupported for now.')
                event.result.subtasks.extend(stat_buffer)
                break
            elif event.HasField('exception'):
                logger.error('Exception raised when judging: %s',
                             event.exception.message)
                has_exceptions = True
                break
            else:
                logger.warning(
                    'Unexpected event type encountered: %s; do nothing.',
                    event.WhichOneof('event'))

            self.scheduler.job_progress(job_obj, event)

        logger.debug('End submission report')
        logger.debug(
            'Stat buffer: %s',
            pformat([pformat_pb(s, max_level=0) for s in stat_buffer]))

        if has_exceptions:
            job_obj.status = common_pb2.JOB_FAILED
        else:
            job_obj.status = common_pb2.JOB_COMPLETED

        self.scheduler.job_done(job_obj, event)

        return iruka_rpc_pb2.GeneralResponse(ok=1, msg='accepted')