import hashlib
import itertools
import json
import logging
import os
import re
import shutil

# Project-local modules (assumed importable as plain modules here); other
# module-level names used below -- PRECHECK_QUESTION_EXISTS_TEMPLATE,
# REWRITE_RULES, DELETED_FIELDS, FieldKey, INTRODUCTION_FILE, CONF_DIR,
# Exercise, extract_first_heading, split_cells_into_fields,
# interpret_testcode_cells, generate_*_test_code -- are defined elsewhere
# in the original sources.
import build_autograde
import ipynb_metadata
import ipynb_util
import judge_setting


def update_exercise_master_metadata_formwise(separates, bundles, new_deadlines, new_drive):
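    """Renew deadline and Google Drive metadata of master notebooks.

    Every exercise is first updated with values keyed by its exercise key;
    bundled exercises are then updated again with values keyed by
    '<bundle dirname>/'.
    """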
    for exercise in itertools.chain(*bundles.values(), separates):
        filepath = os.path.join(exercise.dirpath, f'{exercise.key}.ipynb')
        cells, metadata = ipynb_util.load_cells(filepath)
        deadlines_cur = ipynb_metadata.master_metadata_deadlines(metadata)
        deadlines = new_deadlines.get(exercise.key, deadlines_cur)
        if deadlines != deadlines_cur:
            logging.info(f'[INFO] Renew deadline of {exercise.key}')
        drive_cur = ipynb_metadata.master_metadata_drive(metadata)
        drive = new_drive.get(exercise.key, drive_cur)
        if drive != drive_cur:
            logging.info(f'[INFO] Renew Google Drive ID/URL of {exercise.key}')
        metadata = ipynb_metadata.master_metadata(exercise.key, True, exercise.version, exercise.title, deadlines, drive)
        ipynb_util.save_as_notebook(filepath, cells, metadata)

    for dirpath, exercises in bundles.items():
        dirname = os.path.basename(dirpath)
        for exercise in exercises:
            filepath = os.path.join(exercise.dirpath, f'{exercise.key}.ipynb')
            cells, metadata = ipynb_util.load_cells(filepath)
            deadlines_cur = ipynb_metadata.master_metadata_deadlines(metadata)
            deadlines = new_deadlines.get(f'{dirname}/', deadlines_cur)
            if deadlines != deadlines_cur:
                logging.info(f'[INFO] Renew deadline of bundle {dirname}/{exercise.key}')
            drive_cur = ipynb_metadata.master_metadata_drive(metadata)
            drive = new_drive.get(f'{dirname}/', drive_cur)
            if drive != drive_cur:
                logging.info(f'[INFO] Renew Google Drive ID/URL of bundle {dirname}/{exercise.key}')
            metadata = ipynb_metadata.master_metadata(exercise.key, True, exercise.version, exercise.title, deadlines, drive)
            ipynb_util.save_as_notebook(filepath, cells, metadata)

def cleanup_exercise_master(exercise, new_version=None):
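    """Normalize a master notebook in place, optionally renewing its version.

    new_version may be None (keep the current version), a str (used
    verbatim), or hashlib.sha1, which acts as a sentinel requesting a
    version derived from a SHA-1 hash of the exercise definition.
    """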
    filepath = os.path.join(exercise.dirpath, f'{exercise.key}.ipynb')
    cells, metadata = ipynb_util.load_cells(filepath, True)
    cells_new = [x.to_ipynb() for x in ipynb_util.normalized_cells(cells)]

    if new_version is None:
        new_version = exercise.version
    elif new_version == hashlib.sha1:
        exercise_definition = {
            'description': [x.to_ipynb() for x in exercise.description],
            'answer_cell': exercise.answer_cell().to_ipynb(),
            'instructive_test': [x.to_ipynb() for x in exercise.instructive_test],
        }
        m = hashlib.sha1()
        m.update(json.dumps(exercise_definition).encode())
        new_version = m.hexdigest()
    else:
        assert isinstance(new_version, str)

    if new_version != exercise.version:
        logging.info(f'[INFO] Renew version of {exercise.key}')
        exercise.version = new_version

    deadlines = ipynb_metadata.master_metadata_deadlines(metadata)
    drive = ipynb_metadata.master_metadata_drive(metadata)
    metadata_new = ipynb_metadata.master_metadata(exercise.key, True, exercise.version, exercise.title, deadlines, drive)
    ipynb_util.save_as_notebook(filepath, cells_new, metadata_new)
def add_question_exists_into_answer_cell(filepath):
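    """Inject QUESTION_EXISTS handling into a master notebook, in place.

    A QUESTION_EXISTS flag is prepended to the cell following the
    ANSWER_CELL_CONTENT marker, and the question_exists pre-check template
    is appended to cells following SYSTEM_TESTCODE markers, unless they are
    already present.
    """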
    CONTENT_TYPE_REGEX = r'\*\*\*CONTENT_TYPE:\s*(.+?)\*\*\*'

    cells, metadata = ipynb_util.load_cells(filepath)
    for i, c in enumerate(cells):
        if c['cell_type'] == 'markdown':
            matches = list(
                re.finditer(CONTENT_TYPE_REGEX, ''.join(c['source'])))
            if not matches:
                continue
            key = matches[0][1]
            if key == 'ANSWER_CELL_CONTENT' and all(
                    'QUESTION_EXISTS = False' not in x
                    for x in cells[i + 1]['source']):
                print('Append QUESTION_EXISTS: ', filepath)
                cells[i + 1]['source'][0:0] = [
                    # The injected Japanese comment reads: "if you have a question, set this to True and write the question in a comment".
                    'QUESTION_EXISTS = False # 質問がある場合は True にしてコメントに質問を記述\n',
                    '\n'
                ]

            if key == 'SYSTEM_TESTCODE' and all(
                    'question_exists' not in x
                    for x in cells[i + 1]['source']):
                print('Append question_exists: ', filepath)
                cells[i + 1]['source'][-1] = cells[i + 1]['source'][-1] + '\n'
                cells[i + 1]['source'].extend(
                    PRECHECK_QUESTION_EXISTS_TEMPLATE.splitlines(True))

    ipynb_util.save_as_notebook(filepath, cells, metadata)

def create_bundled_intro(dirpath):
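    """Return the introduction cells of a bundle directory.

    Falls back to a single markdown heading cell named after the directory
    when INTRODUCTION_FILE does not exist.
    """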
    dirname = os.path.basename(dirpath)
    try:
        raw_cells, _ = ipynb_util.load_cells(os.path.join(dirpath, INTRODUCTION_FILE))
        return list(ipynb_util.normalized_cells(raw_cells))
    except FileNotFoundError:
        return [ipynb_util.markdown_cell(f'# {dirname}')]

def convert_master(filepath):
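    """Rewrite a master notebook in place according to the conversion rules.

    Marker cells listed in REWRITE_RULES are replaced, the cell after a
    PLAYGROUND marker is reset to judge_util.unittest_main(), and the
    fields listed in DELETED_FIELDS (marker cell included) are dropped.
    """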
    CONTENT_TYPE_REGEX = r'\*\*\*CONTENT_TYPE:\s*(.+?)\*\*\*'

    cells, metadata = ipynb_util.load_cells(filepath)
    for i, c in enumerate(cells):
        if c['cell_type'] == 'markdown':
            matches = list(
                re.finditer(CONTENT_TYPE_REGEX, ''.join(c['source'])))
            if not matches:
                continue
            key = matches[0][1]
            if key in REWRITE_RULES:
                c['source'] = REWRITE_RULES[key].splitlines(True)
            if key == 'PLAYGROUND':
                cells[i + 1]['source'] = ['judge_util.unittest_main()']

    new_cells = []
    deleting = False
    for c in cells:
        if c['cell_type'] == 'markdown':
            matches = list(
                re.finditer(CONTENT_TYPE_REGEX, ''.join(c['source'])))
            if matches:
                deleting = False
                if matches[0][1] in DELETED_FIELDS:
                    deleting = True
        if not deleting:
            new_cells.append(c)

    ipynb_util.save_as_notebook(filepath, new_cells, metadata)
def generate_template(exercise):
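    """Generate template_<key>.ipynb from an exercise's master notebook.

    Pre-check, given, and hidden test-code cells are appended after the
    SYSTEM_TESTCODE marker cell, and a judge_util.unittest_main() cell
    after the PLAYGROUND marker cell.
    """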
    FieldKey = build_autograde.FieldKey
    CONTENT_TYPE_REGEX = r'\*\*\*CONTENT_TYPE:\s*(.+?)\*\*\*'

    cells, metadata = ipynb_util.load_cells(
        os.path.join(exercise.dirpath, exercise.key + '.ipynb'))
    gen_cells = []
    for i, c in enumerate(cells):
        if c['cell_type'] == 'markdown':
            matches = list(
                re.finditer(CONTENT_TYPE_REGEX, ''.join(c['source'])))
            gen_cells.append(c)
            if not matches:
                continue
            key = getattr(FieldKey, matches[0][1])
            if key == FieldKey.SYSTEM_TESTCODE:
                gen_cells.append(generate_precheck_test_code(exercise))
                given_test = generate_given_test_code(exercise)
                gen_cells.extend(given_test)
                gen_cells.append(generate_hidden_test_code(exercise))
            if key == FieldKey.PLAYGROUND:
                gen_cells.append(
                    ipynb_util.code_cell(
                        'judge_util.unittest_main()').to_ipynb())
        else:
            gen_cells.append(c)

    filepath = os.path.join(exercise.dirpath, f'template_{exercise.key}.ipynb')
    ipynb_util.save_as_notebook(filepath, gen_cells, metadata)

def load_exercise(dirpath, exercise_key):
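    """Load <exercise_key>.ipynb from dirpath and build an Exercise.

    The title is taken from the first heading of the DESCRIPTION field, and
    the SYSTEM_TESTCODE cells are interpreted into test modules.
    """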
    raw_cells, metadata = ipynb_util.load_cells(os.path.join(dirpath, exercise_key + '.ipynb'))
    version = ipynb_metadata.master_metadata_version(metadata)

    fields = dict(split_cells_into_fields(FieldKey, raw_cells))

    heading_regex = r'#+\s+(.*)'
    description_first_line = fields[FieldKey.DESCRIPTION][0].source.strip().splitlines()[0]
    title = re.fullmatch(heading_regex, description_first_line).groups()[0]

    test_modules = interpret_testcode_cells(dirpath, fields.pop(FieldKey.SYSTEM_TESTCODE))

    exercise_kwargs = {
        'key': exercise_key, 'dirpath': dirpath, 'version': version, 'title': title, 'test_modules': test_modules,
        **{f.name.lower(): cs[0] if f == FieldKey.ANSWER_CELL_CONTENT else cs for f, cs in fields.items()},
    }
    return Exercise(**exercise_kwargs)
def release_ipynb(master_path,
                  new_version,
                  new_deadlines,
                  new_drive,
                  form_dir=None):
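    """Release a master notebook and write its submission form.

    new_version may be None (keep the current version), a str (used
    verbatim), or hashlib.sha1 (derive the version from a hash of the
    cells). Deadline and Google Drive metadata are renewed from
    new_deadlines and new_drive, the master is saved in place, and a form
    notebook is written to form_dir (or next to the master as
    form_<key>.ipynb).
    """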
    key, ext = os.path.splitext(os.path.basename(master_path))
    cells, metadata = ipynb_util.load_cells(master_path, True)
    title = extract_first_heading(cells)
    version = ipynb_metadata.master_metadata_version(metadata)

    if new_version is None:
        new_version = version
    elif new_version == hashlib.sha1:
        m = hashlib.sha1()
        m.update(json.dumps(cells).encode())
        new_version = m.hexdigest()
    else:
        assert isinstance(new_version, str)
    if new_version != version:
        logging.info(f'[INFO] Renew version of `{master_path}`')
        version = new_version

    deadlines_cur = ipynb_metadata.master_metadata_deadlines(metadata)
    deadlines = new_deadlines.get(key, deadlines_cur)
    if deadlines != deadlines_cur:
        logging.info(f'[INFO] Renew deadline of `{master_path}`')
    drive_cur = ipynb_metadata.master_metadata_drive(metadata)
    drive = new_drive.get(key, drive_cur)  # keep the current value when no new one is given, as for deadlines
    if drive != drive_cur:
        logging.info(f'[INFO] Renew Google Drive ID/URL of `{master_path}`')

    master_metadata = ipynb_metadata.master_metadata(key, False, version,
                                                     title, deadlines, drive)
    ipynb_util.save_as_notebook(master_path, cells, master_metadata)
    logging.info(f'[INFO] Released master `{master_path}`')

    if form_dir:
        form_path = os.path.join(form_dir, f'{key}.ipynb')
    else:
        form_path = os.path.join(os.path.dirname(master_path),
                                 f'form_{key}.ipynb')
    submission_metadata = ipynb_metadata.submission_metadata({key: version},
                                                             False)
    ipynb_util.save_as_notebook(form_path, cells, submission_metadata)
    logging.info(f'[INFO] Released form `{form_path}`')

def create_exercise_configuration(exercise: Exercise):
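    """Emit the judge configuration of an exercise under CONF_DIR.

    Writes a trial notebook, setting.json, one <stage>.py module per test
    stage, and copies each stage's required files into the tests directory.
    """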
    tests_dir = os.path.join(CONF_DIR, exercise.key)
    os.makedirs(tests_dir, exist_ok=True)

    cells = [x.to_ipynb() for x in itertools.chain(exercise.description, [exercise.answer_cell_content])]
    _, metadata = ipynb_util.load_cells(os.path.join(exercise.dirpath, exercise.key + '.ipynb'), True)
    ipynb_metadata.extend_master_metadata_for_trial(metadata, exercise.answer_cell_content.source)
    ipynb_util.save_as_notebook(os.path.join(CONF_DIR, exercise.key + '.ipynb'), cells, metadata)

    setting = judge_setting.generate_judge_setting(exercise.key, exercise.version, [stage for stage, _ in exercise.test_modules])
    with open(os.path.join(tests_dir, 'setting.json'), 'w', encoding='utf-8') as f:
        json.dump(setting, f, indent=1, ensure_ascii=False)

    for stage, content in exercise.test_modules:
        with open(os.path.join(tests_dir, f'{stage.name}.py'), 'w', encoding='utf-8', newline='\n') as f:
            print(content, 'judge_util.unittest_main()', sep='\n', file=f)

    for path in itertools.chain(*(stage.required_files for stage, _ in exercise.test_modules)):
        dest = os.path.join(tests_dir, path)
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        shutil.copyfile(os.path.join(exercise.dirpath, path), dest)