def get_metadata(metadata_file: str) -> Metadata:
    """Load metadata from *metadata_file*, falling back to the default.

    Returns the parsed Metadata, or ``Metadata.default_metadata()`` when
    the file cannot be read (a warning is logged in that case).
    """
    try:
        return Metadata.load_from(metadata_file)
    except IOError:
        logger.warning("{} is not found. Default metadata is selected. ".format(
            metadata_file))
        return Metadata.default_metadata()
def main(prog, args):
    """Entry point: compile the program in the current directory.

    Takes no options; argparse is used only to provide --help output.
    """
    arg_parser = argparse.ArgumentParser(
        prog=prog,
        usage="Compile your program in the current directory (no argument)",
        formatter_class=argparse.RawTextHelpFormatter)
    arg_parser.parse_args(args)
    compile_main_and_judge_programs(
        Metadata.load_from("./metadata.json"), force_compile=True)
def get_sample_patterns(metadata_file: str) -> Tuple[str, str]:
    """Return the (input, output) sample filename patterns from metadata.

    Falls back to the module-level default patterns (with a warning) when
    *metadata_file* cannot be read.
    """
    try:
        loaded = Metadata.load_from(metadata_file)
        return loaded.sample_in_pattern, loaded.sample_out_pattern
    except IOError:
        logging.warning(
            "{} is not found. Assume the example file name patterns are {} and {}".format(
                metadata_file,
                DEFAULT_IN_EXAMPLE_PATTERN,
                DEFAULT_OUT_EXAMPLE_PATTERN))
        return DEFAULT_IN_EXAMPLE_PATTERN, DEFAULT_OUT_EXAMPLE_PATTERN
def get_sample_patterns(metadata_file: str) -> Tuple[str, str]:
    """Read the sample input/output filename patterns from *metadata_file*.

    When the file is unreadable, log a warning and return the defaults.
    """
    try:
        md = Metadata.load_from(metadata_file)
        return md.sample_in_pattern, md.sample_out_pattern
    except IOError:
        logging.warning(
            "{} is not found. Assume the example file name patterns are {} and {}"
            .format(metadata_file,
                    DEFAULT_IN_EXAMPLE_PATTERN,
                    DEFAULT_OUT_EXAMPLE_PATTERN))
        return DEFAULT_IN_EXAMPLE_PATTERN, DEFAULT_OUT_EXAMPLE_PATTERN
def test_run_single_test_multisolution(self):
    """Set a multi-solution judge on a copied fixture and run every sample."""
    target_dir = os.path.join(self.temp_dir, "test")
    shutil.copytree(
        os.path.join(RESOURCE_DIR, "test_run_single_test_multisolution"),
        target_dir)
    metadata_path = os.path.join(target_dir, "metadata.json")

    setter_main('', ['-d', target_dir, "-j", "multisolution"])
    self.assertTrue(
        isinstance(Metadata.load_from(metadata_path).judge_method,
                   MultiSolutionJudge))

    # Judge type is already set; now switch only the language.
    setter_main('', ['-d', target_dir, "--lang", "cpp"])
    self.assertTrue(Metadata.load_from(metadata_path).lang.name == 'cpp')

    for case_no in ('1', '2', '3', '4'):
        self.assertTrue(
            tester.main('', ['-d', target_dir, '-n', case_no, '-c', "True"]))
def test_compiler_and_tester(self):
    """Compile and run the fixture project once per supported language."""
    work_dir = os.path.join(self.temp_dir, "test")
    shutil.copytree(
        os.path.join(RESOURCE_DIR, "test_compiler_and_tester"), work_dir)
    metadata_path = os.path.join(work_dir, "metadata.json")

    for language in ALL_LANGUAGES:
        setter_main('', ["--lang", language.name, '-d', work_dir])
        compile_main_and_judge_programs(
            Metadata.load_from(metadata_path),
            force_compile=True,
            cwd=work_dir)
        for case_no in range(1, 5):
            self.assertTrue(
                tester.main('', [
                    '-d', work_dir, "-n", "{:d}".format(case_no), "-j", "normal"
                ]))
def main(prog, args, credential_supplier=None, use_local_session_cache=True) -> bool:
    """Run the local tests and, if they pass (or --force is given), submit.

    Args:
        prog: Program name for argparse.
        args: Command-line argument list.
        credential_supplier: Optional callable providing login credentials.
        use_local_session_cache: Whether to reuse a locally cached session.

    Returns:
        True when the code was submitted, False otherwise (missing
        metadata, login failure, safety cancel, or failing local tests).
    """
    parser = argparse.ArgumentParser(
        prog=prog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        "--exec", '-e',
        help="File path to the execution target. [Default] Automatically detected exec file",
        default=None)
    parser.add_argument(
        "--dir", '-d',
        help="Target directory to test. [Default] Current directory",
        default=".")
    parser.add_argument(
        "--timeout", '-t',
        help="Timeout for each test cases (sec) [Default] 1",
        type=int,
        default=1)
    parser.add_argument(
        "--code", '-c',
        help="Path to the source code to submit [Default] Code path written in metadata.json",
        type=str,
        default=None)
    parser.add_argument(
        "--force", "-f",
        action="store_true",
        help="Submit the code regardless of the local test result [Default] False",
        default=False)
    parser.add_argument(
        "--save-no-session-cache",
        action="store_true",
        help="Save no session cache to avoid security risk",
        default=False)
    parser.add_argument(
        "--unlock-safety", "-u",
        action="store_true",
        help="By default, this script only submits the first code per problem. However, you can remove"
             " the safety by this option in order to submit codes twice or more.",
        default=False)
    args = parser.parse_args(args)

    metadata_file = os.path.join(args.dir, "metadata.json")
    try:
        metadata = Metadata.load_from(metadata_file)
    except IOError:
        logger.error(
            "{0} is not found! You need {0} to use this submission functionality."
            .format(metadata_file))
        return False

    try:
        client = AtCoderClient()
        client.login(
            # BUGFIX: the flag means "do NOT save the cache", so it must be
            # negated before being passed as save_session_cache.
            save_session_cache=not args.save_no_session_cache,
            credential_supplier=credential_supplier,
            use_local_session_cache=use_local_session_cache,
        )
    except LoginError:
        logger.error("Login failed. Try again.")
        return False

    tester_args = []
    if args.exec:
        tester_args += ["-e", args.exec]
    if args.dir:
        tester_args += ["-d", args.dir]
    if args.timeout:
        tester_args += ["-t", str(args.timeout)]

    # Submit only when local tests pass, unless --force bypasses them.
    if args.force or tester.main("", tester_args):
        submissions = client.download_submission_list(metadata.problem.contest)
        if not args.unlock_safety:
            # Safety net: refuse a second submission to the same problem.
            for submission in submissions:
                if submission.problem_id == metadata.problem.problem_id:
                    logger.error(
                        with_color(
                            "Cancel submitting because you already sent some code to the problem. Please "
                            "specify -u to send the code. {}".format(
                                metadata.problem.contest.get_submissions_url(
                                    submission)),
                            Fore.LIGHTRED_EX))
                    return False

        code_path = args.code or os.path.join(args.dir, metadata.code_filename)
        with open(code_path, 'r') as f:
            source = f.read()
        logger.info("Submitting {} as {}".format(code_path, metadata.lang.name))
        submission = client.submit_source_code(
            metadata.problem.contest, metadata.problem, metadata.lang, source)
        logger.info("{} {}".format(
            with_color("Done!", Fore.LIGHTGREEN_EX),
            metadata.problem.contest.get_submissions_url(submission)))
        # BUGFIX: the function is annotated -> bool but used to fall off the
        # end (returning None) even after a successful submission.
        return True
    return False
def main(prog, args):
    """Compile the main and judge programs described by ./metadata.json."""
    compile_main_and_judge_programs(
        Metadata.load_from("./metadata.json"), force_compile=True)
def prepare_procedure(atcoder_client: AtCoderClient, problem: Problem,
                      config: Config):
    """Prepare a local workspace directory for a single problem.

    Downloads the statement, stores the sample cases, generates template
    code (backing up any existing code file first), writes metadata.json,
    and finally runs the configured post-process command on the directory.

    Raises:
        InputFormatDetectionError / SampleDetectionError: when the problem
            statement cannot be parsed (re-raised after logging).
    """
    workspace_root_path = config.code_style_config.workspace_dir
    template_code_path = config.code_style_config.template_file
    lang = config.code_style_config.lang

    pid = problem.get_alphabet()
    problem_dir_path = os.path.join(
        workspace_root_path, problem.get_contest().get_id(), pid)

    # Local helpers that prefix every log message with the problem id.
    def emit_error(text):
        logger.error(with_color("Problem {}: {}".format(pid, text), Fore.RED))

    def emit_warning(text):
        logger.warning("Problem {}: {}".format(pid, text))

    def emit_info(text):
        logger.info("Problem {}: {}".format(pid, text))

    emit_info('{} is used for template'.format(template_code_path))

    original_html = atcoder_client.download_problem_content_raw_html(problem)
    constants = predict_constants(original_html)

    # Interactive problems have no parsable input format / samples, so the
    # statement is only analyzed for the other judge types.
    if constants.judge_method.judge_type != JudgeType.Interactive:
        # Fetch problem data from the statement
        try:
            content = get_problem_content(original_html)
        except InputFormatDetectionError as e:
            emit_error("Failed to download input format.")
            raise e
        except SampleDetectionError as e:
            emit_error("Failed to download samples.")
            raise e

        # Store examples to the directory path
        if len(content.get_samples()) == 0:
            emit_info("No samples.")
        else:
            os.makedirs(problem_dir_path, exist_ok=True)
            create_examples(content.get_samples(), problem_dir_path,
                            config.etc_config.in_example_format,
                            config.etc_config.out_example_format)
            emit_info("Created examples.")

    code_file_path = os.path.join(problem_dir_path,
                                  "main.{}".format(lang.extension))

    # If there is an existing code, just create backup
    if os.path.exists(code_file_path):
        # Probe "<code>.1", "<code>.2", ... for the first unused backup name.
        backup_id = 1
        while True:
            backup_name = "{}.{}".format(code_file_path, backup_id)
            if not os.path.exists(backup_name):
                new_path = backup_name
                shutil.copy(code_file_path, backup_name)
                break
            backup_id += 1
        emit_info("Backup for existing code '{}' -> '{}'".format(
            code_file_path, new_path))

    if constants.judge_method.judge_type != JudgeType.Interactive:
        # `content` is defined here because this guard matches the one above.
        try:
            prediction_result = predict_format(content)
            emit_info(
                with_color("Format prediction succeeded", Fore.LIGHTGREEN_EX))
        except (NoPredictionResultError, MultiplePredictionResultsError) as e:
            # Fall back to an empty prediction so code generation still runs.
            prediction_result = FormatPredictionResult.empty_result()
            if isinstance(e, NoPredictionResultError):
                msg = "No prediction -- Failed to understand the input format"
            else:
                msg = "Too many prediction -- Failed to understand the input format"
            emit_warning(with_color(msg, Fore.LIGHTRED_EX))
    else:
        prediction_result = FormatPredictionResult.empty_result()

    code_generator = config.code_style_config.code_generator
    with open(template_code_path, "r") as f:
        template = f.read()

    create_code(
        code_generator(
            CodeGenArgs(template,
                        prediction_result.format,
                        constants,
                        config.code_style_config)),
        code_file_path)
    emit_info("Saved code to {}".format(code_file_path))

    # Save metadata
    metadata_path = os.path.join(problem_dir_path, "metadata.json")
    Metadata(
        problem,
        os.path.basename(code_file_path),
        # "{}" placeholders become "*" glob patterns in stored metadata.
        config.etc_config.in_example_format.replace("{}", "*"),
        config.etc_config.out_example_format.replace("{}", "*"),
        lang,
        constants.judge_method,
    ).save_to(metadata_path)
    emit_info("Saved metadata to {}".format(metadata_path))

    if config.postprocess_config.exec_cmd_on_problem_dir is not None:
        emit_info(
            _message_on_execution(
                problem_dir_path,
                config.postprocess_config.exec_cmd_on_problem_dir))
        config.postprocess_config.execute_on_problem_dir(problem_dir_path)

    output_splitter()
def main(prog, args) -> None:
    """Entry point of `atcoder-tools set`: update metadata.json in place.

    Supports changing the judge type (with an optional error value for
    decimal judges) and the programming language, creating judge/main code
    files as needed.
    """
    if len(args) == 0:
        print("Usage: atcoder tools set [options]")
        return
    parser = argparse.ArgumentParser(
        prog=prog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--judge-type', '-j',
                        help='error type'
                             ' must be one of [{}]'.format(
                                 ', '.join(USER_FACING_JUDGE_TYPE_LIST)),
                        type=str,
                        default=None)
    parser.add_argument('--error-value', '-v',
                        help='error value for decimal number judge:'
                             ' [Default] ' + str(DEFAULT_EPS),
                        type=float,
                        default=None)
    parser.add_argument(
        "--lang",
        help="Programming language of your template code, {}.\n".format(
            " or ".join([lang.name for lang in ALL_LANGUAGES])),
        default=None)
    parser.add_argument(
        "--dir", '-d',
        help="Target directory to test. [Default] Current directory",
        default=".")
    args = parser.parse_args(args)

    metadata_path = os.path.join(args.dir, "metadata.json")
    old_metadata = Metadata.load_from(metadata_path)
    # Use the old metadata as base metadata.
    output_metadata = Metadata.load_from(metadata_path)

    # All decimal-style error types are variants of the "decimal" judge.
    if args.judge_type in ["absolute", "relative", "absolute_or_relative"]:
        new_metadata_judge_type = "decimal"
    else:
        new_metadata_judge_type = args.judge_type
        if args.judge_type == "decimal":
            # BUGFIX: "-j decimal" used to crash on ErrorType("decimal");
            # treat plain "decimal" as the most permissive error type.
            args.judge_type = "absolute_or_relative"

    old_metadata_judge_type = old_metadata.judge_method.judge_type.value
    if new_metadata_judge_type is not None and new_metadata_judge_type != old_metadata_judge_type:
        if new_metadata_judge_type == JudgeType.Normal.value:
            output_metadata.judge_method = NormalJudge()
        elif new_metadata_judge_type == JudgeType.Decimal.value:
            output_metadata.judge_method = DecimalJudge()
        elif new_metadata_judge_type == JudgeType.MultiSolution.value:
            output_metadata.judge_method = MultiSolutionJudge()
        elif new_metadata_judge_type == JudgeType.Interactive.value:
            output_metadata.judge_method = InteractiveJudge()
        else:
            raise NoJudgeTypeException()

    judge_code_filename = os.path.join(args.dir, "judge.cpp")
    if new_metadata_judge_type == JudgeType.Decimal.value:
        if args.error_value is not None:
            output_metadata.judge_method.diff = args.error_value
        else:
            # BUGFIX: logger.warn is deprecated; use logger.warning.
            logger.warning(
                "Error-value is not specified. Default value will be set.")
        output_metadata.judge_method.error_type = ErrorType(args.judge_type)
    elif new_metadata_judge_type == JudgeType.MultiSolution.value:
        if not os.path.exists(judge_code_filename):
            print("Creating {} (multi-solution)".format(judge_code_filename))
            judge_template_path = get_default_judge_template_path('cpp')
            shutil.copy(judge_template_path, judge_code_filename)
        else:
            print("Judge code exists. Skipping creating judge code...")
    elif new_metadata_judge_type == JudgeType.Interactive.value:
        if not os.path.exists(judge_code_filename):
            print("Creating {} (interactive)".format(judge_code_filename))
            judge_template_path = get_default_judge_template_path('cpp')
            shutil.copy(judge_template_path, judge_code_filename)
        else:
            print("Judge code exists. Skipping creating judge code...")

    if args.lang is not None:
        if args.lang != output_metadata.lang.name:
            output_metadata.lang = Language.from_name(args.lang)
            output_metadata.code_filename = output_metadata.lang.get_code_filename(
                'main')
            url = "https://atcoder.jp/contests/{}/tasks/{}".format(
                output_metadata.problem.contest.contest_id,
                output_metadata.problem.problem_id)
            main_code_filename = os.path.join(args.dir,
                                              output_metadata.code_filename)
            if not os.path.exists(main_code_filename):
                # BUGFIX: close the generated code file (handle was leaked).
                with open(main_code_filename, 'w') as code_file:
                    codegen_main("", ["--lang", output_metadata.lang.name, url],
                                 code_file)
            else:
                print("File exists: ", output_metadata.code_filename)
        else:
            print("Already set to {}. Skipping changing language...".format(
                args.lang))

    output_metadata.save_to(os.path.join(args.dir, "metadata.json"))
def main(prog, args):
    """Entry point of `atcoder-tools set` (legacy variant).

    Updates the judge type and/or language recorded in <dir>/metadata.json,
    creating ./judge.cpp and the main code file as needed.

    Returns:
        The updated Metadata object (also saved back to disk), or None when
        called without arguments.
    """
    if len(args) == 0:
        print("Usage: atcoder tools set [options]")
        return
    parser = argparse.ArgumentParser(
        prog=prog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--judge-type', '-j',
                        help='error type'
                             ' must be one of [{}]'.format(
                                 ', '.join(USER_FACING_JUDGE_TYPE_LIST)),
                        type=str,
                        default=None)
    parser.add_argument('--error-value', '-v',
                        help='error value for decimal number judge:'
                             ' [Default] ' + str(DEFAULT_EPS),
                        type=float,
                        default=None)
    parser.add_argument("--lang",
                        help="Programming language of your template code, {}.\n".format(
                            " or ".join([lang.name for lang in ALL_LANGUAGES])),
                        default=None)
    parser.add_argument("--dir", '-d',
                        help="Target directory to test. [Default] Current directory",
                        default=".")
    args = parser.parse_args(args)

    metadata = Metadata.load_from(args.dir + "/metadata.json")

    # All decimal-style error types map onto the single "decimal" judge.
    new_judge_type = args.judge_type
    if new_judge_type in ["decimal", "absolute", "relative", "absolute_or_relative"]:
        new_judge_type = "decimal"
        if args.judge_type == "decimal":
            # Plain "decimal" means the most permissive error type.
            args.judge_type = "absolute_or_relative"

    old_judge_type = metadata.judge_method.judge_type.value
    if new_judge_type is not None and new_judge_type != old_judge_type:
        if new_judge_type == JudgeType.Normal.value:
            metadata.judge_method = NormalJudge()
        elif new_judge_type == JudgeType.Decimal.value:
            metadata.judge_method = DecimalJudge()
        elif new_judge_type == JudgeType.MultiSolution.value:
            metadata.judge_method = MultiSolutionJudge()
        elif new_judge_type == JudgeType.Interactive.value:
            metadata.judge_method = InteractiveJudge()
        else:
            raise NoJudgeTypeException()

    if new_judge_type == JudgeType.Decimal.value:
        if args.error_value is not None:
            metadata.judge_method.diff = args.error_value
        else:
            print("Warning: error-value is not specified default value is set.")
        metadata.judge_method.error_type = ErrorType(args.judge_type)
    elif new_judge_type == JudgeType.MultiSolution.value:
        if not os.path.exists("./judge.cpp"):
            # BUGFIX: message typo "multi sotlution" corrected.
            print("touch ./judge.cpp (multi solution)")
            judge_template_path = get_default_judge_template_path('cpp')
            shutil.copy(judge_template_path, "./judge.cpp")
        else:
            print("Judge Code exists")
    elif new_judge_type == JudgeType.Interactive.value:
        # BUGFIX: previously tested "/judge.cpp" (filesystem root), so an
        # existing ./judge.cpp was silently overwritten on every run.
        if not os.path.exists("./judge.cpp"):
            print("touch ./judge.cpp (interactive)")
            judge_template_path = get_default_judge_template_path('cpp')
            shutil.copy(judge_template_path, "./judge.cpp")
        else:
            print("Judge Code exists")

    if args.lang is not None:
        if args.lang != metadata.lang.name:
            metadata.lang = Language.from_name(args.lang)
            metadata.code_filename = metadata.lang.get_code_filename('main')
            url = "https://atcoder.jp/contests/{}/tasks/{}".format(
                metadata.problem.contest.contest_id,
                metadata.problem.problem_id)
            if not os.path.exists(metadata.code_filename):
                # BUGFIX: close the generated code file (handle was leaked).
                with open(metadata.code_filename, 'w') as code_file:
                    codegen_main("", ["--lang", metadata.lang.name, url],
                                 code_file)
            else:
                print("file exists: ", metadata.code_filename)
        else:
            print("already set to {}".format(args.lang))

    metadata.save_to(args.dir + "/metadata.json")
    return metadata
def main(prog, args, credential_supplier=None, use_local_session_cache=True) -> bool:
    """Run the local tests and, if they pass (or --force is given), submit.

    Args:
        prog: Program name for argparse.
        args: Command-line argument list.
        credential_supplier: Optional callable providing login credentials.
        use_local_session_cache: Whether to reuse a locally cached session.

    Returns:
        True when the code was submitted, False otherwise (missing
        metadata, login failure, safety cancel, or failing local tests).
    """
    parser = argparse.ArgumentParser(
        prog=prog, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("--exec", '-e',
                        help="File path to the execution target. [Default] Automatically detected exec file",
                        default=None)
    parser.add_argument("--dir", '-d',
                        help="Target directory to test. [Default] Current directory",
                        default=".")
    parser.add_argument("--timeout", '-t',
                        help="Timeout for each test cases (sec) [Default] 1",
                        type=int,
                        default=1)
    parser.add_argument("--code", '-c',
                        help="Path to the source code to submit [Default] Code path written in metadata.json",
                        type=str,
                        default=None)
    parser.add_argument("--force", "-f",
                        action="store_true",
                        help="Submit the code regardless of the local test result [Default] False",
                        default=False)
    parser.add_argument("--save-no-session-cache",
                        action="store_true",
                        help="Save no session cache to avoid security risk",
                        default=False)
    parser.add_argument("--unlock-safety", "-u",
                        action="store_true",
                        help="By default, this script only submits the first code per problem. However, you can remove"
                             " the safety by this option in order to submit codes twice or more.",
                        default=False)
    args = parser.parse_args(args)

    metadata_file = os.path.join(args.dir, "metadata.json")
    try:
        metadata = Metadata.load_from(metadata_file)
    except IOError:
        logging.error(
            "{0} is not found! You need {0} to use this submission functionality.".format(metadata_file))
        return False

    try:
        client = AtCoderClient()
        # BUGFIX: the flag means "do NOT save the cache", so it must be
        # negated before being passed as save_session_cache.
        client.login(save_session_cache=not args.save_no_session_cache,
                     credential_supplier=credential_supplier,
                     use_local_session_cache=use_local_session_cache,
                     )
    except LoginError:
        logging.error("Login failed. Try again.")
        return False

    tester_args = []
    if args.exec:
        tester_args += ["-e", args.exec]
    if args.dir:
        tester_args += ["-d", args.dir]
    if args.timeout:
        tester_args += ["-t", str(args.timeout)]

    # Submit only when local tests pass, unless --force bypasses them.
    if args.force or tester.main("", tester_args):
        submissions = client.download_submission_list(metadata.problem.contest)
        if not args.unlock_safety:
            # Safety net: refuse a second submission to the same problem.
            for submission in submissions:
                if submission.problem_id == metadata.problem.problem_id:
                    logging.error(with_color(
                        "Cancel submitting because you already sent some code to the problem. Please "
                        "specify -u to send the code. {}".format(
                            metadata.problem.contest.get_submissions_url(submission)),
                        Fore.LIGHTRED_EX))
                    return False

        code_path = args.code or os.path.join(args.dir, metadata.code_filename)
        with open(code_path, 'r') as f:
            source = f.read()
        logging.info(
            "Submitting {} as {}".format(code_path, metadata.lang.name))
        submission = client.submit_source_code(
            metadata.problem.contest, metadata.problem, metadata.lang, source)
        logging.info("{} {}".format(
            with_color("Done!", Fore.LIGHTGREEN_EX),
            metadata.problem.contest.get_submissions_url(submission)))
        # BUGFIX: the function is annotated -> bool but used to fall off the
        # end (returning None) even after a successful submission.
        return True
    return False