def main():
    """Run an Optuna hyperparameter search for next-move evaluation.

    Positional args: kifu file, engine yaml, value_ranges yaml, out_dir.
    Study state is persisted in a SQLite database inside out_dir so the
    search can be resumed later with --resume.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("kifu")
    parser.add_argument("engine")
    parser.add_argument("value_ranges")
    parser.add_argument("out_dir")
    parser.add_argument("--trials", type=int, default=10)
    parser.add_argument("--resume", action="store_true")
    args = parser.parse_args()

    # The objective function reads its configuration from this
    # module-level dict.
    base_config["engine"] = yaml_load(args.engine)
    base_config["value_ranges"] = yaml_load(args.value_ranges)
    base_config["kifu"] = args.kifu
    base_config["out_dir"] = args.out_dir

    os.makedirs(base_config["out_dir"], exist_ok=True)
    study_name = 'next_move_evaluation_opt'  # Unique identifier of the study.
    # An absolute path also works inside the sqlite URL.
    storage = 'sqlite:///' + os.path.join(base_config["out_dir"], "optuna.db")
    if args.resume:
        # load_study is the supported API for attaching to an existing study;
        # instantiating optuna.Study directly is deprecated.
        study = optuna.load_study(study_name=study_name, storage=storage)
    else:
        study = optuna.create_study(study_name=study_name, storage=storage)
    study.optimize(objective, n_trials=args.trials)
# Ejemplo n.º 2  (scrape-artifact separator, commented out so the file parses)
def main():
    """Entry point for training: parse args, assemble trainer state, run the loop."""
    parser = argparse.ArgumentParser()
    parser.add_argument("train_dir")
    parser.add_argument("--device", help="cpu / cuda:0 etc.")
    parser.add_argument("--resume",
                        help="checkpoint directory to resume training")
    parser.add_argument("--initmodel",
                        help="model.pt file instead of random initialization")
    args = parser.parse_args()

    train_dir = args.train_dir
    model_config = yaml_load(os.path.join(train_dir, "model.yaml"))
    train_config = yaml_load(os.path.join(train_dir, "train.yaml"))
    # Honor an explicit --device; otherwise prefer the first GPU, else CPU.
    if args.device:
        device = torch.device(args.device)
    else:
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    (model, criterion_policy, criterion_value, optimizer,
     lr_scheduler) = setup_trainer(model_config, train_config, device)
    train_loader, val_loader = setup_data_loader(train_config)
    summary_writer = SummaryWriter(os.path.join(train_dir, "log"))

    if args.resume:
        # Restore model/optimizer/scheduler state in place from a checkpoint.
        resumed = resume_status(train_dir,
                                args.resume,
                                device=device,
                                model=model,
                                optimizer=optimizer,
                                lr_scheduler=lr_scheduler)
        train_manager = resumed["train_manager"]
    else:
        if args.initmodel:
            # Start from a pre-trained model instead of random weights.
            state = torch.load(args.initmodel, map_location=str(device))
            model.load_state_dict(state["model_state_dict"])
        train_manager = TrainManager(**train_config["manager"])

    create_stop_file(train_dir)
    train_loop(train_manager=train_manager,
               train_config=train_config,
               device=device,
               model=model,
               criterion_policy=criterion_policy,
               criterion_value=criterion_value,
               optimizer=optimizer,
               lr_scheduler=lr_scheduler,
               train_loader=train_loader,
               val_loader=val_loader,
               summary_writer=summary_writer,
               train_dir=train_dir)
# Ejemplo n.º 3  (scrape-artifact separator, commented out so the file parses)
def get_win_rate(output_path):
    """Return the fraction of games won by player 1 in an auto-match log.

    Args:
        output_path: path to a YAML result file produced by auto_match.

    Raises:
        ValueError: if the log contains no match results. (The original
            code divided unconditionally and raised an opaque
            ZeroDivisionError in that case.)
    """
    auto_match_result = util.yaml_load(output_path)
    results = list(auto_match_result.match_results)
    if not results:
        raise ValueError(f"no match results found in {output_path}")
    n_win = sum(1 for result in results if result.winner == 1)
    return n_win / len(results)
# Ejemplo n.º 4  (scrape-artifact separator, commented out so the file parses)
def main():
    """Generate random engine configs and grade each against a strengthening base.

    For each generated target config, auto-matches are played against the
    base engine at increasing strength levels (up to 4); the base is
    strengthened while the target keeps a win rate >= 0.5.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("data_dir")
    parser.add_argument("--n_configs", type=int, default=20)
    args = parser.parse_args()
    data_dir = args.data_dir
    rule_src = f"{data_dir}/config/rule.yaml"
    # Explicit check instead of `assert`: asserts are stripped under `python -O`.
    if not os.path.exists(rule_src):
        raise FileNotFoundError(rule_src)
    for _ in range(args.n_configs):
        run_dir = generate_run_dir(data_dir)
        print(run_dir)
        rule_file_path = f"{run_dir}/rule.yaml"
        shutil.copy(rule_src, rule_file_path)
        config_target_path = f"{run_dir}/engine_target.yaml"
        # Reload the base config for every run: it is mutated by
        # increment_base_strength below.
        config_base = util.yaml_load(f"{data_dir}/config/engine_base.yaml")
        config_target = util.yaml_load(f"{data_dir}/config/engine_target.yaml")
        generate_random_options(config_target)
        print(config_target)
        util.yaml_dump(config_target, config_target_path)

        for strength in range(4):
            print("strength", strength)
            config_base_path = f"{run_dir}/engine_base_{strength}.yaml"
            util.yaml_dump(config_base, config_base_path)
            output_prefix = f"{run_dir}/result_{strength}"
            cmd = [
                "python", "-m", "neneshogi.auto_match", rule_file_path,
                config_base_path, config_target_path, "--log_prefix",
                output_prefix
            ]
            subprocess.check_call(cmd)

            win_rate = get_win_rate(output_prefix + ".yaml")
            print("win_rate", win_rate)
            if win_rate >= 0.5:
                # Target beat this base level; strengthen the base and retry.
                increment_base_strength(config_base)
            else:
                break
# Ejemplo n.º 5  (scrape-artifact separator, commented out so the file parses)
def load_model(checkpoint_dir, device, board_shape=(119, 9, 9),
               move_dim=27 * 9 * 9):
    """Load a trained model from a checkpoint directory, in eval mode.

    checkpoint_dir is expected to look like
    ``path/to/training/checkpoints/train_012345``; model.yaml is read from
    two directory levels above it.

    Args:
        checkpoint_dir: directory containing model.pt.
        device: torch device (or device string) to map the weights onto.
        board_shape: input feature-plane shape passed to the model class
            (previously hard-coded; default preserves old behavior).
        move_dim: size of the move output space (previously hard-coded).

    Returns:
        The model instance with weights loaded, moved to ``device``,
        switched to eval() mode.
    """
    train_root = os.path.dirname(os.path.dirname(checkpoint_dir))
    model_config = yaml_load(os.path.join(train_root, "model.yaml"))
    # The model class name is looked up dynamically from the config.
    model_class = getattr(models, model_config["model"])
    model = model_class(board_shape=board_shape,
                        move_dim=move_dim,
                        **model_config.get("kwargs", {}))
    model.eval()
    model.to(device)
    saved = torch.load(os.path.join(checkpoint_dir, "model.pt"),
                       map_location=str(device))
    model.load_state_dict(saved["model_state_dict"])
    return model
# Ejemplo n.º 6  (scrape-artifact separator, commented out so the file parses)
def main():
    """Run iterated matches of a target engine against a set of rated base engines."""
    parser = argparse.ArgumentParser()
    parser.add_argument("rule")
    parser.add_argument("base_info")
    parser.add_argument("target_engine")
    parser.add_argument("count", type=int)
    parser.add_argument("dst")
    args = parser.parse_args()

    rule = Rule.load(args.rule)
    base_info = yaml_load(args.base_info)
    # Pair each base engine config with its known rating.
    base_engines_rates = [
        {"config": EngineConfig.load_obj(entry["config"]),
         "rate": entry["rate"]}
        for entry in base_info["engines"]
    ]
    target_config = EngineConfig.load(args.target_engine)
    iter_match(rule, base_engines_rates, target_config, args.count, args.dst)
def main():
    """Evaluate a CNTK model over a packed-sfen data set, writing results to dst."""
    parser = argparse.ArgumentParser()
    parser.add_argument("model", help="model cmf file")
    parser.add_argument("kifu_config",
                        help="yaml file of kwargs of PackedSfenDataSource")
    parser.add_argument("dst")
    parser.add_argument("--batchsize", type=int, default=256)
    args = parser.parse_args()

    # FULL_DATA_SWEEP: stop after a single pass over the data.
    kifu_kwargs = util.yaml_load(args.kifu_config)
    ds = PackedSfenDataSource(max_samples=C.io.FULL_DATA_SWEEP, **kifu_kwargs)
    net_fn = C.load_model(args.model)
    board_input = C.input_variable(ds.board_shape)
    model = net_fn(board_input)
    with open(args.dst, "wb") as out:
        loop(out, model, ds, args.batchsize)
# Ejemplo n.º 8  (scrape-artifact separator, commented out so the file parses)
 def load(cls, engine_file) -> "EngineConfig":
     """Load an engine config from a YAML file.

     Uses ``cls`` instead of the previously hard-coded ``EngineConfig``
     so that subclasses calling this alternate constructor get an
     instance of the right type.
     """
     return cls.load_obj(yaml_load(engine_file))
# Ejemplo n.º 9  (scrape-artifact separator, commented out so the file parses)
 def load(cls, rule_file) -> "Rule":
     """Build a Rule instance from a YAML file of attribute/value pairs."""
     loaded = yaml_load(rule_file)
     rule = cls()
     # Every loaded key becomes an attribute on the new instance.
     rule.__dict__.update(loaded)
     return rule