def main(cmd):
    """Reset a model's global step and re-save its checkpoint.

    Loads the YAML training config from ``cmd.cfg``, rebuilds the network,
    restores the latest checkpoint from the run directory (if one exists),
    overwrites the graph's global step with ``cmd.start`` and writes a new
    checkpoint numbered with that step.
    """
    cfg = yaml.safe_load(cmd.cfg.read())
    print(yaml.dump(cfg, default_flow_style=False))

    # Run directory: <training.path>/<name>; created on first use.
    root_dir = os.path.join(cfg['training']['path'], cfg['name'])
    if not os.path.exists(root_dir):
        os.makedirs(root_dir)

    # Network input placeholders: board planes, policy vector, and two
    # 3-element targets (exact semantics defined by TFProcess -- not
    # visible here, confirm against its implementation).
    x = [
        tf.placeholder(tf.float32, [None, 112, 8 * 8]),
        tf.placeholder(tf.float32, [None, 1858]),
        tf.placeholder(tf.float32, [None, 3]),
        tf.placeholder(tf.float32, [None, 3]),
    ]

    tfprocess = TFProcess(cfg)
    tfprocess.init_net(x)

    # Restore existing weights if a checkpoint index file is present.
    if os.path.exists(os.path.join(root_dir, 'checkpoint')):
        cp = tf.train.latest_checkpoint(root_dir)
        tfprocess.restore(cp)

    start_from = cmd.start

    # Overwrite the restored step counter, then save under the new number.
    tfprocess.session.run(tfprocess.global_step.assign(start_from))
    path = os.path.join(root_dir, cfg['name'])
    tfprocess.saver.save(tfprocess.session, path, global_step=start_from)

    tfprocess.session.close()
def main(cmd):
    """Upgrade an existing checkpoint to the current graph definition.

    Rebuilds the network from the YAML config in ``cmd.cfg``, then restores
    from the latest checkpoint only those saved variables whose names AND
    shapes still match the new graph.  Variables that are new keep their
    initial values; saved variables with changed shapes or that no longer
    exist are dropped (each case is reported).  The upgraded graph is then
    re-saved at the restored global step.  Exits with status 1 if there is
    no checkpoint to upgrade.
    """
    cfg = yaml.safe_load(cmd.cfg.read())
    print(yaml.dump(cfg, default_flow_style=False))

    root_dir = os.path.join(cfg['training']['path'], cfg['name'])
    if not os.path.exists(root_dir):
        os.makedirs(root_dir)

    # Network input placeholders: board planes, policy vector, and two
    # 3-element targets (semantics defined by TFProcess; not visible here).
    x = [
        tf.placeholder(tf.float32, [None, 112, 8*8]),
        tf.placeholder(tf.float32, [None, 1858]),
        tf.placeholder(tf.float32, [None, 3]),
        tf.placeholder(tf.float32, [None, 3]),
    ]

    tfprocess = TFProcess(cfg)
    tfprocess.init_net(x)

    if os.path.exists(os.path.join(root_dir, 'checkpoint')):
        cp = tf.train.latest_checkpoint(root_dir)
        # Read the saved variable name -> shape map directly from the
        # checkpoint so we can compare it against the new graph.
        reader = tf.train.NewCheckpointReader(cp)
        saved_shapes = reader.get_variable_to_shape_map()
        # Variables present in the new graph but absent from the checkpoint:
        # these will simply keep their freshly initialized values.
        new_names = sorted(
            [var.name.split(':')[0] for var in tf.global_variables()
             if var.name.split(':')[0] not in saved_shapes])
        for saved_var_name in new_names:
            print("New name {} will use default value".format(saved_var_name))
        # (tensor_name, checkpoint_name) pairs for variables that exist in
        # both the graph and the checkpoint; ':0' suffix stripped for the
        # checkpoint-side name.
        var_names = sorted(
            [(var.name, var.name.split(':')[0]) for var in tf.global_variables()
             if var.name.split(':')[0] in saved_shapes])
        restore_vars = []
        restore_names = []
        for var_name, saved_var_name in var_names:
            curr_var = tf.get_default_graph().get_tensor_by_name(var_name)
            var_shape = curr_var.get_shape().as_list()
            # Only restore when the shape is unchanged; a mismatched shape
            # would make Saver.restore fail outright.
            if var_shape == saved_shapes[saved_var_name]:
                restore_vars.append(curr_var)
                restore_names.append(saved_var_name)
            else:
                print("Dropping {} due to shape change".format(saved_var_name))
        # Saved variables that no longer exist in the new graph at all.
        legacy_names = sorted(
            [name for name in saved_shapes.keys()
             if name not in restore_names])
        for saved_var_name in legacy_names:
            print("Dropping {} as no longer used".format(saved_var_name))
        # Restore only the compatible subset via a dedicated Saver.
        opt_saver = tf.train.Saver(restore_vars)
        opt_saver.restore(tfprocess.session, cp)
    else:
        print("No checkpoint to upgrade!")
        exit(1)

    # Re-save at the step carried over from the restored checkpoint.
    steps = tf.train.global_step(tfprocess.session, tfprocess.global_step)
    path = os.path.join(root_dir, cfg['name'])
    save_path = tfprocess.saver.save(tfprocess.session, path, global_step=steps)
    tfprocess.session.close()
# Exemple #3
# 0
def main(cmd):
    """Restore the latest checkpoint, reset the global step to ``cmd.start``,
    and save a new checkpoint numbered with that step (TF2 /
    ``tf.train.CheckpointManager`` variant).
    """
    config = yaml.safe_load(cmd.cfg.read())
    print(yaml.dump(config, default_flow_style=False))

    # Make sure the run directory for this training config exists.
    run_dir = os.path.join(config['training']['path'], config['name'])
    if not os.path.exists(run_dir):
        os.makedirs(run_dir)

    process = TFProcess(config)
    process.init_net()
    process.restore()

    new_step = cmd.start
    # Overwrite the restored step counter and checkpoint under the new number.
    process.global_step.assign(new_step)
    process.manager.save(checkpoint_number=new_step)
                                name='x')
        probs = tf.placeholder(tf.float32, [None, BOARD_SIZE * BOARD_SIZE + 1])
        winner = tf.placeholder(tf.float32, [None, 1])
    else:
        planes = tf.placeholder(tf.float32,
                                [None, FEATURES, BOARD_SIZE, BOARD_SIZE],
                                name='x')
        probs = tf.placeholder(tf.float32, [None, BOARD_SIZE * BOARD_SIZE + 1])
        winner = tf.placeholder(tf.float32, [None, 1])

    tfprocess = TFProcess()
    tfprocess.TFCOREML = True
    tfprocess.DATA_FORMAT = data_format
    tfprocess.BOARD_SIZE = BOARD_SIZE
    tfprocess.INPUT_DIM = 2
    tfprocess.FEATURES = FEATURES
    tfprocess.RESIDUAL_FILTERS = filters
    tfprocess.RESIDUAL_BLOCKS = blocks
    if BOARD_SIZE == 9:
        tfprocess.VALUE_FULLY_CONNECTED = 64
    tfprocess.training = False  # batch normalizationをコンバートするため
    tfprocess.init_net(planes, probs, winner)
    tfprocess.replace_weights(weights)
    tf.train.write_graph(tf.get_default_graph(),
                         os.path.dirname(sys.argv[3]),
                         os.path.basename(sys.argv[3]),
                         as_text=True)
    with tf.get_default_graph().as_default():
        saver = tf.train.Saver()
        print(saver.save(tfprocess.session, "./tmp/model.ckpt"))
# Exemple #5
# 0
            print("Version", line.strip())
            if line != '1\n':
                raise ValueError("Unknown version {}".format(line.strip()))
        else:
            weights.append(list(map(float, line.split(' '))))
        if e == 2:
            filters = len(line.split(' '))
            print("Channels", filters)
    blocks = e - (4 + 14)
    if blocks % 8 != 0:
        raise ValueError("Inconsistent number of weights in the file")
    blocks //= 8
    print("Blocks", blocks)

# Inject the filter/block counts parsed from the weights file into the model
# config, then rebuild the network, load the weights, and write a checkpoint.
cfg['model']['filters'] = filters
cfg['model']['residual_blocks'] = blocks
print(yaml.dump(cfg, default_flow_style=False))

# Input placeholders: board planes, policy vector, and scalar value target
# (presumably the 120-plane / 1924-move network format -- confirm against
# TFProcess).
x = [
    tf.placeholder(tf.float32, [None, 120, 8 * 8]),
    tf.placeholder(tf.float32, [None, 1924]),
    tf.placeholder(tf.float32, [None, 1])
]

tfprocess = TFProcess(cfg)
tfprocess.init_net(x)
tfprocess.replace_weights(weights)
path = os.path.join(os.getcwd(), cfg['name'])
save_path = tfprocess.saver.save(tfprocess.session, path, global_step=0)
# Report the path the saver actually wrote (includes the "-0" step suffix);
# also fixes the "Writted" typo in the original message.
print("Wrote model to {}".format(save_path))