Example #1
import tensorflow as tf  # TensorFlow 1.x API (Session, name_scope, variable_scope)

# FLAGS, Config, dataloader, ModelInput, Model, train and test are assumed to be
# defined elsewhere in this project.

def main(_):

    if not FLAGS.config_file:
        raise ValueError("Must set --config_file to specify the model's hyperparameters")

    config = Config(FLAGS.config_file)

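    # Load the dataset and copy its derived sizes (max sentence length, vocabulary
    # size, label count) into the model configuration.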
    data = dataloader.Dataloader(config.data_file)
    dataset = data.load_data()

    config.num_steps = data.sentence_max_len
    config.vocab_size = data.vocab_size
    config.label_size = data.label_size
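    # Uniform random initializer shared by all model variable scopes below.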
    initializer = tf.random_uniform_initializer(-config.init_scale,
                                                config.init_scale)

    kfold = config.kfold
    total_data = len(dataset)
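    # k-fold cross-validation: each fold holds out one contiguous slice of the
    # dataset as the test set and trains on the remainder.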
    for i in range(kfold):
        fold_start = int(total_data / kfold * i)
        fold_end = int(total_data / kfold * (i + 1))
        test_data = dataset[fold_start:fold_end]
        train_data = dataset[:fold_start] + dataset[fold_end:]

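        # Start each fold from a fresh graph so variables from earlier folds do not persist.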
        tf.reset_default_graph()
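        # The three model instances below share one set of weights via variable_scope
        # "Model": the training model creates the variables (reuse=None) and the
        # evaluation models reuse them (reuse=True).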
        with tf.name_scope("Train"):
            train_input = ModelInput(raw_data=train_data,
                                     batch_size=config.batch_size)
            with tf.variable_scope("Model",
                                   reuse=None,
                                   initializer=initializer):
                train_model = Model(is_training=True,
                                    config=config,
                                    input_=train_input)

        with tf.name_scope("Test"):
            test_input = ModelInput(raw_data=test_data,
                                    batch_size=config.batch_size)
            with tf.variable_scope("Model",
                                   reuse=True,
                                   initializer=initializer):
                test_model = Model(is_training=False,
                                   config=config,
                                   input_=test_input)

        with tf.name_scope("Test_train"):
            test_train_input = ModelInput(raw_data=train_data,
                                          batch_size=config.batch_size)
            with tf.variable_scope("Model",
                                   reuse=True,
                                   initializer=initializer):
                test_train_model = Model(is_training=False,
                                         config=config,
                                         input_=test_train_input)

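        # Train on this fold, then evaluate on the held-out test data and on the
        # training data itself.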
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())

            train(sess, train_model, test_model, config)
            test(sess, test_model, config)
            test(sess, test_train_model, config)
            print("""]}""")