Example #1
from collections import ChainMap

import reader  # project-local modules (data readers and LM builders)
import lm


def wt2_opt():
    reader_opt = ChainMap(
        {
            'sentences': False,
            # min_seq_len/max_seq_len apply only when 'sentences' is False
            'min_seq_len': 35,
            'max_seq_len': 35,
            'shuffle': False,
            'batch_size': 64,
            'vocab_path': '',  # empty: filled in automatically from the data
            'text_path': ''  # empty: filled in automatically from the data
        },
        reader.default_reader_opt())
    model_opt = ChainMap(
        {
            'emb_dim': 650,
            'rnn_dim': 650,
            'rnn_layers': 2,
            'rnn_variational': True,
            'rnn_input_keep_prob': 0.5,
            'rnn_layer_keep_prob': 0.7,
            'rnn_output_keep_prob': 0.5,
            'rnn_state_keep_prob': 0.7,
            'logit_weight_tying': True,
            'vocab_size': -1  # -1: set automatically from the data vocabulary
        },
        lm.default_rnnlm_opt())
    train_opt = ChainMap(
        {
            'loss_key': 'mean_token_nll',  # or sum_token_nll
            'ngram_loss_coeff': 0.1,
            'init_learning_rate': 0.003,
            'decay_rate': 0.85,
            'staircase': True,
            'optim': 'tensorflow.train.AdamOptimizer',
            # the optim_* settings below apply only when using Adam
            'optim_beta1': 0.0,
            'optim_beta2': 0.999,
            'optim_epsilon': 1e-8,
            'clip_gradients': 5.0,
            'max_epochs': 40,
            'checkpoint_path': 'tmp',  # replaced automatically with exp_dir
            'decay_steps': -1  # -1: set to the number of batches in an epoch
        },
        lm.default_train_opt())
    return reader_opt, model_opt, train_opt
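
Both option sets lean on ChainMap's left-to-right lookup: keys in the first
(override) dict shadow the library defaults, and anything unspecified falls
through. A minimal sketch of that behavior (the dicts here are illustrative,
not taken from the example above):

from collections import ChainMap

defaults = {'emb_dim': 100, 'rnn_dim': 100, 'rnn_layers': 1}
overrides = {'emb_dim': 650, 'rnn_dim': 650}
opt = ChainMap(overrides, defaults)

print(opt['emb_dim'])     # 650 -- found in the override map first
print(opt['rnn_layers'])  # 1   -- falls through to the defaults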
Example #2
import json
from collections import ChainMap

import tensorflow as tf

import reader  # project-local modules, as in Example #1
import lm

# NOTE: the source snippet starts mid-statement; assuming reader_opt was
# layered over the defaults as in Example #1 (data_path()/exp_path() are
# project helpers resolving file locations):
reader_opt = ChainMap({}, reader.default_reader_opt())
reader_opt.update({
    'vocab_path': data_path('vocab.txt'),
    'text_path': data_path('train.txt'),
    'min_seq_len': 2,
    'max_seq_len': 7,
})

# loading training and validation data
train_iter_wrapper = reader.get_batch_iter_from_file(reader_opt)
reader_opt['text_path'] = data_path('valid.txt')
valid_iter_wrapper = reader.get_batch_iter_from_file(reader_opt,
                                                     train_iter_wrapper.vocab)
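
Reassigning 'text_path' on the ChainMap works because writes always land in
the first mapping, so the shared defaults stay untouched and the same option
object can be reused for the validation file. A small sketch with hypothetical
values:

from collections import ChainMap

defaults = {'text_path': '', 'batch_size': 64}
opt = ChainMap({'text_path': 'train.txt'}, defaults)
opt['text_path'] = 'valid.txt'   # written into the first map only

print(opt['text_path'])       # 'valid.txt'
print(defaults['text_path'])  # ''  -- defaults unchanged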

# creating models
with open(exp_path('model_opt.json')) as fp:
    model_opt = json.load(fp)
    model_opt = ChainMap(model_opt, lm.default_rnnlm_opt())
model_opt['vocab_size'] = train_iter_wrapper.vocab.vocab_size
# model_opt['rnn_get_all_states'] = True
with tf.variable_scope('model'):
    eval_lm = lm.create_rnnlm(model_opt, with_dropout=False)
with tf.variable_scope('encoder'):
    train_enc = lm.create_encoder(model_opt, eval_lm, with_dropout=True)
with tf.variable_scope('encoder', reuse=True):
    eval_enc = lm.create_encoder(model_opt, eval_lm, with_dropout=False)

train_model = ChainMap(train_enc, eval_lm)
eval_model = ChainMap(eval_enc, eval_lm)
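
Here ChainMap doubles as a zero-copy dict merge: entries from the encoder
mapping shadow same-named entries in the shared eval_lm mapping, so the
combined model exposes both sets of nodes without copying either dict.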

# creating optim (train_opt is assumed to be built as in Example #1)
if train_opt['decay_steps'] == -1:
    train_opt['decay_steps'] = train_iter_wrapper.num_batches
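
With decay_steps filled in as one epoch's worth of batches, these options map
directly onto TF1's exponential-decay schedule. A hedged sketch of the typical
wiring (the global_step variable and the optimizer construction are
assumptions, not shown in the original snippet):

global_step = tf.train.get_or_create_global_step()
learning_rate = tf.train.exponential_decay(
    train_opt['init_learning_rate'],
    global_step,
    train_opt['decay_steps'],
    train_opt['decay_rate'],
    staircase=train_opt['staircase'])
optimizer = tf.train.AdamOptimizer(learning_rate)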