Example #1
def main(_):
    data = reader.Data().load_raw_data([task.train, task.valid, task.test],
                                       add_beg_token=None,
                                       add_end_token='</s>',
                                       add_unknwon_token='<unk>')
    nbest_cmp = task.NBestComputer()
    res_file = 'results.txt'

    # config = small_config(data)
    config = medium_config(data)
    # config = large_config(data)
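    # 'BNCE' selects an NCE-based softmax and fixed_logz_for_nce pins the log normalizer
    # (reading of the option names below; both are specific to this codebase)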
    config.softmax_type = 'BNCE'
    config.fixed_logz_for_nce = 9

    work_dir = './lstm/' + create_name(config)
    wb.mkdir(work_dir, is_recreate=True)
    sys.stdout = wb.std_log(os.path.join(work_dir, 'lstm.log'))
    print(work_dir)
    config.print()

    data.write_vocab(work_dir + '/vocab.txt')
    data.write_data(data.datas[1], work_dir + '/valid.id')
    data.write_data(data.datas[2], work_dir + '/test.id')

    write_model = os.path.join(work_dir, 'model.ckpt')

    with tf.Graph().as_default():
        # lm = lstmlm.FastLM(config, device_list=['/gpu:0', '/gpu:0'])
        lm = lstmlm.LM(config, data, device='/gpu:0')

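        # tf.train.Supervisor handles checkpoint saving, summary writing and session recovery under logdir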
        sv = tf.train.Supervisor(logdir=os.path.join(work_dir, 'logs'),
                                 summary_op=None,
                                 global_step=lm.global_step())
        sv.summary_writer.add_graph(
            tf.get_default_graph())  # write the graph to logs
        session_config = tf.ConfigProto(allow_soft_placement=True,
                                        log_device_placement=False)
        session_config.gpu_options.allow_growth = True
        with sv.managed_session(config=session_config) as session:

            lm.train(session,
                     data,
                     write_model,
                     write_to_res=(res_file, create_name(config)))

            # rescore
            print('rescoring...')
            time_beg = time.time()
            for i, nbest in enumerate(nbest_cmp.nbests):
                nbest.lmscore = lm.rescore(session, nbest.get_nbest_list(data))
                # write per-nbest score files inside work_dir (os.path.join supplies the missing '/')
                wb.WriteScore(os.path.join(work_dir, 'lstm.lmscore.%d' % i), nbest.lmscore)
            print('rescore time={:.2f}m'.format((time.time() - time_beg) / 60))
            nbest_cmp.write_lmscore(work_dir + '/model')

            # tune lm-scale
            print('computing wer...')
            nbest_cmp.cmp_wer()
            nbest_cmp.write_to_res(res_file, create_name(config))
            print('wer_dev={}, wer_test={}'.format(nbest_cmp.get_valid_wer(),
                                                   nbest_cmp.get_test_wer()))
Example #2
def main():
    nbest_cmp = task.NBestComputer()
    data = reader.Data().load_raw_data([task.train, task.valid, task.test],
                                       add_beg_token='<s>',
                                       add_end_token='</s>')

    config = ngramlm.Config(data)
    config.res_file = 'results.txt'

    order_reg = [2, 3]
    for order in order_reg:
        config.order = order
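        # zero cut-offs: keep all n-grams of every order (no count pruning)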
        config.cutoff = [0] * order

        workdir = wb.mkdir('ngramlm/' + str(config), is_recreate=False)
        sys.stdout = wb.std_log(workdir + '/ngram.log')
        print(workdir)

        m = ngramlm.Model(config, data, bindir, workdir)

        # train
        print('training...')
        m.train()

        # rescore
        print('rescoring...')
        time_beg = time.time()
        for nbest in nbest_cmp.nbests:
            nbest.lmscore = m.rescore(nbest.get_nbest_list(data))
            # print(len(nbest.lmscore))
        nbest_cmp.write_lmscore(workdir + '/model')
        print('rescore time={:.2f}m'.format((time.time() - time_beg) / 60))

        # tune lm-scale
        print('computing wer...')
        nbest_cmp.cmp_wer()
        nbest_cmp.write_to_res(config.res_file, str(config))
Example #3
import tensorflow as tf
import os
import sys
import numpy as np
import time

from base import *
from trf.common import net
from trf.isample import trf_nce as trf
# from trf.nce import trf

import task

nbest_cmp = task.NBestComputer()


def get_config_cnn(vocab_size):
    config = net.Config(vocab_size)
    config.embedding_dim = 256
    config.structure_type = 'cnn'
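    # presumably (filter width, feature maps): widths 1-10 with 128 maps each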
    config.cnn_filters = [(i, 128) for i in range(1, 11)]
    config.cnn_hidden = 128
    config.cnn_width = 3
    config.cnn_layers = 3
    config.cnn_activation = 'relu'
    config.cnn_skip_connection = True
    config.cnn_residual = False
    config.cnn_batch_normalize = False
    config.cnn_final_activation = None
    return config
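
A minimal usage sketch for the helper above; the vocabulary size is an assumed placeholder, not taken from the original example:

vocab_size = 10000  # assumed placeholder; normally taken from the loaded data
cnn_config = get_config_cnn(vocab_size)
print(cnn_config.cnn_filters[:3])  # -> [(1, 128), (2, 128), (3, 128)]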