def setUp(self):
    # Build a corpus from the Friends transcript, restore the pre-trained
    # weights in 'rnn.save', and keep a Trainer around for the tests below.
    fname = 'friends.txt'
    with open(fname, 'r') as f:
        self.txt = f.read()
    c = Corpus(self.txt)
    c.brownInit(500)
    rnn = RNN(100, c.V, 50)
    rnn.load('rnn.save')
    self.trainer = Trainer(c, rnn, nepochs=50, alpha=1.8)
def test_trainingOnSentences(self):
    # Train a fresh model on the fixture corpus end to end.
    c = Corpus(self.txt)
    rnn = RNN(100, c.V, 50)
    trainer = Trainer(c, rnn, nepochs=50, alpha=1.8)
    trainer.train()
def search_for_parameter():
    # Grid search over the bigram-feature count and the smoothing constant.
    n_bis = [100000, 200000, 500000]
    steps = [5e-2, 1e-1, 5e-1, 1]
    n_epoch = 5
    for n_bi in n_bis:
        for step in steps:
            print('-' * 20)
            print(n_bi, ' ', step)
            config['n_bigram'] = n_bi
            config['smooth'] = step
            corpus = Corpus(config['data_dir'])
            pct = perceptron()
            for cnt in range(n_epoch):
                train(corpus.trainSet, pct)
                test(corpus.testSet, pct)
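# The sweep above only prints progress and discards the scores. A minimal
# sketch of the same grid search that records results, assuming a hypothetical
# evaluate(n_bigram, smooth) helper that runs one train/test round and returns
# a single score (the original train()/test() only print):
import itertools

def search_for_parameter_collect(evaluate):
    n_bis = [100000, 200000, 500000]
    steps = [5e-2, 1e-1, 5e-1, 1]
    results = {}
    for n_bi, step in itertools.product(n_bis, steps):
        # evaluate() is an assumed helper, not part of the original code.
        results[(n_bi, step)] = evaluate(n_bi, step)
    # Best setting: the (n_bigram, smooth) pair with the highest score.
    best = max(results, key=results.get)
    return best, results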
from rnn import *
from dataLoader import Corpus
from trainer import Trainer

# Build a corpus from the Shakespeare text and initialise Brown clusters.
fname = 'shakespear.txt'
with open(fname, 'r') as f:
    txt = f.read()
c = Corpus(txt)
c.brownInit(10000)

# Construct the network and restore the previously saved weights.
rnn = RNN(100, c.V, 100)
# rnn = RNN.load('rnn.save')
rnn.load('rnn.save')
# rnn = RNN(100, c.V, 50)

trainer = Trainer(c, rnn, nepochs=50, alpha=0.9)
trainer.generate_sequence()
# trainer.train()
trainer.mainEventLoop()
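# The script assumes 'rnn.save' is already on disk; if it is missing, the
# restore step will most likely fail before anything is generated. A small,
# hypothetical guard one could use in place of the load above (the
# os.path.exists check is an assumption about usage, not part of the RNN API):
import os

SAVE_PATH = 'rnn.save'   # hypothetical name for the checkpoint path
rnn = RNN(100, c.V, 100)
if os.path.exists(SAVE_PATH):
    # Resume from the saved weights when a checkpoint is present.
    rnn.load(SAVE_PATH)
else:
    # Otherwise keep the freshly initialised weights and train from scratch.
    print('No checkpoint found at', SAVE_PATH, '- starting fresh.')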
if __name__ == '__main__':
    # search_for_parameter()
    pct = perceptron()
    corpus = Corpus(config['data_dir'])
    n_epoch = 5
    while True:
        for cnt in range(n_epoch):
            print('Happy training!')
            train(corpus.trainSet, pct)
            print('\nHappy testing!')
            test(corpus.testSet, pct)
        # Shrink the smoothing constant by a factor of ten each round and
        # stop once the next value falls below roughly 1e-10.
        config['smooth'] /= 10
        if config['smooth'] < 1.01e-10:
            break
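# The outer while-loop sweeps the smoothing constant over a geometric
# schedule. A tiny worked example of the values it visits, assuming a
# hypothetical starting value of 1.0 (the real initial config['smooth'] is
# set elsewhere in the config module):
smooth = 1.0        # assumed starting point, for illustration only
used = []
while True:
    used.append(smooth)     # value in effect for one round of n_epoch passes
    smooth /= 10
    if smooth < 1.01e-10:
        break
print(used)   # roughly [1.0, 0.1, 0.01, ..., 1e-09]: ten outer rounds in total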
def test_saving_model(self):
    # A freshly built model should serialise itself without raising.
    c = Corpus(self.txt)
    rnn = RNN(100, c.V, 50)
    rnn.save()
def test_perplexity(self):
    # Perplexity of a small nonsense corpus under the pre-trained model.
    c = Corpus("asdada asdaa asd adada dadada. asdas dasd.da ad.a d.sa da asd")
    c.brownInit(500, 0)
    p = self.trainer.calcPerplexicity(c)
    print(Fore.CYAN, p, "\n")
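# For reference, perplexity is the exponential of the average negative log
# probability of the tokens. A minimal, self-contained sketch of that formula;
# it is an assumption that Trainer.calcPerplexicity follows exactly this
# definition (e.g. natural logs, token-level averaging):
import math

def perplexity(token_log_probs):
    # token_log_probs: natural-log probabilities, one per token.
    n = len(token_log_probs)
    avg_neg_log_prob = -sum(token_log_probs) / n
    return math.exp(avg_neg_log_prob)

# A model that assigns probability 0.25 to every token has perplexity ~4.
print(perplexity([math.log(0.25)] * 10))   # -> ~4.0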