# NOTE(review): this line appears to be a flattened, truncated duplicate of the
# tail of the sweep script below (it begins mid-statement with `curve.append(l2)`,
# inside a loop whose header is missing) — most likely a paste/extraction
# artifact. Confirm against version control and remove the duplicate; the
# complete script follows on the next line.
curve.append(l2) np.save('reg_curve', curve) elif args.mb: for mb in np.linspace(20, 50, 10): model = RNN(vocab=lexicon, reg=1) l1, l2 = model.train(X_trees_train, max_iter=1000, val_set=X_trees_dev, strat='AdaGrad', mini_batch_size=mb) curve.append(l2) np.save('mb_curve', curve) else: for lr in np.logspace(-2, 3, 10): model = RNN(vocab=lexicon, reg=1) l1, l2 = model.train(X_trees_train, max_iter=1000, val_set=X_trees_dev, strat='AdaGrad', mini_batch_size=30, learning_rate=lr) curve.append(l2) np.save('lr_curve', curve) sa_trn, sr_trn = model.score_fine(X_trees_train) sa_val, sr_val = model.score_fine(X_trees_dev) sa_tst, sr_tst = model.score_fine(X_trees_test) print 'Fine grain\tTrain\tTest\tValidation' print 'Overall\t\t{:.3}\t{:.3}\t{:.3}'.format(sa_trn, sa_tst, sa_val) print 'Root\t\t{:.3}\t{:.3}\t{:.3}'.format(sr_trn, sr_tst, sr_val)
from RNN import RNN

# Hyper-parameter sweep driver.
#
# Depending on the CLI flags (`args.reg` / `args.mb` / default), sweeps one
# hyper-parameter — regularization strength, mini-batch size, or learning
# rate — training a fresh RNN per setting on `X_trees_train` with AdaGrad and
# recording its validation-loss history (`l2`) for each run.  The collected
# curves are saved with np.save; the model from the LAST sweep iteration is
# then scored on train/dev/test and the fine-grained accuracies printed.
#
# Assumes `args`, `np` (numpy), `lexicon`, and the X_trees_* splits are
# defined earlier in this file (not visible in this chunk).
curve = []
if args.reg:
    # Sweep regularization strength over a log-spaced grid [1e-2, 1e3].
    for reg in np.logspace(-2, 3, 10):
        model = RNN(vocab=lexicon, reg=reg)
        l1, l2 = model.train(X_trees_train, max_iter=1000,
                             val_set=X_trees_dev, strat='AdaGrad',
                             mini_batch_size=30)
        curve.append(l2)
    # NOTE(review): the original (whitespace-mangled) source does not show
    # whether np.save sat inside or after the loop; saving once after the
    # loop produces the identical final file without redundant writes.
    np.save('reg_curve', curve)
elif args.mb:
    # Sweep mini-batch size.  np.linspace yields floats (e.g. 23.33), and a
    # batch size must be an integer — cast before passing it to train().
    for mb in np.linspace(20, 50, 10):
        model = RNN(vocab=lexicon, reg=1)
        l1, l2 = model.train(X_trees_train, max_iter=1000,
                             val_set=X_trees_dev, strat='AdaGrad',
                             mini_batch_size=int(mb))
        curve.append(l2)
    np.save('mb_curve', curve)
else:
    # Default: sweep learning rate over a log-spaced grid [1e-2, 1e3].
    for lr in np.logspace(-2, 3, 10):
        model = RNN(vocab=lexicon, reg=1)
        l1, l2 = model.train(X_trees_train, max_iter=1000,
                             val_set=X_trees_dev, strat='AdaGrad',
                             mini_batch_size=30, learning_rate=lr)
        curve.append(l2)
    np.save('lr_curve', curve)

# Score the surviving model — i.e. the one trained in the final sweep
# iteration — on all three splits.  score_fine returns (overall, root)
# fine-grained accuracies.
sa_trn, sr_trn = model.score_fine(X_trees_train)
sa_val, sr_val = model.score_fine(X_trees_dev)
sa_tst, sr_tst = model.score_fine(X_trees_test)

# print(...) with a single argument is valid in both Python 2 and Python 3;
# message text is unchanged from the original print statements.
print('Fine grain\tTrain\tTest\tValidation')
print('Overall\t\t{:.3}\t{:.3}\t{:.3}'.format(sa_trn, sa_tst, sa_val))
print('Root\t\t{:.3}\t{:.3}\t{:.3}'.format(sr_trn, sr_tst, sr_val))