# --- command-line arguments ----------------------------------------------
# NOTE(review): the opening of this add_argument call was lost when the file
# was collapsed onto one line; '--layer_type' matches the visible help text
# ("type of recurrent layer to use") but should be confirmed against the
# original source.
parser.add_argument('--layer_type', default='rnn', choices=['rnn', 'birnn'],
                    help='type of recurrent layer to use (rnn or birnn)')
parser.add_argument('--use_lut', action='store_true',
                    help='choose to use lut as first layer')
parser.set_defaults()
args = parser.parse_args()

# these hyperparameters are from the paper
args.batch_size = 50
time_steps = 150
hidden_size = 500

# download penn treebank
tree_bank_data = PTB(path=args.data_dir)
ptb_data = tree_bank_data.load_data()

train_set = SequentialArrayIterator(ptb_data['train'],
                                    batch_size=args.batch_size,
                                    time_steps=time_steps,
                                    total_iterations=args.num_iterations)
valid_set = SequentialArrayIterator(ptb_data['valid'],
                                    batch_size=args.batch_size,
                                    time_steps=time_steps)

inputs = train_set.make_placeholders()
# output axis length equals the vocabulary size (one class per token)
ax.Y.length = len(tree_bank_data.vocab)


def expand_onehot(x):
    # NOTE(review): the body of this function lies beyond this chunk of the
    # file and is not visible here; '...' is a placeholder so the
    # reconstruction parses — restore the original body from the full source.
    ...
from ngraph.frontends.neon import PTB

# parse the command line arguments
parser = NgraphArgparser(__doc__)
parser.set_defaults(batch_size=128, num_iterations=2000)
args = parser.parse_args()

# model parameters
time_steps = 5
hidden_size = 256
gradient_clip_value = 5

# download penn treebank
# set shift_target to be False, since it is going to predict the same sequence
tree_bank_data = PTB(path=args.data_dir, shift_target=False)
ptb_data = tree_bank_data.load_data()

# reverse_target/get_prev_target feed the decoder the (reversed) previous
# target token at each step, as required for same-sequence prediction
train_set = SequentialArrayIterator(ptb_data['train'],
                                    batch_size=args.batch_size,
                                    time_steps=time_steps,
                                    total_iterations=args.num_iterations,
                                    reverse_target=True,
                                    get_prev_target=True)
valid_set = SequentialArrayIterator(ptb_data['valid'],
                                    batch_size=args.batch_size,
                                    time_steps=time_steps,
                                    total_iterations=10,
                                    reverse_target=True,
                                    get_prev_target=True)

inputs = train_set.make_placeholders()