# Negative-sampling word-embedding training setup for the char-level "poem"
# corpus.  NOTE(review): this is a fragment of a larger script -- `parser` is
# created earlier and the training step continues past the end of this chunk.

def _str2bool(v):
    """Parse a command-line boolean value.

    argparse's ``type=bool`` is a trap: ``bool('False')`` is ``True``, so any
    non-empty value -- including the literal string 'False' -- used to enable
    CUDA.  This helper keeps the same CLI shape (``--use-cuda False`` still
    works) while parsing the value as intended.
    """
    return str(v).lower() not in ('false', '0', 'no', '')

parser.add_argument('--use-cuda', type=_str2bool, default=True, metavar='CUDA',
                    help='use cuda (default: True)')
args = parser.parse_args()

prefix = 'poem'
word_is_char = True  # every character is treated as a token for this corpus
batch_loader = BatchLoader('', prefix, word_is_char)
params = Parameters(batch_loader.max_word_len,
                    batch_loader.max_seq_len,
                    batch_loader.words_vocab_size,
                    batch_loader.chars_vocab_size,
                    word_is_char)

# NEG_loss is defined over two embedding matrixes with shape of
# [params.word_vocab_size, params.word_embed_size]
neg_loss = NEG_loss(params.word_vocab_size, params.word_embed_size)

# Hoisted loop-invariant: evaluated once instead of twice per iteration, and
# guards against CPU-only machines even though the default is True.
use_cuda = args.use_cuda and t.cuda.is_available()
if use_cuda:
    neg_loss = neg_loss.cuda()

optimizer = SGD(neg_loss.parameters(), 0.1)

for iteration in range(args.num_iterations):
    input_idx, target_idx = batch_loader.next_embedding_seq(args.batch_size)

    # `input`/`target` shadow builtins, but the names are kept because the
    # unseen remainder of the script references them.
    input = Variable(t.from_numpy(input_idx).long())
    target = Variable(t.from_numpy(target_idx).long())
    if use_cuda:
        input, target = input.cuda(), target.cuda()
help='num iterations (default: 1000000)') parser.add_argument('--batch-size', type=int, default=10, metavar='BS', help='batch size (default: 10)') parser.add_argument('--num-sample', type=int, default=5, metavar='NS', help='num sample (default: 5)') parser.add_argument('--use-cuda', type=bool, default=True, metavar='CUDA', help='use cuda (default: True)') args = parser.parse_args() batch_loader = BatchLoader('') params = Parameters(batch_loader.max_word_len, batch_loader.max_seq_len, batch_loader.words_vocab_size, batch_loader.chars_vocab_size) neg_loss = NEG_loss(params.word_vocab_size, params.word_embed_size) if args.use_cuda: neg_loss = neg_loss.cuda() # NEG_loss is defined over two embedding matrixes with shape of [params.word_vocab_size, params.word_embed_size] optimizer = SGD(neg_loss.parameters(), 0.1) for iteration in range(args.num_iterations): input_idx, target_idx = batch_loader.next_embedding_seq(args.batch_size) input = Variable(t.from_numpy(input_idx).long()) target = Variable(t.from_numpy(target_idx).long()) if args.use_cuda: input, target = input.cuda(), target.cuda()
type=int, default=14, metavar='NS', help='num sample (default: 14)') parser.add_argument('--use-cuda', type=bool, default=False, metavar='CUDA', help='use cuda (default: True)') args = parser.parse_args() batch_loader = BatchLoader('') params = Parameters(batch_loader.max_seq_len, batch_loader.vocab_size) neg_loss = NEG_loss( params.vocab_size, params.word_embed_size, weights=[1 - sqrt(5e-5 / i) for i in batch_loader.words_freq]) if args.use_cuda: neg_loss = neg_loss.cuda() """NEG_loss is defined over two embedding matrixes with shape of [params.vocab_size, params.word_embed_size]""" optimizer = SGD(neg_loss.parameters(), 0.1) for iteration in range(args.num_iterations): input_idx, target_idx = batch_loader.next_embedding_seq( args.batch_size) input = Variable(t.from_numpy(input_idx).long()) target = Variable(t.from_numpy(target_idx).long()) if args.use_cuda: input, target = input.cuda(), target.cuda()
default=5, metavar='NS', help='num sample (default: 5)') parser.add_argument('--use-cuda', type=bool, default=True, metavar='CUDA', help='use cuda (default: True)') args = parser.parse_args() batch_loader = BatchLoader('') params = Parameters(batch_loader.max_word_len, batch_loader.max_seq_len, batch_loader.words_vocab_size, batch_loader.chars_vocab_size) neg_loss = NEG_loss(params.word_vocab_size, params.word_embed_size) if args.use_cuda: neg_loss = neg_loss.cuda() # NEG_loss is defined over two embedding matrixes with shape of [params.word_vocab_size, params.word_embed_size] optimizer = SGD(neg_loss.parameters(), 0.1) for iteration in range(args.num_iterations): input_idx, target_idx = batch_loader.next_embedding_seq( args.batch_size) input = Variable(t.from_numpy(input_idx).long()) target = Variable(t.from_numpy(target_idx).long()) if args.use_cuda: