# Example 1 (scraped listing; the opening of the first statement below is truncated)
                        collate_fn=dataset.collate_fn)

# iterations per epoch
# Cache the number of batches per epoch on `args` so later loops can use it
# without recomputing; ceil so a final partial batch still counts.
# NOTE(review): assumes `dataset.num_data_points` is a dict keyed by split
# name — confirm against the dataset class.
setattr(args, 'iter_per_epoch',
        math.ceil(dataset.num_data_points[args.split] / args.batch_size))
print("{} iter per epoch.".format(args.iter_per_epoch))

# ----------------------------------------------------------------------------
# setup the model
# ----------------------------------------------------------------------------

# Rebuild the encoder/decoder from saved model args, then restore their
# trained weights from the checkpoint (`components` presumably comes from
# torch.load(args.load_path) earlier in the file — verify).
encoder = Encoder(model_args)
encoder.load_state_dict(components['encoder'])

# Decoder takes the encoder instance — likely for weight sharing or tying;
# can't tell from here.
decoder = Decoder(model_args, encoder)
decoder.load_state_dict(components['decoder'])
print("Loaded model from {}".format(args.load_path))

# Move both models to GPU when a device id was requested; gpuid < 0 means CPU.
if args.gpuid >= 0:
    encoder = encoder.cuda()
    decoder = decoder.cuda()

# ----------------------------------------------------------------------------
# evaluation
# ----------------------------------------------------------------------------

print("Evaluation start time: {}".format(
    datetime.datetime.strftime(datetime.datetime.utcnow(),
                               '%d-%b-%Y-%H:%M:%S')))
# Switch to inference mode (disables dropout / freezes batch-norm stats).
encoder.eval()
decoder.eval()
# Example 2 (scraped listing)
# ----------------------------------------------------------------------------
# setup the model
# ----------------------------------------------------------------------------
# Fresh encoder/decoder plus a single Adam optimizer over both parameter sets,
# so one optimizer step updates the whole model.
encoder = Encoder(model_args)
decoder = Decoder(model_args, encoder)
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(list(encoder.parameters()) + list(decoder.parameters()),
                       lr=args.lr,
                       weight_decay=args.weight_decay)
# Initialize the word-embedding layer from pretrained GloVe vectors.
encoder.word_embed.init_embedding('data/glove/glove6b_init_300d_1.0.npy')

start_epoch = 0
if args.load_path != '':
    # Resume from a checkpoint. The `.get(key, components)` fallback suggests
    # support for older checkpoints that stored a bare state_dict rather than
    # a {'encoder': ..., 'decoder': ..., 'optimizer': ..., 'epoch': ...} dict
    # — NOTE(review): in that legacy case the optimizer/epoch loads below
    # would presumably fail; confirm the expected checkpoint schema.
    components = torch.load(args.load_path)
    encoder.load_state_dict(components.get('encoder', components))
    decoder.load_state_dict(components.get('decoder', components))
    optimizer.load_state_dict(components.get('optimizer', components))
    start_epoch = components['epoch']
    print("Loaded model from {}".format(args.load_path))
print("Decoder: {}".format(args.decoder))

# Keep a handle to the un-wrapped model's args before DataParallel hides
# attributes behind `.module` — presumably used later when saving checkpoints.
args_for_save = encoder.args
encoder = nn.DataParallel(encoder).cuda()
decoder = nn.DataParallel(decoder).cuda()
criterion = criterion.cuda()

# ----------------------------------------------------------------------------
# training
# ----------------------------------------------------------------------------
# Enable training mode (dropout active, batch-norm updates running stats).
encoder.train()
decoder.train()