Example #1
# Imports used by this excerpt (model and DataHandler are project-local modules);
# env -- presumably the parsed command-line options -- and batchify() are defined
# elsewhere in the full script.
import os

import torch
import torch.nn as nn

import DataHandler
import model

def model_load(fn):
    # Restore the model, loss criterion and optimizer from a checkpoint file.
    global my_model, criterion, optimizer
    with open(fn, 'rb') as f:
        my_model, criterion, optimizer = torch.load(f)
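
# The checkpoint read by model_load() is presumably produced by a matching save
# routine elsewhere in the project; a minimal counterpart sketch (hypothetical
# helper, not part of the original listing):
def model_save(fn):
    with open(fn, 'wb') as f:
        torch.save((my_model, criterion, optimizer), f)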


# Build the dataset from the raw text files, or reuse the cached version if one exists
f_name = 'encoded_data'
if os.path.exists(f_name):
    print('Loading cached dataset...')
    allData = torch.load(f_name)
    print(allData.train)
else:
    print('Producing dataset...')
    allData = DataHandler.Corpus(env.data)
    torch.save(allData, f_name)

eval_batch_size = 10
test_batch_size = 1
train_data = batchify(allData.train, env.batch_size, env)
val_data = batchify(allData.valid, eval_batch_size, env)
test_data = batchify(allData.test, test_batch_size, env)
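
# batchify() is defined earlier in the full script. A typical implementation, in
# the style of the standard PyTorch word-level language-modelling example, trims
# the token stream so it divides evenly by the batch size and reshapes it into
# batch-size columns (plus a move to the GPU depending on env), roughly:
#
#     def batchify(data, bsz, env):
#         nbatch = data.size(0) // bsz            # number of complete columns
#         data = data.narrow(0, 0, nbatch * bsz)  # drop the leftover tokens
#         return data.view(bsz, -1).t().contiguous()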

# ----------------------------------------------------------------------------------
# Building the model

criterion = nn.CrossEntropyLoss()

word_num = len(allData.dictionary)  # size of the vocabulary
my_model = model.RNNModel(env.model, word_num, env.input_size,