import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import transforms, utils
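# The Autoencoder class itself is not shown in this snippet. A minimal sketch
# of what it might look like (the 784-dim input and the layer sizes are
# assumptions, not the original definition):
class Autoencoder(nn.Module):
    def __init__(self):
        super().__init__()
        # assumed: flattened 28x28 inputs compressed to a 32-dim code
        self.encoder = nn.Sequential(
            nn.Linear(784, 128), nn.ReLU(),
            nn.Linear(128, 32))
        self.decoder = nn.Sequential(
            nn.Linear(32, 128), nn.ReLU(),
            nn.Linear(128, 784), nn.Sigmoid())

    def forward(self, x):
        return self.decoder(self.encoder(x))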
learning_rate = 0.0005

model = Autoencoder()
criterion = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
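# CustomTrainDataset is not shown either. A rough sketch, assuming it loads
# grayscale images from `path`, resizes them to 28x28 and returns
# (flattened_image, dummy_label) pairs (the file layout and preprocessing
# are guesses, not the original code):
import glob
import os
from PIL import Image
from torch.utils.data import Dataset

class CustomTrainDataset(Dataset):
    def __init__(self, path):
        self.files = sorted(glob.glob(os.path.join(path, '*.png')))
        self.transform = transforms.Compose([
            transforms.Grayscale(),
            transforms.Resize((28, 28)),
            transforms.ToTensor(),
        ])

    def __len__(self):
        return len(self.files)

    def __getitem__(self, idx):
        img = Image.open(self.files[idx])
        img = self.transform(img).view(-1)  # flatten to a 784-dim vector
        return img, 0  # dummy label; the training loop only reads batch[0]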
my_dataset = CustomTrainDataset(path='../data')
dataloader = DataLoader(my_dataset, batch_size=4, shuffle=True)

num_epochs = 40
for epoch in range(num_epochs):
    for batch_idx, batch in enumerate(dataloader):

        # labels are not needed; only the images (features) are used
        image = batch[0]
        out = model(image)
        cost = criterion(out, image)
        optimizer.zero_grad()
        cost.backward()

        ### UPDATE MODEL PARAMETERS
        optimizer.step()

        ### LOGGING
        print('Epoch: %03d/%03d | Batch %03d/%03d | Loss: %.4f' %
              (epoch + 1, num_epochs, batch_idx, len(dataloader), cost.item()))
# save the trained weights and optimizer state (state_dicts are preferred
# over pickling the full objects)
state = {'model': model.state_dict(), 'optimizer': optimizer.state_dict()}
torch.save(state, 'saved.tar')
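# Reloading the checkpoint later (a sketch; assumes the Autoencoder class
# definition is available in the loading script):
checkpoint = torch.load('saved.tar')
model = Autoencoder()
model.load_state_dict(checkpoint['model'])
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
optimizer.load_state_dict(checkpoint['optimizer'])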
Example #2
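# Example #2 relies on a different Autoencoder class that takes
# (input_dim, hidden_dim) and exposes fit() / encoder() methods. A minimal
# sketch of such a wrapper (the layer shapes, optimizer and training loop
# are assumptions, not the original implementation):
class Autoencoder(nn.Module):
    def __init__(self, n_inputs, n_hidden):
        super().__init__()
        self.encoder = nn.Sequential(nn.Linear(n_inputs, n_hidden), nn.Sigmoid())
        self.decoder = nn.Linear(n_hidden, n_inputs)

    def forward(self, x):
        return self.decoder(self.encoder(x))

    def fit(self, X, n_epoch=10, lr=1e-3):
        # X is a NumPy array; train the autoencoder to reconstruct it
        X = torch.from_numpy(X.astype(np.float32))
        opt = torch.optim.Adam(self.parameters(), lr=lr)
        loss_fn = nn.MSELoss()
        for _ in range(n_epoch):
            opt.zero_grad()
            loss = loss_fn(self(X), X)
            loss.backward()
            opt.step()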
    # ---- train using the training data

    # The n == 0 checks ensure each autoencoder is initialized only once;
    # on subsequent time steps the existing networks keep training.

    if n == 0:
        auto1 = Autoencoder(feats_norm_train.shape[1], num_hidden_1)
    auto1.fit(feats_norm_train, n_epoch=n_epoch)

    # torch.autograd.Variable is deprecated; plain tensors are sufficient
    inputs = torch.from_numpy(feats_norm_train.astype(np.float32))

    if n == 0:
        auto2 = Autoencoder(num_hidden_1, num_hidden_2)
    auto1_out = auto1.encoder(inputs).detach().numpy()
    auto2.fit(auto1_out, n_epoch=n_epoch)

    if n == 0:
        auto3 = Autoencoder(num_hidden_2, num_hidden_3)
    auto1_out = torch.from_numpy(auto1_out.astype(np.float32))
    auto2_out = auto2.encoder(auto1_out).detach().numpy()
    auto3.fit(auto2_out, n_epoch=n_epoch)

    if n == 0:
        auto4 = Autoencoder(num_hidden_3, num_hidden_4)
    auto2_out = torch.from_numpy(auto2_out.astype(np.float32))
    auto3_out = auto3.encoder(auto2_out).detach().numpy()
    auto4.fit(auto3_out, n_epoch=n_epoch)
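
    # After training, the stacked encoders can be chained to obtain the
    # deepest representation of the features (a sketch; the variable names
    # follow the code above, the downstream use is an assumption):
    with torch.no_grad():
        x = torch.from_numpy(feats_norm_train.astype(np.float32))
        deep_feats = auto4.encoder(auto3.encoder(auto2.encoder(auto1.encoder(x)))).numpy()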