Example #1
 # Constructor of the Model class (an nn.Module subclass in src/model.py);
 # torch, torch.nn as nn, DEVICE and Ngram are assumed to be imported at module level.
 def __init__(self, ngram, architecture=None, output_size=10):
     super(Model, self).__init__()
     self.ngram = ngram
     self.n = ngram.n
     self.cnt = 1
     self.output_size = output_size
     # Touch each n-gram entry once; as written, this lookup has no side effect.
     for idx in ngram:
         ngram[idx]
     if architecture is None:
         # Default primal network: a small MLP (784 -> 300 -> 100 -> output_size)
         # over flattened 28x28 MNIST images.
         self.primal = nn.Sequential(nn.Linear(28 * 28, 300), nn.ReLU(),
                                     nn.Linear(300, 100), nn.ReLU(),
                                     nn.Linear(100, output_size)).to(DEVICE)
     else:
         self.primal = architecture.to(DEVICE)
     # One trainable dual variable per n-gram entry, initialized to -1 / p(idx).
     self.dual = Ngram(self.ngram.n)
     for idx in self.ngram:
         self.dual[idx] = torch.tensor(
             -1. / self.ngram[idx]).to(DEVICE).requires_grad_()
         # alternative init: self.dual[idx] = torch.tensor(0.).uniform_(-1, 0).to(DEVICE).requires_grad_()
     self.to(DEVICE)
     self.init_weights()
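
The constructor also accepts a custom primal network through the architecture argument. A minimal usage sketch (hypothetical; it assumes only the constructor signature above and the imports shown in the other examples):

import torch.nn as nn

from config import DEVICE
from src.data_processing import Ngram
from src.model import Model

ngram = Ngram(3)
ngram[(0, 1, 2)] = 6.
ngram[(1, 2, 3)] = 4.
ngram.norm()

# A custom architecture replaces the default MLP as the primal network.
custom_net = nn.Sequential(nn.Linear(28 * 28, 512), nn.ReLU(),
                           nn.Linear(512, 10))
model = Model(ngram, architecture=custom_net, output_size=10)
model.to(DEVICE)
model.init_weights()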
Example #2
import torch
import numpy as np

from config import DEVICE
from src.data_processing import (Ngram, randomized_ngram,
                                 sequence_loader_MNIST, test_loader_MNIST,
                                 train_loader_MNIST)
from src.model import Model
from src.remote import mpl
from src.training import SGD, SPDG
from src.statistics import get_statistics
import matplotlib.pyplot as plt

torch.manual_seed(671470554)
np.random.seed(70914308)

# %% GENERATING DATASET
ngram = Ngram(3)
ngram[(0, 1, 2)] = 6.
ngram[(1, 2, 3)] = 4.
ngram.norm()

data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=40000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)
Example #3
#     test_every = args.test_every
#     primal_lr = args.primal_lr
#     dual_lr = args.dual_lr
# else:

continuation = False
num_epochs = 1000
save_every = 200
log_every = 100
test_every = 5
primal_lr = 1e-6
dual_lr = 1e-4

# %% GENERATING DATASET
# ngram = randomized_ngram(3, 2, out_dim=4, min_var=5e-2)
ngram = Ngram(3)
ngram[(0, 1, 2)] = 9.
ngram[(1, 2, 3)] = 1.
ngram.norm()
ngram.show()
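# After norm() the two counts above are presumably rescaled to sum to 1,
# i.e. roughly P(0, 1, 2) = 0.9 and P(1, 2, 3) = 0.1
# (assuming norm() normalizes the raw counts into probabilities).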

# %% CREATING MODEL
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=50000)
sequence_test_loader = sequence_loader_MNIST(ngram, num_samples=10000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()
Example #4
import torch
import numpy as np

from config import DEVICE
from src.data_processing import (Ngram, randomized_ngram,
                                 sequence_loader_MNIST, test_loader_MNIST,
                                 train_loader_MNIST)
from src.model import Model
from src.remote import mpl
from src.training import SGD, SPDG
from src.statistics import get_statistics
import matplotlib.pyplot as plt

torch.manual_seed(377807910)
np.random.seed(713693356)

# %% GENERATING DATASET
ngram = Ngram(1)
ngram[(0,)] = 5.   # (0,) is a one-element tuple; the original (0) is just the plain int 0
ngram[(1,)] = 95.
ngram.norm()

data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=40000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)
Example #5
import torch
import numpy as np
import matplotlib as mpl
mpl.use('agg')
import matplotlib.pyplot as plt
from src.data_processing import (Ngram, train_loader_MNIST, test_loader_MNIST,
                                 sequence_loader_MNIST, randomized_ngram)
from src.model import Model
from config import DEVICE
from src.training import SGD, SPDG

torch.manual_seed(403249064)
np.random.seed(235084161)

# %% GENERATING DATASET
n = 1
ngram = Ngram(n)
ngram[(0,)] = 1.   # one-element tuple keys, matching the tuple keys in the trigram examples
ngram[(1,)] = 9.
ngram.norm()

data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=20000)

# %% DUAL TRAINING
model = Model(ngram, output_size=2)
model.to(DEVICE)
model.init_weights()

primal_lr = 1e-6
dual_lr = 1e-4
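
The fragment stops before the optimizers are created. A hedged sketch of how the two learning rates might be attached (an assumption: it mirrors the Adam optimizer used for the primal network in Example #2 and treats the requires_grad_ dual tensors created in Example #1 as the dual parameters):

# Hypothetical continuation, not part of the original example.
primal_optimizer = torch.optim.Adam(model.primal.parameters(), lr=primal_lr)
dual_optimizer = torch.optim.Adam([model.dual[idx] for idx in model.dual], lr=dual_lr)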