Example #1
0
from src.training import SGD, SPDG
from src.statistics import get_statistics
import matplotlib.pyplot as plt

# Fix RNG seeds so the experiment is reproducible across runs.
# NOTE(review): `torch` and `np` are used but not imported in this visible
# chunk -- presumably imported above the excerpt; confirm against the full file.
torch.manual_seed(671470554)
np.random.seed(70914308)

# %% GENERATING DATASET
# Trigram model with two sequences weighted 6:4; norm() presumably rescales
# the weights into a probability distribution -- confirm in Ngram's definition.
ngram = Ngram(3)
ngram[(0, 1, 2)] = 6.
ngram[(1, 2, 3)] = 4.
ngram.norm()

# MNIST loaders: plain train/test loaders, plus a loader that presumably
# emits digit sequences distributed according to `ngram` -- TODO confirm.
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=40000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)

# %% DUAL TRAINING
# 4-class output head matches the symbol set 0..3 used in the ngram above.
model = Model(ngram, output_size=4)
model.to(DEVICE)
model.init_weights()

# Primal-step learning rate; the rest of this script is cut off in this excerpt.
primal_lr = 1e-6
Example #2
0
# Experiment hyperparameters.
# NOTE(review): these are defined here but consumed below/outside this
# excerpt (dual-training loop) -- verify against the full file.
test_every = 5
primal_lr = 1e-6
dual_lr = 1e-4

# %% GENERATING DATASET
# ngram = randomized_ngram(3, 2, out_dim=4, min_var=5e-2)
# Trigram model with two sequences weighted 9:1; norm() presumably rescales
# the weights into a probability distribution -- confirm in Ngram's definition.
ngram = Ngram(3)
ngram[(0, 1, 2)] = 9.
ngram[(1, 2, 3)] = 1.
ngram.norm()
ngram.show()

# %% CREATING MODEL
# MNIST loaders plus sequence loaders for train (50k) and test (10k) samples;
# sequence_loader_MNIST presumably samples digit sequences from `ngram`.
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=50000)
sequence_test_loader = sequence_loader_MNIST(ngram, num_samples=10000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)

# %% DUAL TRAINING
# Resume from a checkpoint when `continuation` is truthy (defined elsewhere
# in the file). The else-branch body is cut off by this excerpt.
if continuation:
    history, model, ngram, optimizer_primal, optimizer_dual = load()
    print(model.ngram.n)
else:
Example #3
0
File: main.py  Project: gcie/licencjat
# %% CREATING NGRAM
# ngram = randomized_ngram(3, 20, out_dim=4, min_var=-1e-2)
# ngram = Ngram(3)
# ngram[(0, 1, 2)] = 9.
# ngram[(1, 2, 3)] = 1.
# ngram.norm()
# Generate sequences from a trigram presumably estimated on the Brown
# corpus -- confirm in get_brown_ngram's definition.
ngram_gen = get_brown_ngram(dim=3)
# ngram_gen = ngram
# ngram_gen.show()

# %% GENERATING DATASET
# MNIST loaders; sequence loaders presumably emit digit sequences drawn
# from `ngram_gen` (2k training sequences of length 30, 10k held-out).
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram_gen,
                                        num_samples=2000,
                                        sequence_length=30)
sequence_test_loader = sequence_loader_MNIST(ngram_gen,
                                             num_samples=10000,
                                             train=False)

# Time how long it takes to collect the empirical sequence distribution.
# NOTE(review): `time` is not imported in this visible chunk -- presumably
# imported above the excerpt; confirm.
t = time.time()
stats = get_sequences_distribution(sequence_loader)
print(time.time() - t)
print(stats)

# %%
# Rebuild a trigram from the generated sequences (sanity check that the
# sampled data matches the generator) -- TODO confirm retrieve_ngram's intent.
ngram = retrieve_ngram(sequence_loader, 3)
ngram.show()

# %% REGULAR TRAINING (SGD)
Example #4
0
File: main.py  Project: gcie/licencjat
# %% CREATING NGRAM
# ngram = randomized_ngram(3, 20, out_dim=4, min_var=-1e-2)
# Trigram with two sequences weighted 6 and 4.
# NOTE(review): norm() is commented out here, so the weights are left
# unnormalized -- verify this is intentional (other variants of this
# script do call norm()).
ngram = Ngram(3)
ngram[(0, 1, 2)] = 6.
ngram[(1, 2, 3)] = 4.
# ngram.norm()
# ngram_gen = get_brown_ngram(dim=3)
ngram_gen = ngram
# ngram_gen.show()


# %% GENERATING DATASET
# MNIST loaders; sequence loaders presumably sample digit sequences from
# `ngram_gen` (40k train, 10k held-out) -- TODO confirm.
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram_gen, num_samples=40000)
sequence_test_loader = sequence_loader_MNIST(ngram_gen, num_samples=10000, train=False)

# Time how long collecting the empirical sequence distribution takes.
# NOTE(review): `time` is not imported in this visible chunk -- presumably
# imported above the excerpt; confirm.
t = time.time()
stats = get_sequences_distribution(sequence_loader)
print(time.time() - t)
print(stats)


# %%
# ngram = retrieve_ngram(sequence_loader, 3)
# ngram.show()

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
Example #5
0
# Flag presumably read by the training loop further down the file to decide
# whether the loss is also evaluated on the test sequences -- TODO confirm.
loss_on_test = False

# %% CREATING NGRAM
# ngram = randomized_ngram(3, 27, out_dim=3, min_var=-1e-2)
# ngram = Ngram(3)
# ngram[(0, 1, 2)] = 9.
# ngram[(1, 2, 3)] = 1.
# ngram.norm()
# Use a trigram presumably estimated from the Brown corpus -- confirm in
# get_brown_ngram's definition.
ngram = get_brown_ngram()
ngram.show()

# %% GENERATING DATASET
# MNIST loaders; sequence loaders presumably emit digit sequences drawn
# from `ngram` (2k training sequences of length 30, 10k for evaluation).
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram,
                                        num_samples=2000,
                                        sequence_length=30)
sequence_test_loader = sequence_loader_MNIST(ngram, num_samples=10000)

# %% REGULAR TRAINING (SGD)
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)

# %% DUAL TRAINING
# Resume from a checkpoint when `continuation` is truthy (defined elsewhere
# in the file); the else-branch continues past the end of this excerpt.
if continuation:
    history, model, ngram, optimizer_primal, optimizer_dual = load()
    print(model.ngram.n)