# Example #1 (score: 0)
    save_every = args.save_every
    log_every = args.log_every
    test_every = args.test_every
    primal_lr = args.primal_lr
    dual_lr = args.dual_lr
else:
    continuation = False
    num_epochs = 1000
    save_every = 100
    log_every = 100
    test_every = 5
    primal_lr = 1e-6
    dual_lr = 1e-4

# %% GENERATING DATASET
# Build a randomized n-gram model over the 10 MNIST digit classes and display it.
# NOTE(review): randomized_ngram is a project-local helper — the positional
# arguments are presumably (num_entries, order) or similar; confirm against its
# definition before relying on this reading.
ngram = randomized_ngram(7, 2, out_dim=10)
ngram.show()

# Standard MNIST train/test loaders, plus a loader that yields digit sequences
# sampled according to the ngram model (20k samples).
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=20000)

# %% REGULAR TRAINING (SGD)
# Baseline supervised-training path (Adam on the primal parameters), kept as a
# disabled reference implementation.
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)

# %% DUAL TRAINING
# Example #2 (score: 0)
    save_every = args.save_every
    log_every = args.log_every
    test_every = args.test_every
    primal_lr = args.primal_lr
    dual_lr = args.dual_lr
else:
    continuation = False
    num_epochs = 1000
    save_every = 100
    log_every = 100
    test_every = 5
    primal_lr = 1e-6
    dual_lr = 1e-4

# %% GENERATING DATASET
# Randomized n-gram over the 10 MNIST classes; min_var presumably lower-bounds
# the variance of the randomized transition weights — TODO confirm against the
# randomized_ngram definition.
ngram = randomized_ngram(10, 2, out_dim=10, min_var=2e-2)
ngram.show()

# %% CREATING MODEL
# MNIST train/test loaders plus a sequence loader that samples 20k digit
# sequences from the ngram model.
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=20000)

# %% REGULAR TRAINING (SGD)
# Disabled baseline: supervised training with Adam, kept for reference.
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)
# Example #3 (score: 0)
    save_every = args.save_every
    log_every = args.log_every
    test_every = args.test_every
    primal_lr = args.primal_lr
    dual_lr = args.dual_lr
else:
    continuation = False
    num_epochs = 1000
    save_every = 100
    log_every = 100
    test_every = 5
    primal_lr = 1e-6
    dual_lr = 1e-4

# %% GENERATING DATASET
# Randomized n-gram mapped onto 5 output classes; note this variant uses a
# larger second positional argument (20) than the other examples — presumably a
# size/order parameter of randomized_ngram; verify against its definition.
ngram = randomized_ngram(3, 20, out_dim=5)

# MNIST train/test loaders plus a sequence loader drawing 40k digit sequences
# from the ngram model (double the sample count used in Examples #1/#2).
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=40000)

# %% REGULAR TRAINING (SGD)
# Disabled baseline: supervised training with Adam on the primal parameters.
# model = Model(ngram)
# model.to(DEVICE)
# model.init_weights()

# optimizer = torch.optim.Adam(model.primal.parameters())
# history = SGD(model, optimizer, data_loader, test_loader, num_epochs=1, log_every=50, test_every=1)

# %% DUAL TRAINING
if continuation:
# Example #4 (score: 0)
# Training-run hyperparameters (this example hard-codes them rather than
# reading argparse values as the other examples do).
num_epochs = 500
save_every = 100   # checkpoint interval (epochs) — presumed; confirm in the training loop
log_every = 100    # logging interval — presumed; confirm in the training loop
test_every = 2     # evaluation interval (epochs)
primal_lr = 1e-6   # learning rate for the primal variables
dual_lr = 1e-4     # learning rate for the dual variables

# Feature flags selecting which diagnostics/evaluations run during training.
show_dual = False
predictions_on_sequences = True
predictions_on_data = False
ngram_data_stats = False
ngram_test_stats = True
loss_on_test = False

# %% CREATING NGRAM
# Randomized n-gram over 5 output classes; min_var presumably lower-bounds the
# randomized weights' variance — TODO confirm against randomized_ngram.
ngram = randomized_ngram(3, 10, out_dim=5, min_var=1e-2)
# Disabled alternative: a hand-specified trigram with two weighted entries,
# normalized before use.
# ngram = Ngram(3)
# ngram[(0, 1, 2)] = 9.
# ngram[(1, 2, 3)] = 1.
# ngram.norm()
ngram.show()

# %% GENERATING DATASET
# MNIST train/test loaders, plus ngram-sampled sequence loaders for both
# training (50k samples) and held-out evaluation (10k samples).
data_loader = train_loader_MNIST()
test_loader = test_loader_MNIST()
sequence_loader = sequence_loader_MNIST(ngram, num_samples=50000)
sequence_test_loader = sequence_loader_MNIST(ngram, num_samples=10000)

# %% REGULAR TRAINING (SGD)
# Disabled baseline training path (truncated in this scraped fragment).
# model = Model(ngram)
# model.to(DEVICE)