Example 1
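
The block below assumes that `data`, `target`, `train_len`, `valid_len`, and the input dimension `D` are already defined, and that `log` and `solver` are local modules providing the LogisticClassifier and Solver classes. A minimal setup sketch, with purely illustrative shapes and split sizes:

import numpy as np
# import log, solver   # assumed local modules providing LogisticClassifier and Solver

# Hypothetical setup: the sample count, feature dimension, and split sizes below
# are assumptions for illustration only, not taken from the original data.
N, D = 1000, 20                           # number of samples and feature dimension
data = np.random.randn(N, D)              # feature matrix
target = np.random.randint(0, 2, size=N)  # class labels
train_len, valid_len = 500, 250           # sizes of the train and validation splits

# Split the raw arrays into train / validation / test portions: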
train_data = data[:train_len]
train_tar = target[:train_len]
valid_data = data[train_len:train_len + valid_len]
valid_tar = target[train_len:train_len + valid_len]
test_data = data[train_len + valid_len:]
test_tar = target[train_len + valid_len:]

data = {
    'X_train': train_data,  # training data
    'y_train': train_tar,  # training labels
    'X_val': valid_data,  # validation data
    'y_val': valid_tar  # validation labels
}

model = log.LogisticClassifier(input_dim=D, hidden_dim=200, reg=0.001)

solver = solver.Solver(model, data,
                       update_rule='adam',
                       optim_config={
                           'learning_rate': 1e-3,
                       },
                       lr_decay=1,  # decay factor of 1 keeps the learning rate constant
                       num_epochs=3000, batch_size=500,
                       print_every=1)
solver.train()

acc = solver.check_accuracy(test_data, test_tar)

print('Test Accuracy: %f' % acc)
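
The second block reloads the dataset from `data.pkl` and repeats the experiment with a plain logistic classifier, using the first 500 samples for training, the next 250 for validation, and the remaining rows as a held-out test set.
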
import pickle
import numpy as np

with open('data.pkl', 'rb') as f:
    data = pickle.load(f, encoding='latin1')  # data[0]: feature matrix, data[1]: labels
print(np.shape(data[0][0:500, :]))  # shape of the 500-sample training split


x_test = data[0][750:, :]  # held-out test features
y_test = data[1][750:]     # held-out test labels

data_input = {
    'X_train': data[0][0:500, :],   # training data
    'y_train': data[1][0:500],      # training labels
    'X_val': data[0][500:750, :],   # validation data
    'y_val': data[1][500:750]       # validation labels
}

model = logistic.LogisticClassifier(input_dim=20, reg=0.12)  # lr=2
# model = logistic.LogisticClassifier(input_dim=20, hidden_dim=16, reg=0.08)  # lr=0.8
solver = solver.Solver(model, data_input,
                update_rule='sgd',
                optim_config={
                  'learning_rate': 0.8,
                },
                lr_decay=0.98,
                num_epochs=800, batch_size=40,
                print_every=2000)
solver.train()
print(solver.check_accuracy(x_test, y_test, num_samples=None, batch_size=40))
#print(solver.best_params)