# MLP probe experiment: pick the training and evaluation entry points.
train_fn, test_fn = mlp.train_mlp_probe, mlp.test_mlp_class

# Hyperparameter grid for the first sweep.  List-valued entries named in
# `validate_params` are cross-validated; scalar entries are fixed settings.
hyperparams = dict(
    batch_size=[100],
    learning_rate=[0.2],
    lambda_reg=[0.0001],
    num_hidden=[100, 250, 400],
    n_epochs=1000,
    error_func='cross_ent',
    final_layer=['tanh', 'sigmoid'],
    validate_params=['batch_size', 'learning_rate', 'lambda_reg', 'num_hidden'],
)

# Run the grid search over `hyperparams`; returns the best-scoring setting
# and the results of the full sweep (2 repeats per configuration).
grid_search = validation_helpers.validate_grid_search_cheat
best_params, all_params = grid_search(
    train_fn, test_fn, False,
    train_samples, train_labels,
    valid_samples, valid_labels,
    hyperparams, num_repeat=2)

# Average results due to non-deterministic nature of the model
f1s = numpy.zeros((1, train_labels.shape[1]))
precisions = numpy.zeros((1, train_labels.shape[1]))
recalls = numpy.zeros((1, train_labels.shape[1]))

num_repeat = 3

print 'All params', all_params
print 'Best params', best_params

for i in range(num_repeat):
    model = train_fn(train_labels, train_samples, valid_labels, valid_samples, best_params)
    _, _, _, _, f1_c, precision_c, recall_c = test_fn(valid_labels, valid_samples, model)
# Hyperparameter grid for the second, shorter sweep (fewer epochs, finer
# regularization values).  All list-valued entries are cross-validated.
hyperparams = dict(
    batch_size=[100],
    learning_rate=[0.2, 0.4],
    lambda_reg=[0.000001, 0.00001],
    num_hidden=[300, 400],
    n_epochs=50,
    validate_params=['batch_size', 'learning_rate', 'lambda_reg', 'num_hidden'],
)

# Second grid search over the refreshed hyperparameter grid
# (3 repeats per configuration this time).
grid_search = validation_helpers.validate_grid_search_cheat
best_params, all_params = grid_search(
    train_fn, test_fn, False,
    train_samples, train_labels,
    valid_samples, valid_labels,
    hyperparams, num_repeat=3)

# Average results due to non-deterministic nature of the model
f1s = numpy.zeros((1, train_labels.shape[1]))
precisions = numpy.zeros((1, train_labels.shape[1]))
recalls = numpy.zeros((1, train_labels.shape[1]))

num_repeat = 3

print 'All params', all_params
print 'Best params', best_params