[test_loss, test_acc, test_prediction])
# Compile the symbolic gradient so it can be evaluated directly:
gradient_fn = theano.function([input_var, target_var], gradient)
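
# For reference, a minimal sketch of the minibatch helper used in the loops
# below, assuming mini_batch.iterate_minibatches follows the standard
# Lasagne tutorial pattern (the real mini_batch module is not shown here
# and may differ; numpy is an assumed dependency):
import numpy as np

def iterate_minibatches_sketch(inputs, targets, batchsize, shuffle=False):
    # Visit the data in batches of `batchsize`, optionally in shuffled
    # order; the final partial batch is dropped.
    indices = np.arange(len(inputs))
    if shuffle:
        np.random.shuffle(indices)
    for start_idx in range(0, len(inputs) - batchsize + 1, batchsize):
        excerpt = indices[start_idx:start_idx + batchsize]
        yield inputs[excerpt], targets[excerpt]
# The unlabeled variant used for the flower data below would take inputs
# only and yield inputs[excerpt] alone.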

# The training loop
print("Starting training...")
num_epochs = 20
for epoch in range(num_epochs):

    # In each epoch, we do a full pass over the training data:

    train_err = 0
    train_batches = 0
    start_time = time.time()

    for batch in mini_batch.iterate_minibatches(X_train,
                                                y_train,
                                                500,
                                                shuffle=True):

        inputs, targets = batch
        train_err += train_fn(inputs, targets)
        train_batches += 1

    # And a full pass over the validation data:
    val_err = 0
    val_acc = 0
    val_batches = 0

    for batch in mini_batch.iterate_minibatches(X_val,
                                                y_val,
                                                500,
                                                shuffle=False):

        inputs, targets = batch
        # val_fn is assumed to be the function compiled above from
        # [test_loss, test_acc, test_prediction]; the predictions are
        # not needed for the epoch summary.
        err, acc, _ = val_fn(inputs, targets)
        val_err += err
        val_acc += acc
        val_batches += 1

    # Then we print the results for this epoch:

    print("Epoch {} of {} took {:.3f}s".format(epoch + 1, num_epochs,
                                               time.time() - start_time))

    print("  training loss:\t\t{:.6f}".format(train_err / train_batches))
    print("  validation loss:\t\t{:.6f}".format(val_err / val_batches))
    print("  validation accuracy:\t\t{:.2f} %".format(val_acc / val_batches * 100))

################### Reconstruction network ###################
# A second training loop, this time on unlabeled flower images.
# view_fn maps a batch of inputs to the network's reconstructions
# (test_reconstruct is assumed to be defined earlier as the
# deterministic reconstruction output).
view_fn = theano.function([input_var], test_reconstruct)

# The training loop
print("Starting training...")
num_epochs = 20
for epoch in range(num_epochs):

    # In each epoch, we do a full pass over the training data:

    train_err = 0
    train_batches = 0
    start_time = time.time()

    for batch in mini_batch.iterate_minibatches(flower_train,
                                                100,
                                                shuffle=True):

        inputs = batch
        train_err += train_fn(inputs)
        train_batches += 1

    # Then we print the results for this epoch:

    print("Epoch {} of {} took {:.3f}s".format(epoch + 1, num_epochs,
                                               time.time() - start_time))

    print("  training loss:\t\t{:.6f}".format(train_err / train_batches))
    # No validation split is used for the flower data:
    #print("  validation loss:\t\t{:.6f}".format(val_err / val_batches))

################### View reconstruction ###################
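
# A minimal sketch of viewing a reconstruction with view_fn, assuming
# matplotlib is available and that each flower image is stored as a flat
# row; img_shape below is a hypothetical (height, width, channels) tuple
# and should be set to the data's actual dimensions.
import matplotlib.pyplot as plt

img_shape = (64, 64, 3)  # hypothetical; replace with the real image shape

sample = flower_train[:1]          # a single input image
reconstruction = view_fn(sample)   # the network's output for that image

plt.subplot(1, 2, 1)
plt.imshow(sample[0].reshape(img_shape))
plt.title("input")
plt.subplot(1, 2, 2)
plt.imshow(reconstruction[0].reshape(img_shape))
plt.title("reconstruction")
plt.show()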