# cost function
cost = (nll_multiclass(mlp.output, it) + L1_reg * mlp.L1 + L2_reg * mlp.L2_sqr)
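# For reference, nll_multiclass is presumably the mean negative log-likelihood of
# the integer targets `it` under the softmax output. A minimal sketch of that
# idea, assuming the usual `import theano.tensor as T` (the name and signature
# are assumptions, not this library's API):
#
#   def nll_multiclass(output, targets):
#       # output:  (n_samples, n_classes) class probabilities
#       # targets: (n_samples,) integer class labels
#       return -T.mean(T.log(output)[T.arange(targets.shape[0]), targets])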

pred = pred_multiclass(mlp.output)

errors = pred_error(pred, it)

params = flatten(mlp.params)
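# The remaining helpers presumably reduce to standard Theano expressions
# (again assumptions about this library, not its actual code):
#
#   pred_multiclass(output)  ~  T.argmax(output, axis=1)    # most likely class
#   pred_error(pred, it)     ~  T.mean(T.neq(pred, it))     # misclassification rate
#   flatten(mlp.params)      ~  one flat list of all per-layer shared variables,
#                               which is the form the optimizer expects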

print "training the MLP with rmsprop"
optimize(dataset=dataset,
         inputs=inputs,
         cost=cost,
         params=params,
         errors=errors,
         n_epochs=1000,
         batch_size=20,
         patience=5000,
         patience_increase=1.5,
         improvement_threshold=0.995,
         optimizer="rmsprop")

print "compiling the prediction function"
predict = theano.function(inputs=[x], outputs=pred)
distribution = theano.function(inputs=[x], outputs=mlp.output)

print "predicting the first 10 samples of the test dataset"
print "predict:", predict(mnist[2][0][0:10])
print "answer: ", mnist[2][1][0:10]

print "with dropout, the output distributions should all be slightly different"

Example #2

# cost function
cost = (
    nll_multiclass(mlp.output, it)
    + L1_reg * mlp.L1
    + L2_reg * mlp.L2_sqr
)

pred = pred_multiclass(mlp.output)

errors = pred_error(pred, it)

params = flatten(mlp.params)

print "training the MLP with adadelta"
optimize(dataset=dataset,
        inputs=inputs,
        cost=cost,
        params=params,
        errors=errors,
        n_epochs=1000,
        batch_size=20,
        patience=5000,
        patience_increase=1.5,
        improvement_threshold=0.995,
        optimizer="adadelta")

print ""
print "compiling the prediction function"
predict = theano.function(inputs=[x], outputs=pred)

print "predicting the first 10 samples of the test dataset"
print "predict:", predict(mnist[2][0][0:10])
print "answer: ", mnist[2][1][0:10]

Example #3

pred = pred_multiclass(mlp.output)

errors = pred_error(pred, it)

params = flatten(mlp.params)


print "training the MLP with rmsprop"
losses = optimize(
    dataset=dataset,
    inputs=inputs,
    cost=cost,
    params=params,
    errors=errors,
    n_epochs=2,
    batch_size=20,
    patience=5000,
    patience_increase=1.5,
    improvement_threshold=0.995,
    optimizer='rmsprop'
)
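# The return value of optimize() is captured here; assuming it is a sequence of
# recorded training losses, it can be inspected after this short 2-epoch run:
print "recorded %d losses" % len(losses)
print "first/last loss:", losses[0], losses[-1]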

print "compiling the prediction function"
predict = theano.function(inputs=[x], outputs=pred)

print "predicting the first 10 samples of the test dataset"
print "predict:", predict(mnist[2][0][0:10])
print "answer: ", mnist[2][1][0:10]

Example #4

pred = pred_multiclass(y)

errors = pred_error(pred, it)

params = flatten(layers_params(layers))

print "training the LSTM with adadelta"
optimize(dataset=dataset,
        inputs=inputs,
        cost=cost,
        params=params,
        errors=errors,
        n_epochs=200,
        batch_size=64,
        patience=1500,
        patience_increase=1.25,
        improvement_threshold=0.995,
        test_batches=1,
        print_cost=True,
        optimizer="adadelta")

print "compiling the prediction function"
predict = theano.function(inputs=[x, mask], outputs=pred)

print "predicting the first 10 samples of the test dataset"
print "predict:", predict(dataset[2][0].get_value()[0:10], dataset[2][-1].get_value()[0:10])
print "answer: ", dataset[2][1].get_value()[0:10]

Example #5

# cost function
cost = (
    nll_multiclass(mlp.output, it)
    + L1_reg * mlp.L1
    + L2_reg * mlp.L2_sqr
)

pred = pred_multiclass(mlp.output)

errors = pred_error(pred, it)

params = flatten(mlp.params)

print "training the MLP with sgd"
optimize(dataset=dataset,
    inputs=inputs,
    cost=cost,
    params=params,
    errors=errors,
    learning_rate=0.01,
    momentum=0.2,
    n_epochs=1000,
    batch_size=20,
    patience=1000,
    patience_increase=1.5,
    improvement_threshold=0.995,
    optimizer="sgd")

print "compiling the prediction function"
predict = theano.function(inputs=[x], outputs=pred)

print "predicting the first 10 samples of the test dataset"
print "predict:", predict(mnist[2][0][0:10])
print "answer: ", mnist[2][1][0:10]