test_set.X = 2* test_set.X.reshape(-1, 784) - 1. # flatten targets test_set.y = test_set.y.reshape(-1) print('Building the MLP...') # Prepare Theano variables for inputs and targets input = T.matrix('inputs') target = T.vector('targets') mlp = lasagne.layers.InputLayer(shape=(None, 784),input_var=input) # Input layer is not binary -> use baseline kernel in first hidden layer mlp = binary_ops.DenseLayer( mlp, nonlinearity=lasagne.nonlinearities.identity, num_units=num_units, kernel = "baseline") mlp = lasagne.layers.BatchNormLayer(mlp) mlp = lasagne.layers.NonlinearityLayer(mlp,nonlinearity=binary_ops.SignTheano) for k in range(1,n_hidden_layers): mlp = binary_ops.DenseLayer( mlp, nonlinearity=lasagne.nonlinearities.identity, num_units=num_units, kernel = kernel) mlp = lasagne.layers.BatchNormLayer(mlp)
# Final conv block: 512 3x3 filters with unit padding, then 2x2 max-pooling.
# `cnn` is built up before this chunk; `convOp` selects the convolution
# implementation (semantics defined in binary_ops — not visible here).
cnn = binary_ops.Conv2DLayer(
        cnn,
        num_filters=512,
        filter_size=(3, 3),
        pad=1,
        nonlinearity=lasagne.nonlinearities.identity,
        convOp=convOp)
cnn = lasagne.layers.MaxPool2DLayer(cnn, pool_size=(2, 2))
# Batch-norm then binarize activations; the layers themselves use identity
# nonlinearity so SignTheano is applied after normalization.
cnn = lasagne.layers.BatchNormLayer(cnn)
cnn = lasagne.layers.NonlinearityLayer(cnn, nonlinearity=binary_ops.SignTheano)

# First fully-connected block (1024 units) with the configured binary kernel.
cnn = binary_ops.DenseLayer(
        cnn,
        nonlinearity=lasagne.nonlinearities.identity,
        num_units=1024,
        kernel=kernel)
cnn = lasagne.layers.BatchNormLayer(cnn)
cnn = lasagne.layers.NonlinearityLayer(cnn, nonlinearity=binary_ops.SignTheano)

# Second fully-connected block, identical shape to the first.
cnn = binary_ops.DenseLayer(
        cnn,
        nonlinearity=lasagne.nonlinearities.identity,
        num_units=1024,
        kernel=kernel)
cnn = lasagne.layers.BatchNormLayer(cnn)
cnn = lasagne.layers.NonlinearityLayer(cnn, nonlinearity=binary_ops.SignTheano)