Example #1
import numpy as np
import theano
import theano.tensor as T
import lasagne
# The dataset loaders used below (load_mnist_realval, load_mnist_binarized,
# omniglot, CalTech101Silhouettes, load_numpy_subclasses) come from the
# surrounding repository's own data-loading utilities.

sym_x = T.matrix('x')


def bernoullisample(x):
    # Treat each real-valued entry of x as a Bernoulli probability and draw a
    # fresh 0/1 sample ("dynamic binarization").
    return np.random.binomial(1, x, size=x.shape).astype(theano.config.floatX)
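# Illustrative check, added here and not part of the original snippet: entries
# near 1.0 are almost always sampled as 1, entries near 0.0 as 0. The probe
# array below is a made-up example; the exact draw varies per call.
_probe = np.array([[0.05, 0.95]], dtype=theano.config.floatX)
# bernoullisample(_probe) -> e.g. array([[0., 1.]]) (stochastic)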


### LOAD DATA AND SET UP SHARED VARIABLES
if dataset == 'sample':
    print("Using real-valued MNIST data, re-binarized by Bernoulli sampling after every epoch")
    train_x, train_t, valid_x, valid_t, test_x, test_t = load_mnist_realval()
    del train_t, valid_t, test_t
    preprocesses_dataset = bernoullisample
else:
    print "Using fixed binarized MNIST data"
    train_x, valid_x, test_x = load_mnist_binarized()
    preprocesses_dataset = lambda dataset: dataset  # identity; data is already binarized

train_x = np.concatenate([train_x, valid_x])  # fold the validation set into training

train_x = train_x.astype(theano.config.floatX)
test_x = test_x.astype(theano.config.floatX)

num_features=train_x.shape[-1]

sh_x_train = theano.shared(preprocesses_dataset(train_x), borrow=True)
sh_x_test = theano.shared(preprocesses_dataset(test_x), borrow=True)
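# --- Usage sketch (added, not in the original): minibatching via `givens` ---
# With the data living in shared variables, the standard Theano pattern is to
# index a minibatch on-device inside theano.function. `batch_size`, the
# identity "model", and the two-epoch loop are illustrative assumptions.
batch_size = 256
sym_index = T.iscalar('index')
fetch_batch = theano.function(
    [sym_index], sym_x,
    givens={sym_x: sh_x_train[sym_index * batch_size:(sym_index + 1) * batch_size]})

for epoch in range(2):  # assumed epoch count
    # For dataset == 'sample', refresh the Bernoulli binarization each epoch.
    sh_x_train.set_value(preprocesses_dataset(train_x))
    xb = fetch_batch(0)  # first minibatch, shape (batch_size, num_features)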


def batchnormlayer(l, num_units, nonlinearity, name, W=lasagne.init.GlorotUniform(), b=lasagne.init.Constant(0.)):
    l = lasagne.layers.DenseLayer(l, num_units=num_units, name="Dense-" + name, W=W, b=b, nonlinearity=None)
    # The original snippet is truncated here; completed below with the standard
    # pattern (an assumption): batch-normalize the linear output, then apply the nonlinearity.
    l = lasagne.layers.BatchNormLayer(l, name="BN-" + name)
    l = lasagne.layers.NonlinearityLayer(l, nonlinearity=nonlinearity, name="NL-" + name)
    return l
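# Usage sketch (added): stacking two batch-normalized hidden layers on a
# hypothetical input layer; the layer sizes are illustrative assumptions.
l_in = lasagne.layers.InputLayer((None, num_features))
l_h = batchnormlayer(l_in, 512, lasagne.nonlinearities.rectify, name="h1")
l_h = batchnormlayer(l_h, 512, lasagne.nonlinearities.rectify, name="h2")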
Example #2
    # (This snippet begins mid-way through the dataset dispatch, apparently
    # inside the Frey-faces branch.)
    train_x = train_x.reshape(-1, num_features)
    np.random.shuffle(train_x)
    num_used_test = 100
    test_x = train_x[:num_used_test]
    train_x = train_x[num_used_test:]
    print('Split frey_faces ...')
    print(test_x.shape)
    print(train_x.shape)
    preprocesses_dataset = lambda dataset: dataset  # identity; no per-epoch preprocessing
elif dataset == 'omniglot':
    print "Using omniglot dataset "
    train_x, test_x = omniglot()
    preprocesses_dataset = bernoullisample
elif dataset == 'fixed':
    print "Using fixed binarized MNIST data"
    train_x, valid_x, test_x = load_mnist_binarized()
    preprocesses_dataset = lambda dataset: dataset  # identity; data is already binarized
elif dataset == 'caltech':
    print "Using CalTech101Silhouettes dataset"
    train_x, valid_x, test_x = CalTech101Silhouettes()
    preprocesses_dataset = lambda dataset: dataset  # identity; data is already binary
elif dataset == 'norb_48':
    print "Using NORB dataset, size = 48"
    x, y = load_numpy_subclasses(size=48, normalize=True, centered=False)
    x = x.T
    train_x = x[:24300]
    test_x = x[24300 * 2:24300 * 3]  # debug split: used only for comparing generated samples
    del y
    preprocesses_dataset = lambda dataset: dataset  # identity; no per-epoch preprocessing
elif dataset == 'norb_96':
    print "Using NORB dataset, size = 96"