import numpy

import theano
import theano.tensor as T
from theano import config, tensor
from theano.tensor.nlinalg import det  # older Theano exposed det via theano.sandbox.linalg
from theano.tests import unittest_tools as utt


def test_det_shape():
    rng = numpy.random.RandomState(utt.fetch_seed())
    r = rng.randn(5, 5).astype(config.floatX)
    x = tensor.matrix()
    f = theano.function([x], det(x))
    f_shape = theano.function([x], det(x).shape)
    # The inferred shape of det(x) must agree with the computed value's shape.
    assert numpy.all(f(r).shape == f_shape(r))
def test_det():
    rng = numpy.random.RandomState(utt.fetch_seed())
    r = rng.randn(5, 5).astype(config.floatX)
    x = tensor.matrix()
    f = theano.function([x], det(x))
    assert numpy.allclose(numpy.linalg.det(r), f(r))
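# A hedged addition (not part of the original tests): a sketch that checks the
# symbolic gradient of det against finite differences using Theano's
# utt.verify_grad helper. This is the gradient the model below relies on via
# T.grad through log|det(J)|.
def test_det_grad():
    rng = numpy.random.RandomState(utt.fetch_seed())
    r = rng.randn(5, 5).astype(config.floatX)
    utt.verify_grad(det, [r])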
def __init__(self, n_in, n_out, hls, acts):
    # `MLP` is defined elsewhere in this project; `hls` and `acts` are
    # presumably the hidden layer sizes and their activations.
    self.net = MLP(n_in, n_out, hls, acts)
    self.params = self.net.params

    self.X = T.vector('X')
    self.X.tag.test_value = numpy.random.uniform(
        size=(784,), high=1.0, low=0.0).astype('float32')
    self.lr = T.scalar('lr')
    self.lr.tag.test_value = 0.25

    self.Z = self.net(self.X)
    self.W = self.net.layers[0].W
    # self.dtanh = 1 - self.Z**2
    # Jacobian of the first (linear) layer; the tanh factor above is unused.
    self.J = self.W
    self.logpx = T.log(T.abs_(det(self.J)))

    # Plain SGD updates on every parameter.
    self.grads = T.grad(self.logpx, self.net.params)
    self.updates = [(param, param - self.lr * grad)
                    for param, grad in zip(self.params, self.grads)]
    self.train_fn = theano.function([self.X, self.lr], self.logpx,
                                    updates=self.updates)
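# A hypothetical usage sketch, under assumptions: the __init__ above belongs
# to some wrapper class (called FlowModel here purely for illustration), and
# MLP is importable from the surrounding project. Each call to train_fn takes
# one SGD step on log|det(W)| and returns its current value.
if __name__ == '__main__':
    model = FlowModel(784, 784, [], ['tanh'])  # constructor args are guesses
    rng = numpy.random.RandomState(123)
    for step in range(100):
        x = rng.uniform(size=(784,)).astype('float32')
        logpx = model.train_fn(x, 0.25)
        if step % 10 == 0:
            print(step, logpx)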