Beispiel #1
0
def test_arg_of_sigmoid_good():
    """
    arg_of_sigmoid must recover the exact pre-activation variable when
    handed the output of a sigmoid.
    """
    pre_activation = T.matrix()
    recovered = arg_of_sigmoid(T.nnet.sigmoid(pre_activation))
    # Identity, not mere equality: the very same symbolic variable comes back.
    assert recovered is pre_activation
Beispiel #2
0
def test_arg_of_sigmoid_good():
    """
    Feeding a sigmoid's output to arg_of_sigmoid must hand back the
    sigmoid's own input variable.
    """
    logits = T.matrix()
    sig_out = T.nnet.sigmoid(logits)
    # The recovered node must be the identical object, not a copy.
    assert arg_of_sigmoid(sig_out) is logits
Beispiel #3
0
def test_arg_of_sigmoid_bad():
    """
    arg_of_sigmoid must raise TypeError when its argument is not the
    output of a sigmoid (here: a softmax output).
    """
    inputs = T.matrix()
    not_a_sigmoid = T.nnet.softmax(inputs)
    raised = False
    try:
        arg_of_sigmoid(not_a_sigmoid)
    except TypeError:
        raised = True
    # Reaching this point without a TypeError means the check is broken.
    assert raised
Beispiel #4
0
def test_arg_of_sigmoid_bad():
    """
    A non-sigmoid expression (softmax output) handed to arg_of_sigmoid
    must trigger a TypeError.
    """
    softmax_out = T.nnet.softmax(T.matrix())
    try:
        arg_of_sigmoid(softmax_out)
    except TypeError:
        # Expected path: the bad input was rejected.
        return
    assert False  # arg_of_sigmoid accepted a softmax output
Beispiel #5
0
# NOTE(review): flat script fragment. `dataset`, `g`, `sigma`, `X`, `sharedX`,
# and `function` are defined outside this view -- TODO confirm their origin.
# One row of the accumulator per dataset example.
m = dataset.X.shape[0]
# Per-example running value of the streaming log-mean-exp computed below.
accumulator = sharedX(np.zeros((m, )))
# Draw a single noise sample and push it through the generator MLP.
# NOTE(review): presumably g is a GAN-style generator -- verify against caller.
z_samples = g.get_noise(1)
x_samples = g.mlp.fprop(z_samples)
# x_samples = X
from theano.compat import OrderedDict
updates = OrderedDict()
from theano import shared
# Count of samples folded into the accumulator so far.
num_samples = shared(1)
# NOTE(review): sigma is converted to a shared variable but never used in the
# visible code -- possibly consumed later in the file.
sigma = sharedX(float(sigma))
prev = accumulator
from theano.printing import Print
#prev = Print('prev',attrs=['min','max'])(prev)
# E_x log E_z exp(- sum_i softplus( (1 - 2 x_i) A(z)_i) )
from pylearn2.expr.nnet import arg_of_sigmoid
# Recover the pre-sigmoid activations A(z) of the generator's output layer.
A = arg_of_sigmoid(x_samples)
# Per-example log term for the current sample (the inner expression of the
# expectation written in the comment above), summed over feature dimensions.
cur = -T.nnet.softplus((1. - 2. * X) * A).sum(axis=1)
#cur = Print('cur',attrs=['min','max'])(cur)
# Numerically stable streaming log-mean-exp: subtract the running max before
# exponentiating, then add it back outside the log.
ofs = T.maximum(prev, cur)
num_samples_f = T.cast(num_samples, 'float32')
# accumulator <- log( (n * exp(prev) + exp(cur)) / (n + 1) ), computed around
# the offset `ofs` to avoid overflow/underflow.
updates[accumulator] = ofs + T.log(
    (num_samples_f * T.exp(prev - ofs) + T.exp(cur - ofs)) /
    (num_samples_f + 1.))
updates[num_samples] = num_samples + 1
# `f` performs one streaming update per call.
f = function([], updates=updates)
# Re-purpose the (already compiled into `f`) updates dict: `first` seeds the
# accumulator with the first sample's value and leaves num_samples alone.
updates[accumulator] = cur
del updates[num_samples]
first = function([], updates=updates)
# Mean over examples of the per-example log estimates.
avg_ll = accumulator.mean()

import time
Beispiel #6
0
# NOTE(review): flat script fragment; `dataset`, `g`, `sigma`, `X`, `sharedX`,
# and `function` come from outside this view -- TODO confirm.
# One accumulator entry per dataset example.
m = dataset.X.shape[0]
accumulator = sharedX(np.zeros((m,)))
# One noise draw through the generator network.
z_samples = g.get_noise(1)
x_samples = g.mlp.fprop(z_samples)
# x_samples = X
from theano.compat import OrderedDict
updates = OrderedDict()
from theano import shared
# How many samples have been averaged into the accumulator.
num_samples = shared(1)
# NOTE(review): sigma is wrapped but unused in the visible lines.
sigma = sharedX(float(sigma))
prev = accumulator
from theano.printing import Print
#prev = Print('prev',attrs=['min','max'])(prev)
# E_x log E_z exp(- sum_i softplus( (1 - 2 x_i) A(z)_i) )
from pylearn2.expr.nnet import arg_of_sigmoid
# Pre-sigmoid activations of the generator's sample.
A = arg_of_sigmoid(x_samples)
# Per-example log term for this sample (see the comment/formula above).
cur = - T.nnet.softplus((1. - 2. * X) * A).sum(axis=1)
#cur = Print('cur',attrs=['min','max'])(cur)
# Streaming log-mean-exp with max-offset for numerical stability:
# accumulator <- log((n * exp(prev) + exp(cur)) / (n + 1)).
ofs = T.maximum(prev, cur)
num_samples_f = T.cast(num_samples, 'float32')
updates[accumulator] = ofs + T.log((num_samples_f * T.exp(prev - ofs) + T.exp(cur - ofs)) / (num_samples_f + 1.))
updates[num_samples] = num_samples + 1
# `f`: one incremental averaging step per call.
f = function([], updates=updates)
# Mutate the dict after compiling `f` (Theano captured it already): `first`
# seeds the accumulator with the first sample only.
updates[accumulator] = cur
del updates[num_samples]
first = function([], updates=updates)
avg_ll = accumulator.mean()

import time
# Timestamp for the timing of the seeding step below.
prev_t = time.time()
first()