Example #1
def numpy_gaussian_decoder(decoder_params):
    # mostly redundant code with encoder and gaussian_decoder in vae.py
    nnet_params, (W_mu, b_mu), _ = \
        unpack_gaussian_params(decoder_params)
    nnet = compose(numpy_tanh_layer(W, b) for W, b in nnet_params)
    mu = numpy_linear_layer(W_mu, b_mu)

    def decode(X):
        return sigmoid(mu(nnet(X)))

    return decode
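
The helper functions these examples rely on (compose, tanh_layer, linear_layer, sigmoid_layer, sigmoid, and the unpack_*_params routines, plus the numpy_-prefixed variants used above) are defined elsewhere in the source repository, which appears to use numpy (possibly autograd.numpy) as np. Below is a minimal sketch of plausible definitions, inferred from how they are called here rather than taken from the original code:

import numpy as np
from functools import reduce

def sigmoid(x):
    return 1. / (1. + np.exp(-x))

def compose(funcs):
    # apply a sequence of unary functions in order: compose([f, g])(x) == g(f(x))
    funcs = list(funcs)
    return lambda x: reduce(lambda acc, f: f(acc), funcs, x)

def linear_layer(W, b):
    return lambda x: np.dot(x, W) + b

def tanh_layer(W, b):
    return lambda x: np.tanh(np.dot(x, W) + b)

def sigmoid_layer(W, b):
    return lambda x: sigmoid(np.dot(x, W) + b)

# assumption: the numpy_-prefixed variants in Example #1 behave the same way
numpy_linear_layer, numpy_tanh_layer = linear_layer, tanh_layer

def unpack_gaussian_params(params):
    # assumption: the last two (W, b) pairs are the mu and log_sigmasq output heads
    return params[:-2], params[-2], params[-1]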
Example #2
def binary_decoder(decoder_params):
    'a neural net with tanh layers until the final sigmoid layer'

    nnet_params, (W_out, b_out) = unpack_binary_params(decoder_params)

    nnet = compose(tanh_layer(W, b) for W, b in nnet_params)
    Y = sigmoid_layer(W_out, b_out)

    def decode(Z):
        return Y(nnet(Z))

    return decode
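
unpack_binary_params is not shown on this page; by analogy with unpack_gaussian_params it presumably splits off a single output head. Here is a hypothetical end-to-end usage sketch, with the parameter layout and layer sizes invented for illustration and the helpers sketched after Example #1:

import numpy.random as npr

def unpack_binary_params(params):
    # assumption: the last (W, b) pair is the sigmoid output layer
    return params[:-1], params[-1]

def init_layer(m, n, scale=1e-2):
    return scale * npr.randn(m, n), scale * npr.randn(n)

# latent dim 2 -> two tanh hidden layers of 20 units -> 784 Bernoulli means
layer_sizes = [2, 20, 20, 784]
decoder_params = [init_layer(m, n)
                  for m, n in zip(layer_sizes[:-1], layer_sizes[1:])]

decode = binary_decoder(decoder_params)
probs = decode(npr.randn(5, 2))  # shape (5, 784), entries in (0, 1)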
Example #3
def mlp_decode(z, phi, tanh_scale=10., sigmoid_output=True):
    nnet_params, ((W_mu, b_mu), (W_sigma, b_sigma)) = phi[:-2], phi[-2:]
    z = z if z.ndim == 3 else z[:,None,:]  # ensure z.shape == (T, K, n)

    nnet = compose(tanh_layer(W, b) for W, b in nnet_params)
    mu = linear_layer(W_mu, b_mu)
    log_sigmasq = linear_layer(W_sigma, b_sigma)

    # flatten any leading axes, run the MLP, then restore the shape at the end
    nnet_outputs = nnet(np.reshape(z, (-1, z.shape[-1])))
    mu = sigmoid(mu(nnet_outputs)) if sigmoid_output else mu(nnet_outputs)
    # soft-clip the predicted log-variance into (-tanh_scale, tanh_scale)
    log_sigmasq = tanh_scale * np.tanh(log_sigmasq(nnet_outputs) / tanh_scale)

    shape = z.shape[:-1] + (-1,)
    return mu.reshape(shape), log_sigmasq.reshape(shape)
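
The expression tanh_scale * np.tanh(x / tanh_scale) is a smooth clamp: since tanh(u) ≈ u for small u, it is approximately the identity near zero, but its output is bounded in (-tanh_scale, tanh_scale), which keeps the predicted log-variance from running away during training. A quick self-contained illustration:

import numpy as np

def soft_clip(x, scale=10.):
    return scale * np.tanh(x / scale)

print(soft_clip(np.array([0.5, 5., 50., 500.])))
# ~ [0.4996  4.6212  9.9991  10.0]: near-identity for small inputs, capped near scale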
Example #4
def encoder(encoder_params):
    "a neural net with tanh layers until the final layer,"
    "which generates mu and log_sigmasq separately"

    nnet_params, (W_mu, b_mu), (W_sigma, b_sigma) = unpack_gaussian_params(encoder_params)

    nnet = compose(tanh_layer(W, b) for W, b in nnet_params)
    mu = linear_layer(W_mu, b_mu)
    log_sigmasq = linear_layer(W_sigma, b_sigma)

    def encode(X):
        h = nnet(X)
        return mu(h), log_sigmasq(h)

    return encode
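
In a VAE, the encoder's (mu, log_sigmasq) outputs typically feed the reparameterization trick: draw eps ~ N(0, I) and form z = mu + exp(log_sigmasq / 2) * eps. That step is not part of the example above; a hypothetical sketch:

import numpy as np
import numpy.random as npr

def sample_latents(encode, X, num_samples=1):
    mu, log_sigmasq = encode(X)
    sigma = np.exp(0.5 * log_sigmasq)
    eps = npr.randn(num_samples, *mu.shape)  # standard normal noise
    return mu + sigma * eps                  # shape (num_samples,) + mu.shape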
Example #5
def mlp_recognize(x, psi, tanh_scale=10.):
    nnet_params, ((W_h, b_h), (W_J, b_J)) = psi[:-2], psi[-2:]
    shape = x.shape[:-1] + (-1,)

    nnet = compose(tanh_layer(W, b) for W, b in nnet_params)
    h = linear_layer(W_h, b_h)
    log_J = linear_layer(W_J, b_J)

    # flatten any leading axes, run the MLP, then restore the shape at the end
    nnet_outputs = nnet(np.reshape(x, (-1, x.shape[-1])))
    # soft-clip the predicted log-precision, then form J = -1/2 * precision (< 0)
    J = -1./2 * np.exp(tanh_scale * np.tanh(log_J(nnet_outputs) / tanh_scale))
    h = h(nnet_outputs)
    logZ = np.zeros(shape[:-1])

    return make_tuple(np.reshape(J, shape), np.reshape(h, shape), logZ)
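
This recognition network returns per-coordinate natural parameters of a diagonal Gaussian potential: J (equal to -1/2 times the precision, hence always negative) and h (the precision-weighted mean), plus a zero log-normalizer; make_tuple is presumably a tuple-building helper from the surrounding codebase. Converting back to mean/variance form, as a sketch of the standard identities:

def natural_to_mean_params(J, h):
    # for a diagonal Gaussian, J = -1 / (2 * sigmasq) and h = mu / sigmasq
    sigmasq = -0.5 / J
    mu = sigmasq * h
    return mu, sigmasq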