Example #1
File: nnet.py Project: lfywork/svae
def _gresnet(mlp_type, mlp, params, inputs):
    ravel, unravel = _make_ravelers(inputs.shape)
    mlp_params, (W, b1, b2) = params

    if mlp_type == 'mean':
        mu_mlp, sigmasq_mlp = mlp(mlp_params, inputs)
        mu_res = unravel(np.dot(ravel(inputs), W) + b1)
        sigmasq_res = log1pexp(b2)
        return tuple_((mu_mlp + mu_res, sigmasq_mlp + sigmasq_res))
    else:
        J_mlp, h_mlp = mlp(mlp_params, inputs)
        J_res = -1. / 2 * log1pexp(b2)
        h_res = unravel(np.dot(ravel(inputs), W) + b1)
        return tuple_((J_mlp + J_res, h_mlp + h_res))
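A gaussian resnet decoder: the MLP's output gets a linear residual added to its mean (or to h in information form), and log1pexp (softplus) keeps the variance contribution positive. A minimal sketch of the 'mean' branch with plain NumPy stand-ins; the toy mlp, shapes, and log1pexp here are illustrative, not the project's versions, and ravel/unravel are identities because the toy inputs are already 2-D.

import numpy as np

log1pexp = lambda x: np.log1p(np.exp(x))        # softplus, keeps variances positive

def toy_mlp(params, inputs):
    # stand-in network head returning a (mean, variance) pair
    return np.tanh(inputs), log1pexp(inputs)

N, D = 5, 3
inputs = np.random.randn(N, D)
W, b1, b2 = np.eye(D), np.zeros(D), np.zeros(D)

mu_mlp, sigmasq_mlp = toy_mlp(None, inputs)
mu = mu_mlp + (np.dot(inputs, W) + b1)          # MLP mean plus linear residual
sigmasq = sigmasq_mlp + log1pexp(b2)            # sum of two positive variance terms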
Example #2
File: mniw.py Project: lfywork/svae
def expectedstats_standard(nu, S, M, K, fudge=1e-8):
    m = M.shape[0]
    E_Sigmainv = nu * symmetrize(np.linalg.inv(S)) + fudge * np.eye(S.shape[0])
    E_Sigmainv_A = nu * np.linalg.solve(S, M)
    E_AT_Sigmainv_A = m*K + nu*symmetrize(np.dot(M.T, np.linalg.solve(S, M))) \
        + fudge*np.eye(K.shape[0])
    E_logdetSigmainv = digamma((nu-np.arange(m))/2.).sum() \
        + m*np.log(2) - np.linalg.slogdet(S)[1]

    assert is_posdef(E_Sigmainv)
    assert is_posdef(E_AT_Sigmainv_A)

    return tuple_((-1. / 2 * E_AT_Sigmainv_A, E_Sigmainv_A.T,
                   -1. / 2 * E_Sigmainv, 1. / 2 * E_logdetSigmainv))
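These are the standard matrix-normal inverse-Wishart expectations of the sufficient statistics (E[Sigma^-1], E[Sigma^-1 A], E[A^T Sigma^-1 A], E[log|Sigma^-1|]), with a small fudge multiple of the identity added so the positive-definite checks pass under finite precision. A toy numeric sketch with local stand-ins for symmetrize and is_posdef (assumed helpers, not the project's imports; the fudge jitter is omitted):

import numpy as np
from scipy.special import digamma

symmetrize = lambda A: (A + A.T) / 2.
is_posdef = lambda A: np.all(np.linalg.eigvalsh(symmetrize(A)) > 0)

m, n = 2, 3                                # A is m x n, Sigma is m x m
nu, S = m + 2., np.eye(m)                  # inverse-Wishart dof and scale
M, K = np.zeros((m, n)), np.eye(n)         # mean of A and column covariance factor

E_Sigmainv = nu * symmetrize(np.linalg.inv(S))
E_Sigmainv_A = nu * np.linalg.solve(S, M)
E_AT_Sigmainv_A = m*K + nu*symmetrize(np.dot(M.T, np.linalg.solve(S, M)))
E_logdetSigmainv = digamma((nu - np.arange(m))/2.).sum() \
    + m*np.log(2) - np.linalg.slogdet(S)[1]

assert is_posdef(E_Sigmainv) and is_posdef(E_AT_Sigmainv_A)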
Example #3
def natural_lds_inference_general_autograd(natparam,
                                           node_params,
                                           num_samples=None):
    init_params, pair_params = natparam

    def lds_log_normalizer(all_natparams):
        init_params, pair_params, node_params = all_natparams
        forward_messages, lognorm = natural_filter_forward_general(
            init_params, pair_params, node_params)
        return lognorm, (lognorm, forward_messages)

    all_natparams = tuple_(init_params, pair_params, node_params)
    expected_stats, (
        lognorm, forward_messages) = agrad(lds_log_normalizer)(all_natparams)
    samples = natural_sample_backward_general(forward_messages, pair_params,
                                              num_samples)

    return samples, expected_stats, lognorm
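The inference routine leans on the exponential-family identity that the gradient of the log-normalizer with respect to the natural parameters equals the expected sufficient statistics; the project's agrad evidently also threads the auxiliary (lognorm, forward_messages) output through so the forward messages can be reused for backward sampling. A minimal sketch of the identity for a scalar Gaussian, using plain autograd.grad on a scalar-valued log-normalizer (toy parameterization, not the project's code):

import autograd.numpy as np
from autograd import grad

def log_normalizer(natparam):
    # natural parameters of a scalar Gaussian: eta1 = mu/sigma^2, eta2 = -1/(2 sigma^2)
    eta1, eta2 = natparam[0], natparam[1]
    return -eta1**2 / (4. * eta2) - 0.5 * np.log(-2. * eta2)

natparam = np.array([1.0, -0.5])               # mu = 1, sigma^2 = 1
expected_stats = grad(log_normalizer)(natparam)
# expected_stats is approximately [E[x], E[x^2]] = [1.0, 2.0]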
Example #4
def get_hmm_vlb(lds_global_natparam, hmm_local_natparam, lds_expected_stats):
    init_params, pair_params, _ = hmm_local_natparam
    node_params = get_arhmm_local_nodeparams(lds_global_natparam,
                                             lds_expected_stats)
    local_natparam = tuple_(init_params, pair_params, node_params)
    return hmm_logZ(local_natparam)
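hmm_logZ is the log-partition function of the HMM local factor, here evaluated at node potentials built from the LDS expected statistics; that value is what enters the surrogate lower bound. A minimal sketch of an HMM log-normalizer via the forward algorithm in log space (toy sizes and uniform parameters; this stands in for, but is not, the project's hmm_logZ):

import numpy as np
from scipy.special import logsumexp

def hmm_logZ_toy(log_init, log_trans, log_like):
    # forward recursion: log_alpha[j] = log p(x_{1:t}, z_t = j)
    log_alpha = log_init + log_like[0]
    for t in range(1, log_like.shape[0]):
        log_alpha = logsumexp(log_alpha[:, None] + log_trans, axis=0) + log_like[t]
    return logsumexp(log_alpha)

T, num_states = 10, 3
log_init = np.log(np.ones(num_states) / num_states)
log_trans = np.log(np.full((num_states, num_states), 1. / num_states))
log_like = np.random.randn(T, num_states)
lognorm = hmm_logZ_toy(log_init, log_trans, log_like)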
Example #5
def bind(result, step):
    # one backward smoothing step: advance the smoothed message and prepend
    # this time step's expected statistics (mu, ExxT, ExxnT)
    next_smooth, stats = result
    J, h, (mu, ExxT, ExxnT) = step(next_smooth)
    return tuple_(J, h, mu), [(mu, ExxT, ExxnT)] + stats
Example #6
def unit(filtered_message):
    # start the backward pass from the last filtered message; at the final
    # time step the smoothed marginal equals the filtered one, and there is
    # no cross-covariance term yet
    J, h = filtered_message
    mu, Sigma = natural_to_mean(filtered_message)
    ExxT = Sigma + np.outer(mu, mu)
    return tuple_(J, h, mu), [(mu, ExxT, 0.)]
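unit starts the backward (smoothing) pass from the last filtered message and bind folds in one smoothing step at a time, prepending that step's expected statistics, so a reduce over the per-time-step functions yields the smoothed message at t = 0 together with statistics ordered from first to last. A schematic of that fold shape with scalar placeholders (the toy unit, bind, and steps below mimic the structure only, not the project's wiring):

from functools import reduce

def toy_unit(last):
    return last, [last]

def toy_bind(result, step):
    next_smooth, stats = result
    smoothed = step(next_smooth)
    return smoothed, [smoothed] + stats          # prepend, like bind above

filtered = [1.0, 2.0, 3.0, 4.0]                  # pretend filtered messages
steps = [lambda nxt, f=f: 0.5 * (f + nxt) for f in filtered[:-1]]
first_smooth, stats = reduce(toy_bind, reversed(steps), toy_unit(filtered[-1]))
# stats runs from t = 0 to T-1 because each bind prepends its entry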
Example #7
File: nnet.py Project: lfywork/svae
def gaussian_info(inputs):
    J_input, h = np.split(inputs, 2, axis=-1)
    J = -1. / 2 * log1pexp(J_input)
    return tuple_((J, h))
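The recognition output is split in half along the last axis; -1/2 * log1pexp makes J strictly negative, i.e. a valid negative-half diagonal precision, and h is the linear natural parameter. Under that convention the implied mean and variance are sigmasq = -1/(2J) and mu = sigmasq * h; a small sketch with a NumPy stand-in for log1pexp (illustrative shapes, not the project's imports):

import numpy as np

log1pexp = lambda x: np.log1p(np.exp(x))

inputs = np.random.randn(4, 6)                 # toy recognition output, last dim = 2*D
J_input, h = np.split(inputs, 2, axis=-1)
J = -1. / 2 * log1pexp(J_input)                # strictly negative, so -2J > 0

sigmasq = -1. / (2. * J)                       # diagonal variance implied by J
mu = sigmasq * h                               # mean recovered from (J, h)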
Example #8
File: nnet.py Project: lfywork/svae
def gaussian_mean(inputs, sigmoid_mean=False):
    mu_input, sigmasq_input = np.split(inputs, 2, axis=-1)
    mu = sigmoid(mu_input) if sigmoid_mean else mu_input
    sigmasq = log1pexp(sigmasq_input)
    return tuple_((mu, sigmasq))
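Here the two halves parameterize the observation mean and variance directly: the mean is optionally squashed through a sigmoid (useful when observations live in [0, 1]) and the variance goes through log1pexp so it stays positive. A short usage sketch with NumPy stand-ins for sigmoid and log1pexp (illustrative, not the project's imports):

import numpy as np

sigmoid = lambda x: 1. / (1. + np.exp(-x))
log1pexp = lambda x: np.log1p(np.exp(x))

inputs = np.random.randn(4, 6)                 # toy decoder output, last dim = 2*D
mu_input, sigmasq_input = np.split(inputs, 2, axis=-1)
mu = sigmoid(mu_input)                         # sigmoid_mean=True keeps mu in (0, 1)
sigmasq = log1pexp(sigmasq_input)              # strictly positive variances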