from __future__ import division
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad
from autograd.core import primitive_with_aux

from svae.hmm.hmm_inference import hmm_logZ_python as python_hmm_logZ
from svae.hmm.cython_hmm_inference import hmm_logZ_normalized as cython_hmm_logZ_normalized, \
    hmm_logZ as cython_hmm_logZ, hmm_logZ_grad as cython_hmm_logZ_grad
from svae.util import allclose


# Register the cython log-partition routine as an autograd primitive that
# also returns auxiliary intermediates for the backward pass.
cython_hmm_logZ = primitive_with_aux(cython_hmm_logZ)


def make_grad_hmm_logZ(intermediates, ans, hmm):
    """Gradient-maker for the cython HMM log-partition primitive.

    Returns a vjp closure that forwards the incoming gradient ``g`` and the
    intermediates saved on the forward pass to the cython gradient routine.
    """
    # `ans` and `hmm` are part of autograd's gradmaker signature but are not
    # needed here; the original unpacked `pair_params` from `hmm` without
    # ever using it, so that dead unpacking has been removed.
    return lambda g: cython_hmm_logZ_grad(g, intermediates)


cython_hmm_logZ.defgrad(make_grad_hmm_logZ)


### util

def rand_hmm(n, T):
    """Draw a random HMM parameter tuple: (init, pair, node potentials)."""
    init = random_init_param(n)
    pair = random_pair_param(n)
    nodes = random_node_potentials(n, T)
    return init, pair, nodes

def random_init_param(n):
    """Sample an n-vector of standard-normal initial-state parameters."""
    theta = npr.randn(n)
    return theta

def random_pair_param(n):
    """Sample an (n, n) matrix of standard-normal pairwise parameters."""
    theta = npr.randn(n, n)
    return theta

def random_node_potentials(n, T):
# ===== 示例#2 (Example 2) — scraped example separator =====
    add, shape, zeros_like
from gaussian import mean_to_natural, pair_mean_to_natural, natural_sample, \
    natural_condition_on, natural_condition_on_general, natural_to_mean, \
    natural_rts_backward_step
# from gaussian import natural_predict, natural_lognorm
from gaussian_nochol import natural_predict, natural_lognorm

from cython_lds_inference import \
    natural_filter_forward_general as cython_natural_filter_forward, \
    natural_filter_grad as cython_natural_filter_grad, \
    natural_sample_backward as cython_natural_sample_backward, \
    natural_sample_backward_grad as cython_natural_sample_backward_grad, \
    natural_smoother_general as cython_natural_smoother_general, \
    natural_smoother_general_grad as cython_natural_smoother_grad

# Register the cython forward filter as an autograd primitive that also
# hands back the intermediates saved during the forward pass.
cython_natural_filter_forward = primitive_with_aux(
    cython_natural_filter_forward)


def make_natural_filter_grad_arg2(intermediates, ans, init_params, pair_params,
                                  node_params):
    """Gradient-maker (w.r.t. argnum 2, the node params) for the filter."""
    # Only the saved intermediates are needed for the backward pass; the
    # remaining arguments are required by autograd's gradmaker signature.
    def vjp(g):
        return cython_natural_filter_grad(g, intermediates)

    return primitive(vjp)


cython_natural_filter_forward.defgrad(make_natural_filter_grad_arg2, 2)

# Wrap the cython backward sampler so autograd treats it as a primitive and
# keeps the auxiliary intermediates computed on the forward pass.
cython_natural_sample_backward = primitive_with_aux(
    cython_natural_sample_backward)


def make_natural_sample_grad_arg0(intermediates, ans, messages, pair_params,
                                  num_samples):
# ===== 示例#3 (Example 3) — scraped example separator =====
from __future__ import division
import autograd.numpy as np
from autograd import grad
from autograd import value_and_grad as vgrad
from autograd.scipy.misc import logsumexp
from autograd.core import primitive_with_aux, primitive

from pyhsmm.internals.hmm_messages_interface import \
    messages_backwards_log, messages_forwards_log, expected_statistics_log, \
    viterbi as _viterbi

from cython_hmm_inference import hmm_logZ, hmm_logZ_grad

# Register the cython log-partition routine as an autograd primitive that
# also returns auxiliary intermediates for the backward pass.
hmm_logZ = primitive_with_aux(hmm_logZ)


def make_grad_hmm_logZ(intermediates, ans, hmm):
    """Gradient-maker for the HMM log-partition primitive.

    Forwards the incoming gradient ``g`` and the saved intermediates to the
    cython vjp, wrapped in ``primitive`` so autograd does not trace it.
    """
    # `ans` and `hmm` are required by the gradmaker signature but unused;
    # the original unpacked `pair_params` from `hmm` without using it, so
    # that dead unpacking has been removed.
    return primitive(lambda g: hmm_logZ_grad(g, intermediates))


hmm_logZ.defgrad(make_grad_hmm_logZ)


def hmm_estep(natparam):
    C = lambda x: np.require(x, np.double, 'C')
    init_params, pair_params, node_params = map(C, natparam)

    # compute messages
    alphal = messages_forwards_log(
        np.exp(pair_params), node_params, np.exp(init_params),
        np.zeros_like(node_params))
    betal = messages_backwards_log(
        np.exp(pair_params), node_params,
        np.zeros_like(node_params))
# ===== 示例#4 (Example 4) — scraped example separator =====
from __future__ import division
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad
from autograd.core import primitive_with_aux

from svae.hmm.hmm_inference import hmm_logZ_python as python_hmm_logZ
from svae.hmm.cython_hmm_inference import hmm_logZ_normalized as cython_hmm_logZ_normalized, \
    hmm_logZ as cython_hmm_logZ, hmm_logZ_grad as cython_hmm_logZ_grad
from svae.util import allclose

# Register the cython log-partition routine as an autograd primitive that
# also returns auxiliary intermediates for the backward pass.
cython_hmm_logZ = primitive_with_aux(cython_hmm_logZ)


def make_grad_hmm_logZ(intermediates, ans, hmm):
    """Gradient-maker for the cython HMM log-partition primitive.

    Returns a vjp closure that forwards the incoming gradient ``g`` and the
    intermediates saved on the forward pass to the cython gradient routine.
    """
    # `ans` and `hmm` are part of autograd's gradmaker signature but unused;
    # the original unpacked `pair_params` from `hmm` without using it, so
    # that dead unpacking has been removed.
    return lambda g: cython_hmm_logZ_grad(g, intermediates)


cython_hmm_logZ.defgrad(make_grad_hmm_logZ)

### util


def rand_hmm(n, T):
    """Draw a random HMM parameter tuple: (init, pair, node potentials)."""
    init = random_init_param(n)
    pair = random_pair_param(n)
    nodes = random_node_potentials(n, T)
    return init, pair, nodes


def random_init_param(n):
    """Sample an n-vector of standard-normal initial-state parameters."""
    theta = npr.randn(n)
    return theta
# ===== 示例#5 (Example 5) — scraped example separator =====
from svae.lds.lds_inference import natural_filter_forward_general, \
    natural_smoother_general, natural_sample_backward_general

from svae.lds.cython_lds_inference import natural_filter_grad, \
    natural_filter_forward_general as _natural_filter_forward_general, \
    natural_smoother_general as _natural_smoother_general, \
    natural_sample_backward as _natural_sample_backward, \
    natural_sample_backward_grad, natural_smoother_general_grad

from svae.util import interleave, randn_like, contract, shape, make_unop

from test_lds_info_inference_dense import random_init_param, random_pair_param, rand_psd


# Register the cython forward filter as an autograd primitive that also
# hands back the intermediates saved during the forward pass.
_natural_filter_forward_general = primitive_with_aux(
    _natural_filter_forward_general)


def make_natural_filter_grad_arg2(intermediates, ans, init_params,
                                  pair_params, node_params):
    """Gradient-maker (w.r.t. argnum 2, the node params) for the filter."""
    def vjp(g):
        return natural_filter_grad(g, intermediates)

    return vjp


_natural_filter_forward_general.defgrad(make_natural_filter_grad_arg2, 2)

# Register the cython backward sampler as an autograd primitive that also
# hands back the intermediates saved during the forward pass.
_natural_sample_backward = primitive_with_aux(_natural_sample_backward)


def make_natural_sample_grad_arg0(intermediates, ans, messages, pair_params,
                                  num_samples):
    """Gradient-maker (w.r.t. argnum 0, the messages) for the sampler."""
    def vjp(g):
        return natural_sample_backward_grad(g, intermediates)

    return vjp


_natural_sample_backward.defgrad(make_natural_sample_grad_arg0, 0)

# Register the cython smoother as an autograd primitive that also hands
# back the intermediates saved during the forward pass.
_natural_smoother_general = primitive_with_aux(_natural_smoother_general)


def make_natural_smoother_grad_arg0(intermediates, ans, forward_messages,
                                    pair_params):
    """Gradient-maker (w.r.t. argnum 0, the forward messages) for the smoother."""
    def vjp(g):
        return natural_smoother_general_grad(g, intermediates)

    return vjp


_natural_smoother_general.defgrad(make_natural_smoother_grad_arg0, 0)

### util