Example #1
    def compare_smoother_grads(lds):
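        # Compare the two smoother implementations (py_fun below wraps
        # natural_smoother_general, cy_fun wraps the underscore-prefixed variant
        # on dense messages): contract each output with the same random 'dotter'
        # structure, then check that the values and the gradients with respect
        # to the messages agree.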
        init_params, pair_params, node_params = lds

        symmetrize = make_unop(lambda x: (x + x.T)/2. if np.ndim(x) == 2 else x, tuple)

        messages, _ = natural_filter_forward_general(*lds)
        dotter = randn_like(natural_smoother_general(messages, *lds))

        def py_fun(messages):
            result = natural_smoother_general(messages, *lds)
            assert shape(result) == shape(dotter)
            return contract(dotter, result)

        dense_messages, _ = _natural_filter_forward_general(
            init_params, pair_params, node_params)
        def cy_fun(messages):
            result = _natural_smoother_general(messages, pair_params)
            result = result[0][:3], result[1], result[2]
            assert shape(result) == shape(dotter)
            return contract(dotter, result)

        result_py = py_fun(messages)
        result_cy = cy_fun(dense_messages)
        assert np.isclose(result_py, result_cy)

        g_py = grad(py_fun)(messages)
        g_cy = unpack_dense_messages(grad(cy_fun)(dense_messages))

        assert allclose(g_py, g_cy)
Example #2
    def compare_smoother_grads(lds):
        init_params, pair_params, node_params = lds

        symmetrize = make_unop(
            lambda x: (x + x.T) / 2. if np.ndim(x) == 2 else x, tuple)

        messages, _ = natural_filter_forward_general(*lds)
        dotter = randn_like(natural_smoother_general(messages, *lds))

        def py_fun(messages):
            result = natural_smoother_general(messages, *lds)
            assert shape(result) == shape(dotter)
            return contract(dotter, result)

        dense_messages, _ = _natural_filter_forward_general(
            init_params, pair_params, node_params)

        def cy_fun(messages):
            result = _natural_smoother_general(messages, pair_params)
            result = result[0][:3], result[1], result[2]
            assert shape(result) == shape(dotter)
            return contract(dotter, result)

        result_py = py_fun(messages)
        result_cy = cy_fun(dense_messages)
        assert np.isclose(result_py, result_cy)

        g_py = grad(py_fun)(messages)
        g_cy = unpack_dense_messages(grad(cy_fun)(dense_messages))

        assert allclose(g_py, g_cy)
Example #3
def test_compute_stats_grad():
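    # Check the hand-written gradient _compute_stats_grad against grad() of
    # compute_stats, with the trailing flag set both ways (True/1. and False/0.).
    # Ex, ExxT, ExnxT are assumed to be expected-statistics arrays defined at
    # module level in the test file.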
    F = make_unop(lambda x: np.require(x, np.double, 'F'), tuple)

    dotter = F(randn_like(compute_stats(Ex, ExxT, ExnxT, True)))
    g1 = grad(lambda x: contract(dotter, compute_stats(*x)))((Ex, ExxT, ExnxT, 1.))
    g2 = _compute_stats_grad(dotter)
    assert allclose(g1[:3], g2)

    dotter = F(randn_like(compute_stats(Ex, ExxT, ExnxT, False)))
    g1 = grad(lambda x: contract(dotter, compute_stats(*x)))((Ex, ExxT, ExnxT, 0.))
    g2 = _compute_stats_grad(dotter)
    assert allclose(g1[:3], g2)
Example #4
        init_natparam, pair_natparam = lds_natparam
        nu, S, M, K = mniw.natural_to_standard(pair_natparam)
        # new_M = M
        # new_M = npr.randn(*M.shape)
        new_M = M + 0.2*npr.randn(*M.shape)
        return init_natparam, mniw.standard_to_natural(nu, S, new_M, K)

    natparam = make_slds_global_natparam_from_truth(As, Bs)
    hmm_natparam, lds_natparams = natparam
    return hmm_natparam, map(replace_lds_natparam, lds_natparams)

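# Zero out indices 120:140 of (a copy of) each array; applied below to mask that
# block of the recognition network's node potentials before local mean-field inference.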
def _zero(arr):
    arr = np.copy(arr)
    arr[120:140] = 0
    return arr
zero = make_unop(_zero, tuple)

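# Run the local mean-field optimization and plot the most likely discrete HMM
# state at each timestep alongside the data.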
def show_states(params, data):
    from svae.models.slds_svae import optimize_local_meanfield

    natparam, phi, psi = params
    hmm_global_natparam, lds_global_natparam = natparam

    node_potentials = zero(linear_recognize(data, psi))
    (hmm_stats, _), _, _ = optimize_local_meanfield(natparam, node_potentials)

    pcolor_states(data, hmm_stats[2].argmax(1))

def pcolor_states(data, stateseq, cmap=plt.cm.Set1):
    fig, ax = plt.subplots()
Example #5
def matshow(A):
    print(A)
    # plt.matshow(A, cmap='plasma')
    # plt.axis('off')
    # plt.savefig('lds_synth_fit.png')
    # plt.close()


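# Zero out the middle third of (a copy of) each array; applied below to mask that
# stretch of the node potentials before sampling the latent states.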
def _zero(x):
    sl = slice(len(x) // 3, 2 * (len(x) // 3))
    out = np.copy(x)
    out[sl] = 0
    return out


zero = make_unop(_zero, tuple)


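# Note: the call below hardcodes num_samples=1, so the num_samples argument is
# not forwarded; .mean(1) then averages out the samples axis.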
def sample_states(params, data, num_samples=1):
    natparam, phi, psi = params
    local_natparam = lds_prior_expectedstats(natparam)
    node_potentials = zero(linear_recognize(data, psi))
    return natural_lds_sample(local_natparam, node_potentials,
                              num_samples=1).mean(1)


def sample(params, data, num_samples=1):
    _, phi, _ = params    # phi holds the observation parameters (C, D)
    C, D = phi
    return np.dot(sample_states(params, data, num_samples), C.T)
Example #6
import numpy as np
import numpy.random as npr

from scipy.signal import sawtooth
from time import time

from svae.svae import make_gradfun
from svae.optimizers import adam, adadelta
from svae.util import zeros_like, make_unop

from svae.recognition_models import mlp_recognize, init_mlp_recognize
from svae.forward_models import mlp_loglike, mlp_decode, init_mlp_loglike
from svae.models.lds_svae import cython_run_inference as run_inference, \
    make_prior_natparam, lds_prior_expectedstats
from svae.lds.lds_inference import cython_natural_lds_sample as natural_lds_sample
import svae.lds.mniw as mniw
import svae.lds.niw as niw

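# zero_after_prefix(k) builds an op (lifted over tuples by make_unop) that keeps
# the first k leading entries of each array and zeros out everything after them.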
zero_after_prefix = lambda prefix: make_unop(
    lambda x: np.concatenate((x[:prefix], np.zeros_like(x[prefix:]))), tuple)

### data generation

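# triangle: a triangle wave built from a symmetric sawtooth; make_dot_trajectory
# gives the dot's 1-D position over time; make_renderer maps a position to a row
# of pixel intensities over `grid` with width `sigma`.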
triangle = lambda t: sawtooth(np.pi * t, width=0.5)
make_dot_trajectory = lambda x0, v: lambda t: triangle(v * (t + (1 + x0) / 2.))
make_renderer = lambda grid, sigma: lambda x: np.exp(-1. / 2 *
                                                     (x - grid)**2 / sigma**2)


def make_dot_data(image_width,
                  T,
                  num_steps,
                  x0=0.0,
                  v=0.5,
                  render_sigma=0.2,
Example #7
import numpy as np
import numpy.random as npr

from scipy.signal import sawtooth
from time import time

from svae.svae import make_gradfun
from svae.optimizers import adam, adadelta
from svae.util import zeros_like, make_unop

from svae.recognition_models import mlp_recognize, init_mlp_recognize
from svae.forward_models import mlp_loglike, mlp_decode, init_mlp_loglike
from svae.models.lds_svae import cython_run_inference as run_inference, \
    make_prior_natparam, lds_prior_expectedstats
from svae.lds.lds_inference import cython_natural_lds_sample as natural_lds_sample
import svae.lds.mniw as mniw
import svae.lds.niw as niw

zero_after_prefix = lambda prefix: make_unop(lambda x: np.concatenate(
    (x[:prefix], np.zeros_like(x[prefix:]))), tuple)


### data generation

triangle = lambda t: sawtooth(np.pi*t, width=0.5)
make_dot_trajectory = lambda x0, v: lambda t: triangle(v*(t + (1+x0)/2.))
make_renderer = lambda grid, sigma: lambda x: np.exp(-1./2 * (x - grid)**2/sigma**2)

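# Render a 1-D "bouncing dot" movie: evaluate the triangle-wave trajectory at
# num_steps times in [0, T], render each position onto the pixel grid, and add
# Gaussian noise with standard deviation noise_sigma.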
def make_dot_data(image_width, T, num_steps, x0=0.0, v=0.5, render_sigma=0.2, noise_sigma=0.1):
    grid = np.linspace(-1, 1, image_width, endpoint=True)
    render = make_renderer(grid, render_sigma)
    x = make_dot_trajectory(x0, v)
    images = np.vstack([render(x(t)) for t in np.linspace(0, T, num_steps)])
    return images + noise_sigma * npr.randn(*images.shape)
Example #8
    return y

def matshow(A):
    print(A)
    # plt.matshow(A, cmap='plasma')
    # plt.axis('off')
    # plt.savefig('lds_synth_fit.png')
    # plt.close()

def _zero(x):
    sl = slice(len(x)//3, 2*(len(x)//3))
    out = np.copy(x)
    out[sl] = 0
    return out
zero = make_unop(_zero, tuple)

def sample_states(params, data, num_samples=1):
    natparam, phi, psi = params
    local_natparam = lds_prior_expectedstats(natparam)
    node_potentials = zero(linear_recognize(data, psi))
    return natural_lds_sample(local_natparam, node_potentials, num_samples=1).mean(1)

def sample(params, data, num_samples=1):
    _, phi, _ = params    # phi holds the observation parameters (C, D)
    C, D = phi
    return np.dot(sample_states(params, data, num_samples), C.T)

if __name__ == "__main__":
    npr.seed(0)
Example #9
        # new_M = npr.randn(*M.shape)
        new_M = M + 0.2 * npr.randn(*M.shape)
        return init_natparam, mniw.standard_to_natural(nu, S, new_M, K)

    natparam = make_slds_global_natparam_from_truth(As, Bs)
    hmm_natparam, lds_natparams = natparam
    return hmm_natparam, map(replace_lds_natparam, lds_natparams)


def _zero(arr):
    arr = np.copy(arr)
    arr[120:140] = 0
    return arr


zero = make_unop(_zero, tuple)


def show_states(params, data):
    from svae.models.slds_svae import optimize_local_meanfield

    natparam, phi, psi = params
    hmm_global_natparam, lds_global_natparam = natparam

    node_potentials = zero(linear_recognize(data, psi))
    (hmm_stats, _), _, _ = optimize_local_meanfield(natparam, node_potentials)

    pcolor_states(data, hmm_stats[2].argmax(1))


def pcolor_states(data, stateseq, cmap=plt.cm.Set1):