Code example #1
0
    def test_multivariate_message(self):
        p1, p2, p3 = mp.Plate(), mp.Plate(), mp.Plate()
        x_ = mp.Variable('x', p3, p1)
        y_ = mp.Variable('y', p1, p2)
        z_ = mp.Variable('z', p2, p3)

        n1, n2, n3 = shape = (2, 3, 4)

        def sumxyz(x, y, z):
            return (np.moveaxis(x[:, :, None], 0, 2) + y[:, :, None] + z[None])

        factor = mp.Factor(sumxyz, x=x_, y=y_, z=z_)

        x = np.arange(n3 * n1).reshape(n3, n1) * 0.1
        y = np.arange(n1 * n2).reshape(n1, n2) * 0.2
        z = np.arange(n2 * n3).reshape(n2, n3) * 0.3
        sumxyz(x, y, z)

        variables = {x_: x, y_: y, z_: z}
        factor(variables)

        model_dist = mp.MeanField({
            x_: mp.NormalMessage(x, 1 * np.ones_like(x)),
            y_: mp.NormalMessage(y, 1 * np.ones_like(y)),
            z_: mp.NormalMessage(z, 1 * np.ones_like(z)),
        })

        assert model_dist(variables).log_value.shape == shape
Code example #2
0
def test_factor_jacobian():
    shape = 4, 3
    z_ = mp.Variable('z', *(mp.Plate() for _ in shape))
    likelihood = mp.NormalMessage(np.random.randn(*shape),
                                  np.random.exponential(size=shape))
    likelihood_factor = likelihood.as_factor(z_)

    values = {z_: likelihood.sample()}
    fval, jval = likelihood_factor.func_jacobian(values, axis=None)
    ngrad = approx_fprime(values[z_].ravel(),
                          lambda x: likelihood.logpdf(x.reshape(*shape)).sum(),
                          1e-8).reshape(*shape)
    assert np.allclose(ngrad, jval[z_])
Code example #3
0
def test_meanfield_gradients():
    n1, n2, n3 = 2, 3, 5
    p1, p2, p3 = [graph.Plate() for i in range(3)]

    v1 = graph.Variable('v1', p1, p2)
    v2 = graph.Variable('v2', p2, p3)
    v3 = graph.Variable('v3', p3, p1)

    mean_field = graph.MeanField({
        v1:
        graph.NormalMessage(np.random.randn(n1, n2),
                            np.random.exponential(size=(n1, n2))),
        v2:
        graph.NormalMessage(np.random.randn(n2, n3),
                            np.random.exponential(size=(n2, n3))),
        v3:
        graph.NormalMessage(np.random.randn(n3, n1),
                            np.random.exponential(size=(n3, n1)))
    })

    values = mean_field.sample()
    l0 = mean_field(values, axis=None)
    logl = mean_field(values, axis=False)
    assert logl.sum() == pytest.approx(l0, abs=1e-5)
    logl = mean_field(values, axis=1)
    assert logl.sum() == pytest.approx(l0, abs=1e-5)
    logl = mean_field(values, axis=2)
    assert logl.sum() == pytest.approx(l0, abs=1e-5)
    logl = mean_field(values, axis=(0, 2))
    assert logl.sum() == pytest.approx(l0, abs=1e-5)

    njac0 = mean_field._numerical_func_jacobian(values, axis=None,
                                                _eps=1e-8)[1]
    njac1 = mean_field._numerical_func_jacobian(values, axis=1, _eps=1e-8)[1]
    njac2 = mean_field._numerical_func_jacobian(values, axis=(0, 1),
                                                _eps=1e-8)[1]
    njac = mean_field._numerical_func_jacobian_hessian(values,
                                                       axis=False,
                                                       _eps=1e-8)[1]
    grad = mean_field.logpdf_gradient(values, axis=False)[1]
    for v in grad:
        norm = np.linalg.norm(grad[v] - njac[v].sum((0, 1, 2)))
        assert norm == pytest.approx(0, abs=1e-2)
        norm = np.linalg.norm(grad[v] - njac0[v])
        assert norm == pytest.approx(0, abs=1e-2)
        norm = np.linalg.norm(grad[v] - njac1[v].sum((0, 1)))
        assert norm == pytest.approx(0, abs=1e-2)
        norm = np.linalg.norm(grad[v] - njac2[v].sum(0))
        assert norm == pytest.approx(0, abs=1e-2)
Code example #4
0
def test_simple_transform_diagonal():
    # testing DiagonalTransform
    d = 5
    scale = np.random.exponential(size=d)
    A = np.diag(scale**-1)
    b = np.random.randn(d)

    x = graph.Variable('x', graph.Plate())
    x0 = np.random.randn(d)
    param_shapes = graph.utils.FlattenArrays({x: (d, )})

    def likelihood(x):
        x = x - b
        return 0.5 * np.linalg.multi_dot((x, A, x))

    factor = graph.Factor(likelihood, x=x, is_scalar=True)
    func = factor.flatten(param_shapes)

    res = optimize.minimize(func, x0)
    assert np.allclose(res.x, b, rtol=1e-2, atol=1e-2)
    H, iA = res.hess_inv, np.linalg.inv(A)
    # check R2 score
    assert 1 - np.square(H - iA).mean() / np.square(iA).mean() > 0.95

    scale = np.random.exponential(size=d)
    A = np.diag(scale**-2)

    diag = transform.DiagonalTransform(scale)
    whiten = transform.VariableTransform({x: diag})
    white_factor = transform.TransformedNode(factor, whiten)
    white_func = white_factor.flatten(param_shapes)

    y0 = diag * x0

    res = optimize.minimize(white_func, y0)
    assert np.allclose(res.x, diag * b)
    H, iA = res.hess_inv, np.eye(d)
    # check R2 score
    assert 1 - np.square(H - iA).mean() / np.square(iA).mean() > 0.95

    # testing gradients
    grad = white_func.jacobian(y0)
    ngrad = optimize.approx_fprime(y0, white_func, 1e-6)
    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)
Code example #5
0
def test_simple_transform_cholesky():

    np.random.seed(0)

    d = 5
    A = stats.wishart(d, np.eye(d)).rvs()
    b = np.random.rand(d)

    def likelihood(x):
        x = x - b
        return 0.5 * np.linalg.multi_dot((x, A, x))

    x = graph.Variable('x', graph.Plate())
    x0 = np.random.randn(d)

    factor = graph.Factor(likelihood, x=x, is_scalar=True)
    param_shapes = graph.utils.FlattenArrays({x: (d, )})
    func = factor.flatten(param_shapes)

    res = optimize.minimize(func, x0)
    assert np.allclose(res.x, b, rtol=1e-2)
    H, iA = res.hess_inv, np.linalg.inv(A)
    # check R2 score
    assert 1 - np.square(H - iA).mean() / np.square(iA).mean() > 0.95

    # cho = transform.CholeskyTransform(linalg.cho_factor(A))
    cho = transform.CholeskyTransform.from_dense(A)
    whiten = transform.VariableTransform({x: cho})
    white_factor = transform.TransformedNode(factor, whiten)
    white_func = white_factor.flatten(param_shapes)

    y0 = cho * x0

    res = optimize.minimize(white_func, y0)
    assert np.allclose(res.x, cho * b, atol=1e-3, rtol=1e-3)
    assert np.allclose(res.hess_inv, np.eye(d), atol=1e-3, rtol=1e-3)

    # testing gradients

    grad = white_func.jacobian(y0)
    ngrad = optimize.approx_fprime(y0, white_func, 1e-6)
    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    # testing CovarianceTransform,

    cho = transform.CovarianceTransform.from_dense(iA)
    whiten = transform.VariableTransform({x: cho})
    white_factor = transform.TransformedNode(factor, whiten)
    white_func = white_factor.flatten(param_shapes)

    y0 = cho * x0

    res = optimize.minimize(white_func, y0)
    assert np.allclose(res.x, cho * b, atol=1e-3, rtol=1e-3)
    assert np.allclose(res.hess_inv, np.eye(d), atol=1e-3, rtol=1e-3)

    # testing gradients

    grad = white_func.jacobian(y0)
    ngrad = optimize.approx_fprime(y0, white_func, 1e-6)
    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    # testing FullCholeskyTransform

    whiten = transform.FullCholeskyTransform(cho, param_shapes)
    white_factor = transform.TransformedNode(factor, whiten)
    white_func = white_factor.flatten(param_shapes)

    y0 = cho * x0

    res = optimize.minimize(white_func, y0)
    assert np.allclose(res.x, cho * b, atol=1e-3, rtol=1e-3)
    assert np.allclose(res.hess_inv, np.eye(d), atol=1e-3, rtol=1e-3)

    # testing gradients

    grad = white_func.jacobian(y0)
    ngrad = optimize.approx_fprime(y0, white_func, 1e-6)
    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)
Code example #6
0
def test_complex_transform():

    n1, n2, n3 = 2, 3, 2
    d = n1 + n2 * n3

    A = stats.wishart(d, np.eye(d)).rvs()
    b = np.random.rand(d)

    p1, p2, p3 = (graph.Plate() for i in range(3))
    x1 = graph.Variable('x1', p1)
    x2 = graph.Variable('x2', p2, p3)

    mean_field = graph.MeanField({
        x1:
        graph.NormalMessage(np.zeros(n1), 100 * np.ones(n1)),
        x2:
        graph.NormalMessage(np.zeros((n2, n3)), 100 * np.ones((n2, n3))),
    })

    values = mean_field.sample()
    param_shapes = graph.utils.FlattenArrays(
        {v: x.shape
         for v, x in values.items()})

    def likelihood(x1, x2):
        x = np.r_[x1, x2.ravel()] - b
        return 0.5 * np.linalg.multi_dot((x, A, x))

    factor = graph.Factor(likelihood, x1=x1, x2=x2, is_scalar=True)

    cho = transform.CholeskyTransform(linalg.cho_factor(A))
    whiten = transform.FullCholeskyTransform(cho, param_shapes)
    trans_factor = transform.TransformedNode(factor, whiten)

    values = mean_field.sample()
    transformed = whiten * values

    assert np.allclose(factor(values), trans_factor(transformed))

    njac = trans_factor._numerical_func_jacobian(transformed)[1]
    jac = trans_factor.jacobian(transformed)
    ngrad = param_shapes.flatten(njac)
    grad = param_shapes.flatten(jac)

    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    # test VariableTransform with CholeskyTransform
    var_cov = {
        v: (X.reshape((int(X.size**0.5), ) * 2))
        for v, X in param_shapes.unflatten(linalg.inv(A)).items()
    }
    cho_factors = {
        v: transform.CholeskyTransform(linalg.cho_factor(linalg.inv(cov)))
        for v, cov in var_cov.items()
    }
    whiten = transform.VariableTransform(cho_factors)
    trans_factor = transform.TransformedNode(factor, whiten)

    values = mean_field.sample()
    transformed = whiten * values

    assert np.allclose(factor(values), trans_factor(transformed))

    njac = trans_factor._numerical_func_jacobian(transformed)[1]
    jac = trans_factor.jacobian(transformed)
    ngrad = param_shapes.flatten(njac)
    grad = param_shapes.flatten(jac)

    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    res = optimize.minimize(trans_factor.flatten(param_shapes).func_jacobian,
                            param_shapes.flatten(transformed),
                            method='BFGS',
                            jac=True)
    assert res.hess_inv.diagonal() == pytest.approx(1., rel=1e-1)

    # test VariableTransform with CholeskyTransform
    diag_factors = {
        v: transform.DiagonalTransform(cov.diagonal()**0.5)
        for v, cov in var_cov.items()
    }
    whiten = transform.VariableTransform(diag_factors)
    trans_factor = transform.TransformedNode(factor, whiten)

    values = mean_field.sample()
    transformed = whiten * values

    assert np.allclose(factor(values), trans_factor(transformed))

    njac = trans_factor._numerical_func_jacobian(transformed)[1]
    jac = trans_factor.jacobian(transformed)
    ngrad = param_shapes.flatten(njac)
    grad = param_shapes.flatten(jac)

    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    res = optimize.minimize(trans_factor.flatten(param_shapes).func_jacobian,
                            param_shapes.flatten(transformed),
                            method='BFGS',
                            jac=True)
    assert res.hess_inv.diagonal() == pytest.approx(1., rel=1e-1)
Code example #7
0

np.random.seed(1)

error_std = 1.
prior_std = 10.
a = np.array([[-1.3], [0.7]])
b = np.array([-0.5])

n_obs = 100
n_features, n_dims = a.shape

x = 5 * np.random.randn(n_obs, n_features)
y = x.dot(a) + b + np.random.randn(n_obs, n_dims)

obs = graph.Plate(name='obs')
features = graph.Plate(name='features')
dims = graph.Plate(name='dims')

x_ = graph.Variable('x', obs, features)
a_ = graph.Variable('a', features, dims)
b_ = graph.Variable('b', dims)
y_ = graph.Variable('y', obs, dims)
z_ = graph.Variable('z', obs, dims)

def make_model():
    prior_norm = stats.norm(loc=0, scale=prior_std)

    def prior(x):
        return prior_norm.logpdf(x).sum()