def test_multivariate_message(self):
    """A factor over three cross-linked plates evaluates to the full plate shape.

    Builds three plates connected pairwise by 2-D variables, checks the raw
    numpy function, the Factor wrapper and a MeanField model all broadcast to
    the joint shape (n1, n2, n3).
    """
    p1, p2, p3 = mp.Plate(), mp.Plate(), mp.Plate()
    x_ = mp.Variable('x', p3, p1)
    y_ = mp.Variable('y', p1, p2)
    z_ = mp.Variable('z', p2, p3)
    n1, n2, n3 = shape = (2, 3, 4)

    def sumxyz(x, y, z):
        # Align each 2-D input on the (n1, n2, n3) axes and broadcast-sum:
        # x is (n3, n1) -> (n1, 1, n3); y is (n1, n2) -> (n1, n2, 1);
        # z is (n2, n3) -> (1, n2, n3).
        return (np.moveaxis(x[:, :, None], 0, 2)
                + y[:, :, None]
                + z[None])

    factor = mp.Factor(sumxyz, x=x_, y=y_, z=z_)
    x = np.arange(n3 * n1).reshape(n3, n1) * 0.1
    y = np.arange(n1 * n2).reshape(n1, n2) * 0.2
    z = np.arange(n2 * n3).reshape(n2, n3) * 0.3

    # Previously this result was discarded; assert the broadcast shape instead.
    assert sumxyz(x, y, z).shape == shape

    variables = {x_: x, y_: y, z_: z}
    # Smoke check: the Factor wrapper accepts a variable-keyed dict.
    factor(variables)

    model_dist = mp.MeanField({
        x_: mp.NormalMessage(x, 1 * np.ones_like(x)),
        y_: mp.NormalMessage(y, 1 * np.ones_like(y)),
        z_: mp.NormalMessage(z, 1 * np.ones_like(z)),
    })
    assert model_dist(variables).log_value.shape == shape
def make_approx(factor):
    """Wrap a single factor in an EPMeanField with unit GaussianPriors.

    Every variable of *factor* receives a GaussianPrior(mean=0, sigma=1) in
    the factor's mean-field approximation.
    """
    priors = {
        variable: af.GaussianPrior(mean=0, sigma=1)
        for variable in factor.variables
    }
    return g.EPMeanField(
        factor_graph=g.FactorGraph([factor]),
        factor_mean_field={factor: g.MeanField(priors)},
    )
def test_meanfield_gradients():
    """MeanField log-pdf sums and gradients agree across axis reductions.

    Checks that partial-axis evaluations sum to the scalar log-likelihood,
    and that the analytic gradient matches numerical jacobians computed with
    several different axis settings.
    """
    n1, n2, n3 = 2, 3, 5
    p1, p2, p3 = [graph.Plate() for i in range(3)]
    v1 = graph.Variable('v1', p1, p2)
    v2 = graph.Variable('v2', p2, p3)
    v3 = graph.Variable('v3', p3, p1)

    shapes = {v1: (n1, n2), v2: (n2, n3), v3: (n3, n1)}
    mean_field = graph.MeanField({
        v: graph.NormalMessage(
            np.random.randn(*shape),
            np.random.exponential(size=shape))
        for v, shape in shapes.items()
    })

    values = mean_field.sample()
    l0 = mean_field(values, axis=None)

    # Every partial reduction must sum back to the scalar value.
    for ax in (False, 1, 2, (0, 2)):
        assert mean_field(values, axis=ax).sum() == pytest.approx(l0, abs=1e-5)

    eps = 1e-8
    njac0 = mean_field._numerical_func_jacobian(values, axis=None, _eps=eps)[1]
    njac1 = mean_field._numerical_func_jacobian(values, axis=1, _eps=eps)[1]
    njac2 = mean_field._numerical_func_jacobian(values, axis=(0, 1), _eps=eps)[1]
    njac = mean_field._numerical_func_jacobian_hessian(
        values, axis=False, _eps=eps)[1]
    grad = mean_field.logpdf_gradient(values, axis=False)[1]

    # The analytic gradient must match each numerical jacobian after summing
    # out the extra leading axes it carries.
    for v, g_v in grad.items():
        for numerical in (
                njac[v].sum((0, 1, 2)),
                njac0[v],
                njac1[v].sum((0, 1)),
                njac2[v].sum(0),
        ):
            assert np.linalg.norm(g_v - numerical) == pytest.approx(0, abs=1e-2)
def test_laplace_method(probit_factor, q_cavity, x):
    """Laplace approximation of the probit factor recovers known mode/width."""
    probit_approx = mp.FactorApproximation(
        factor=probit_factor,
        cavity_dist={x: q_cavity},
        factor_dist={},
        model_dist=mp.MeanField({x: q_cavity}),
    )
    result = mp.OptFactor.from_approx(probit_approx).maximise({x: 0.})

    # Gaussian fitted at the optimum using the inverse Hessian as covariance.
    q_probit_laplace = autofit.graphical.messages.normal.NormalMessage.from_mode(
        result.mode[x],
        covariance=result.hess_inv[x],
    )
    assert q_probit_laplace.mu == pytest.approx(-0.258, rel=0.01)
    assert q_probit_laplace.sigma == pytest.approx(0.462, rel=0.01)
def test_laplace_method(probit_factor, q_cavity, x):
    """LaplaceOptimiser on the probit factor reproduces the known posterior."""
    mf = graph.MeanField({x: q_cavity})
    approx = graph.FactorApproximation(
        factor=probit_factor,
        cavity_dist=mf,
        factor_dist=mf,
        model_dist=mf,
    )
    optimiser = graph.LaplaceOptimiser()
    new_dist, s = optimiser.optimise_approx(approx)

    fitted = new_dist[x]
    assert fitted.mean == pytest.approx(-0.258, rel=0.01)
    assert fitted.sigma == pytest.approx(0.462, rel=0.01)
def _assert_whitening_consistent(factor, whiten, param_shapes, mean_field,
                                 check_curvature=False):
    """Check one whitening transform of *factor* for value/gradient consistency.

    Samples fresh values from *mean_field*, verifies the transformed node
    evaluates to the same value as the original factor, and that its analytic
    jacobian matches a numerical one. With ``check_curvature=True``, also runs
    a BFGS minimisation and asserts the whitened Hessian inverse is close to
    the identity (diagonal ~ 1).
    """
    trans_factor = transform.TransformedNode(factor, whiten)
    values = mean_field.sample()
    transformed = whiten * values

    # The reparametrisation must not change the factor value.
    assert np.allclose(factor(values), trans_factor(transformed))

    njac = trans_factor._numerical_func_jacobian(transformed)[1]
    jac = trans_factor.jacobian(transformed)
    ngrad = param_shapes.flatten(njac)
    grad = param_shapes.flatten(jac)
    assert np.allclose(grad, ngrad, atol=1e-3, rtol=1e-3)

    if check_curvature:
        res = optimize.minimize(
            trans_factor.flatten(param_shapes).func_jacobian,
            param_shapes.flatten(transformed),
            method='BFGS',
            jac=True,
        )
        # Whitening should make the problem's curvature approximately unit.
        assert res.hess_inv.diagonal() == pytest.approx(1., rel=1e-1)


def test_complex_transform():
    """Whitening transforms of a quadratic factor preserve values/gradients.

    Exercises FullCholeskyTransform over the joint precision, then
    VariableTransform built from per-variable Cholesky factors, then
    VariableTransform built from DiagonalTransforms.
    """
    n1, n2, n3 = 2, 3, 2
    d = n1 + n2 * n3
    A = stats.wishart(d, np.eye(d)).rvs()
    b = np.random.rand(d)

    p1, p2, p3 = (graph.Plate() for i in range(3))
    x1 = graph.Variable('x1', p1)
    x2 = graph.Variable('x2', p2, p3)
    mean_field = graph.MeanField({
        x1: graph.NormalMessage(np.zeros(n1), 100 * np.ones(n1)),
        x2: graph.NormalMessage(np.zeros((n2, n3)), 100 * np.ones((n2, n3))),
    })
    values = mean_field.sample()
    param_shapes = graph.utils.FlattenArrays(
        {v: x.shape for v, x in values.items()})

    def likelihood(x1, x2):
        # Quadratic form centred on b with precision matrix A.
        x = np.r_[x1, x2.ravel()] - b
        return 0.5 * np.linalg.multi_dot((x, A, x))

    factor = graph.Factor(likelihood, x1=x1, x2=x2, is_scalar=True)

    # FullCholeskyTransform over the joint precision matrix.
    cho = transform.CholeskyTransform(linalg.cho_factor(A))
    whiten = transform.FullCholeskyTransform(cho, param_shapes)
    _assert_whitening_consistent(factor, whiten, param_shapes, mean_field)

    # VariableTransform with per-variable CholeskyTransforms, built from the
    # marginal covariance blocks of A^-1.
    var_cov = {
        v: X.reshape((int(X.size ** 0.5),) * 2)
        for v, X in param_shapes.unflatten(linalg.inv(A)).items()
    }
    cho_factors = {
        v: transform.CholeskyTransform(linalg.cho_factor(linalg.inv(cov)))
        for v, cov in var_cov.items()
    }
    _assert_whitening_consistent(
        factor, transform.VariableTransform(cho_factors), param_shapes,
        mean_field, check_curvature=True)

    # VariableTransform with DiagonalTransforms (was mislabelled as a second
    # CholeskyTransform test in the original comment).
    diag_factors = {
        v: transform.DiagonalTransform(cov.diagonal() ** 0.5)
        for v, cov in var_cov.items()
    }
    _assert_whitening_consistent(
        factor, transform.VariableTransform(diag_factors), param_shapes,
        mean_field, check_curvature=True)
def test():
    """An empty MeanField resolves an empty argument dict without error."""
    empty_mean_field = g.MeanField({})
    empty_mean_field.instance_for_arguments({})