Example #1
def _eval_determinant(self):
    # Determinant is 1 for a 1x1 matrix and 0 otherwise; if the shape is
    # symbolic, leave the determinant unevaluated.
    condition = Eq(self.shape[0], 1) & Eq(self.shape[1], 1)
    if condition == True:
        return S.One
    elif condition == False:
        return S.Zero
    else:
        from sympy import Determinant
        return Determinant(self)
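
For context, a minimal usage sketch of Determinant itself (assuming a recent SymPy release): the wrapper stays unevaluated for a symbolic MatrixSymbol, while an explicit Matrix can be evaluated directly or via doit().

from sympy import MatrixSymbol, Matrix, Determinant, det

X = MatrixSymbol('X', 3, 3)
print(Determinant(X))            # stays symbolic: Determinant(X)

M = Matrix([[1, 2], [3, 4]])
print(det(M))                    # -2, computed directly for an explicit matrix
print(Determinant(M).doit())     # -2, doit() forces evaluation of the wrapper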
Example #2
def test_matrix_derivative_with_inverse():

    # Cookbook example 61:
    expr = a.T*Inverse(X)*b
    assert expr.diff(X) == -Inverse(X).T*a*b.T*Inverse(X).T

    # Cookbook example 62:
    expr = Determinant(Inverse(X))
    # Not implemented yet:
    # assert expr.diff(X) == -Determinant(X.inv())*(X.inv()).T

    # Cookbook example 63:
    expr = Trace(A*Inverse(X)*B)
    assert expr.diff(X) == -(X**(-1)*B*A*X**(-1)).T

    # Cookbook example 64:
    expr = Trace(Inverse(X + A))
    assert expr.diff(X) == -(Inverse(X + A)).T**2
Example #3
def _eval_determinant(self):
    # Same logic via the _is_1x1 helper: 1 for a 1x1 matrix, 0 otherwise,
    # unevaluated when the shape is symbolic.
    condition = self._is_1x1()
    if condition == True:
        return S.One
    elif condition == False:
        return S.Zero
    else:
        from sympy import Determinant
        return Determinant(self)
Example #4
def test_matrix_derivative_with_inverse():

    # Cookbook example 61:
    expr = a.T * Inverse(X) * b
    assert expr.diff(X) == -Inverse(X).T * a * b.T * Inverse(X).T

    # Cookbook example 62:
    expr = Determinant(Inverse(X))
    # Not implemented yet:
    # assert expr.diff(X) == -Determinant(X.inv())*(X.inv()).T

    # Cookbook example 63:
    expr = Trace(A * Inverse(X) * B)
    assert expr.diff(X) == -(X**(-1) * B * A * X**(-1)).T

    # Cookbook example 64:
    expr = Trace(Inverse(X + A))
    assert expr.diff(X) == -(Inverse(X + A)).T**2
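
The asserted results are the corresponding Matrix Cookbook identities (examples 61, 63 and 64; example 62 is left commented out because the Determinant case was not implemented at the time):

\frac{\partial\, a^{\top} X^{-1} b}{\partial X} = -X^{-\top} a\, b^{\top} X^{-\top}

\frac{\partial\, \operatorname{tr}(A X^{-1} B)}{\partial X} = -\left(X^{-1} B A X^{-1}\right)^{\top}

\frac{\partial\, \operatorname{tr}\left((X + A)^{-1}\right)}{\partial X} = -\left((X + A)^{-2}\right)^{\top}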
Example #5
    def Elogp(self, p, to_full_expr=False):
        """
        Calculate the expectation of log p with respect to this distribution,
        where p is also a distribution.

        Args:
            p - An MVG object
            to_full_expr - Set to True to use the full expressions for the
                           mean and covariance of this MVG

        Returns:
            res - The value of this expectation
        """
        a, A = self.mean, self.covar
        b, B = p.mean, p.covar

        if to_full_expr:
            a = utils.expand_to_fullexpr(a)
            A = utils.expand_to_fullexpr(A)
            b = utils.expand_to_fullexpr(b)
            B = utils.expand_to_fullexpr(B)

        return -Rational(1, 2) * (ln(Determinant(2 * pi * B)) +
                                  Trace(B.I * A) + (a - b).T * B.I * (a - b))
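
The return value matches the standard Gaussian cross-entropy identity for x ~ N(a, A) and p = N(b, B):

\mathbb{E}_{x \sim \mathcal{N}(a,A)}\big[\log \mathcal{N}(x;\, b, B)\big] = -\tfrac{1}{2}\Big(\ln\det(2\pi B) + \operatorname{tr}\big(B^{-1} A\big) + (a-b)^{\top} B^{-1} (a-b)\Big)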
Example #6
from sympy import (Symbol, MatrixSymbol, ZeroMatrix, Add, Mul,
                   MatAdd, MatMul, Determinant, Inverse, Trace, Transpose)

from .symbols import d, Kron, SymmetricMatrixSymbol
from .simplifications import simplify_matdiff

MATRIX_DIFF_RULES = {
    # e = expression, s = a list of symbols with respect to which
    # we want to differentiate
    Symbol: lambda e, s: d(e) if (e in s) else 0,
    MatrixSymbol: lambda e, s: d(e) if (e in s) else ZeroMatrix(*e.shape),
    SymmetricMatrixSymbol: lambda e, s: d(e) if (e in s) else ZeroMatrix(*e.shape),
    Add: lambda e, s: Add(*[_matDiff_apply(arg, s) for arg in e.args]),
    # product rule for scalar Mul, matrix MatMul and Kronecker products
    Mul: lambda e, s: _matDiff_apply(e.args[0], s) if len(e.args) == 1
                      else Mul(_matDiff_apply(e.args[0], s), Mul(*e.args[1:]))
                      + Mul(e.args[0], _matDiff_apply(Mul(*e.args[1:]), s)),
    MatAdd: lambda e, s: MatAdd(*[_matDiff_apply(arg, s) for arg in e.args]),
    MatMul: lambda e, s: _matDiff_apply(e.args[0], s) if len(e.args) == 1
                         else MatMul(_matDiff_apply(e.args[0], s), MatMul(*e.args[1:]))
                         + MatMul(e.args[0], _matDiff_apply(MatMul(*e.args[1:]), s)),
    Kron: lambda e, s: _matDiff_apply(e.args[0], s) if len(e.args) == 1
                       else Kron(_matDiff_apply(e.args[0], s), Kron(*e.args[1:]))
                       + Kron(e.args[0], _matDiff_apply(Kron(*e.args[1:]), s)),
    # Jacobi's formula: d det(A) = det(A) * tr(A^{-1} dA)
    Determinant: lambda e, s: MatMul(Determinant(e.args[0]),
                                     Trace(e.args[0].I * _matDiff_apply(e.args[0], s))),
    # inverse always has 1 arg, so we index
    Inverse: lambda e, s: -Inverse(e.args[0]) * _matDiff_apply(e.args[0], s) * Inverse(e.args[0]),
    # trace always has 1 arg
    Trace: lambda e, s: Trace(_matDiff_apply(e.args[0], s)),
    # transpose also always has 1 arg, index
    Transpose: lambda e, s: Transpose(_matDiff_apply(e.args[0], s))
    }


def _matDiff_apply(expr, syms):
    if expr.__class__ in MATRIX_DIFF_RULES:
        return MATRIX_DIFF_RULES[expr.__class__](expr, syms)
    elif expr.is_constant():
        return 0
    else:
        # Assumed fallback (the original snippet is cut off here): fail loudly
        # on expression types that have no differentiation rule.
        raise TypeError("Don't know how to differentiate class %s" % expr.__class__)
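
For comparison, recent SymPy releases reproduce the Trace and Inverse rules above directly through diff on matrix expressions; a minimal sketch that does not use the d/Kron helpers from this module:

from sympy import MatrixSymbol, Inverse, Trace

X = MatrixSymbol('X', 3, 3)

# d/dX tr(X^{-1}); with matrix-expression derivatives this comes out as
# -(X^{-1})^T ** 2, the same result the Inverse and Trace rules above compose to.
print(Trace(Inverse(X)).diff(X))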
Example #7
def test_det_trace_positive():
    X = MatrixSymbol('X', 4, 4)
    assert ask(Q.positive(Trace(X)), Q.positive_definite(X))
    assert ask(Q.positive(Determinant(X)), Q.positive_definite(X))
Example #8
def test_Normal():
    m = Normal('A', [1, 2], [[1, 0], [0, 1]])
    A = MultivariateNormal('A', [1, 2], [[1, 0], [0, 1]])
    assert m == A
    assert density(m)(1, 2) == 1 / (2 * pi)
    assert m.pspace.distribution.set == ProductSet(S.Reals, S.Reals)
    raises(ValueError, lambda: m[2])
    n = Normal('B', [1, 2, 3], [[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    p = Normal('C', Matrix([1, 2]), Matrix([[1, 0], [0, 1]]))
    assert density(m)(x, y) == density(p)(x, y)
    assert marginal_distribution(n, 0, 1)(1, 2) == 1 / (2 * pi)
    raises(ValueError, lambda: marginal_distribution(m))
    assert integrate(density(m)(x, y), (x, -oo, oo), (y, -oo, oo)).evalf() == 1
    N = Normal('N', [1, 2], [[x, 0], [0, y]])
    assert density(N)(0, 0) == exp(-((4 * x + y) /
                                     (2 * x * y))) / (2 * pi * sqrt(x * y))

    raises(ValueError, lambda: Normal('M', [1, 2], [[1, 1], [1, -1]]))
    # symbolic
    n = symbols('n', natural=True)
    mu = MatrixSymbol('mu', n, 1)
    sigma = MatrixSymbol('sigma', n, n)
    X = Normal('X', mu, sigma)
    assert density(X) == MultivariateNormalDistribution(mu, sigma)
    raises(NotImplementedError, lambda: median(m))
    # Below tests should work after issue #17267 is resolved
    # assert E(X) == mu
    # assert variance(X) == sigma

    # test symbolic multivariate normal densities
    n = 3

    Sg = MatrixSymbol('Sg', n, n)
    mu = MatrixSymbol('mu', n, 1)
    obs = MatrixSymbol('obs', n, 1)

    X = MultivariateNormal('X', mu, Sg)
    density_X = density(X)

    eval_a = density_X(obs).subs({
        Sg: eye(3),
        mu: Matrix([0, 0, 0]),
        obs: Matrix([0, 0, 0])
    }).doit()
    eval_b = density_X(0, 0, 0).subs({
        Sg: eye(3),
        mu: Matrix([0, 0, 0])
    }).doit()

    assert eval_a == sqrt(2) / (4 * pi**Rational(3, 2))
    assert eval_b == sqrt(2) / (4 * pi**Rational(3, 2))

    n = symbols('n', natural=True)

    Sg = MatrixSymbol('Sg', n, n)
    mu = MatrixSymbol('mu', n, 1)
    obs = MatrixSymbol('obs', n, 1)

    X = MultivariateNormal('X', mu, Sg)
    density_X_at_obs = density(X)(obs)

    expected_density = MatrixElement(
        exp((S(1)/2) * (mu.T - obs.T) * Sg**(-1) * (-mu + obs)) / \
        sqrt((2*pi)**n * Determinant(Sg)), 0, 0)

    assert density_X_at_obs == expected_density
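
For reference, expected_density above is the (0, 0) entry of the standard multivariate normal density with \Sigma = Sg; the exponent in the test is the same quantity, with the minus sign absorbed into the (mu - obs) factor:

f(\mathrm{obs}) = \frac{\exp\!\big(-\tfrac{1}{2}\,(\mathrm{obs}-\mu)^{\top} \Sigma^{-1} (\mathrm{obs}-\mu)\big)}{\sqrt{(2\pi)^{n}\,\det\Sigma}}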
Example #9
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from sympy import (
    Identity,
    Matrix,
    MatrixSymbol,
    Determinant,
)

E = Identity(3)
print(E)
print(E.as_mutable())
print(E.as_explicit())

X = MatrixSymbol('X', 3, 3)
print(X)
print(X.T)
MX = Matrix(X)
print(MX)
print(Matrix(X.I))
print(Matrix(X.T))
print(Matrix(X.T * X))

print(Determinant(X))
# assert Determinant(X) == Determinant(X.T)
DX = Determinant(MX)
print(DX)
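
A small follow-up to the commented-out assertion above: det(X) = det(X^T) holds mathematically, but the two unevaluated Determinant objects are different expression trees, so structural == compares False. A minimal check with the explicit 3x3 matrix:

from sympy import MatrixSymbol, Matrix, Determinant

X = MatrixSymbol('X', 3, 3)
MX = Matrix(X)

print(Determinant(X) == Determinant(X.T))   # False: different expression trees
print((MX.det() - MX.T.det()).expand())     # 0: the explicit determinants agree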