Example 1
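Numerically checks the gradient of the rotation cost: RotateVaryingMarkovChain reports both the bound and its gradient with respect to the rotation matrix R, and optimize.check_gradient compares that gradient against finite differences.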
        # Assumed context: a BayesPy TestCase method, with imports along the
        # lines of
        #     import numpy as np
        #     from bayespy.nodes import (Gaussian, GaussianARD,
        #                                GaussianMarkovChain, SumMultiply)
        #     from bayespy.inference.vmp.transformations import (
        #         RotateGaussianARD, RotateVaryingMarkovChain)
        #     from bayespy.utils import optimize
        def check(D, N, K, mu=None, Lambda=None, rho=None):

            # Default parents are fixed numpy arrays; callers may instead pass
            # stochastic nodes (see the update() loop below).
            if mu is None:
                mu = np.zeros(D)
            if Lambda is None:
                Lambda = np.identity(D)
            if rho is None:
                rho = np.ones(D)

            # Observation precision matrix (positive definite by construction)
            V = np.identity(D) + np.ones((D, D))

            # Construct model
            B = GaussianARD(3, 5, shape=(D, K), plates=(1, D))
            S = GaussianARD(2, 4, shape=(K, ), plates=(N, 1))
            A = SumMultiply('dk,k->d', B, S)
            X = GaussianMarkovChain(mu,
                                    Lambda,
                                    A,
                                    rho,
                                    n=N + 1,
                                    initialize=False)
            Y = Gaussian(X, V, initialize=False)

            # Posterior estimation
            Y.observe(np.random.randn(N + 1, D))
            X.update()
            B.update()
            S.update()
            # mu, Lambda and rho may be plain numpy arrays or stochastic
            # nodes; only the nodes have an update() method.
            for parent in (mu, Lambda, rho):
                if hasattr(parent, 'update'):
                    parent.update()

            # Construct rotator
            rotB = RotateGaussianARD(B, axis=-2)
            rotX = RotateVaryingMarkovChain(X, B, S, rotB)
            rotX.setup()

            # Check gradient with respect to R
            R = np.random.randn(D, D)

            def cost(r):
                # Rotation-cost bound and its gradient, flattened for the
                # numerical gradient checker
                (b, dr) = rotX.bound(np.reshape(r, np.shape(R)))
                return (b, np.ravel(dr))

            err = optimize.check_gradient(cost, np.ravel(R), verbose=False)
            self.assertAllClose(err, 0, atol=1e-6, msg="Gradient incorrect")

            return
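A minimal sketch of how this helper might be driven; the dimensions below are illustrative and the node-valued parent is a hypothetical example, not taken from the original snippet:

        # Hypothetical invocations (D=2, N=3, K=2 chosen arbitrarily)
        check(2, 3, 2)
        # A stochastic mean parent exercises the update() loop above
        check(2, 3, 2, mu=GaussianARD(0, 1, shape=(2,)))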
Example 2
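Checks the rotation-cost bookkeeping: the per-node bound terms must sum to the total bound, and the change that RotateVaryingMarkovChain reports for X under a rotation R must equal the true change in X's lower-bound contribution.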
        # Same assumed BayesPy test context and imports as in Example 1.
        def check(D, N, K, mu=None, Lambda=None, rho=None):

            if mu is None:
                mu = np.zeros(D)
            if Lambda is None:
                Lambda = np.identity(D)
            if rho is None:
                rho = np.ones(D)

            V = np.identity(D) + np.ones((D, D))

            # Construct model
            B = GaussianARD(3, 5, shape=(D, K), plates=(1, D))
            S = GaussianARD(2, 4, shape=(K, ), plates=(N, 1))
            A = SumMultiply('dk,k->d', B, S)
            X = GaussianMarkovChain(mu,
                                    Lambda,
                                    A,
                                    rho,
                                    n=N + 1,
                                    initialize=False)
            Y = Gaussian(X, V, initialize=False)

            # Posterior estimation
            Y.observe(np.random.randn(N + 1, D))
            X.update()
            B.update()
            S.update()
            # mu, Lambda and rho may be plain numpy arrays or stochastic
            # nodes; only the nodes have an update() method.
            for parent in (mu, Lambda, rho):
                if hasattr(parent, 'update'):
                    parent.update()

            # Construct rotator
            rotB = RotateGaussianARD(B, axis=-2)
            rotX = RotateVaryingMarkovChain(X, B, S, rotB)

            # Rotation
            true_cost0 = X.lower_bound_contribution()
            rotX.setup()
            I = np.identity(D)
            R = np.random.randn(D, D)
            rot_cost0 = rotX.get_bound_terms(I)
            rot_cost1 = rotX.get_bound_terms(R)
            self.assertAllClose(sum(rot_cost0.values()),
                                rotX.bound(I)[0],
                                msg="Bound terms and total bound differ")
            self.assertAllClose(sum(rot_cost1.values()),
                                rotX.bound(R)[0],
                                msg="Bound terms and total bound differ")
            rotX.rotate(R)
            true_cost1 = X.lower_bound_contribution()
            self.assertAllClose(true_cost1 - true_cost0,
                                rot_cost1[X] - rot_cost0[X],
                                msg="Incorrect rotation cost for X")

            return
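As with Example 1, a hypothetical driver; the Gamma innovation-precision parent below assumes Gamma is imported from bayespy.nodes:

        check(2, 3, 2)
        # A stochastic innovation precision exercises the update() loop
        check(2, 3, 2, rho=Gamma(1e-3, 1e-3, plates=(2,)))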