Example #1
    def __init__(self, means, covariances, weights, n_components, D1, D2):
        """ Initialiser class method.

            means - means of Gaussians in mixture
            covariances - covariance matrices of Gaussians in mixture
            weights - mixture proportions of the GMM
            n_components - no. of components in the GMM
            D1 - dimension of x1 (see notes)
            D2 - dimension of x2 (see notes)

        """

        self.n_components = n_components
        self.weights = weights

        # Initialise lists
        self.mu_11_list = []
        self.mu_22_list = []
        self.Sigma_11_list = []
        self.Sigma_12_list = []
        self.Sigma_21_list = []
        self.Sigma_22_list = []

        # Isolate components of Gaussian Mixture model
        for c in range(n_components):

            # Split mean into individual components
            mu_11, mu_22 = means[c][0:D1], means[c][D1:D1 + D2]

            # Split covariance matrix into individual components
            Sigma_11 = covariances[c][0:D1, 0:D1]
            Sigma_12 = covariances[c][0:D1, D1:D1 + D2]
            Sigma_21 = covariances[c][D1:D1 + D2, 0:D1]
            Sigma_22 = covariances[c][D1:D1 + D2, D1:D1 + D2]

            self.mu_11_list.append(mu_11)
            self.mu_22_list.append(mu_22)
            self.Sigma_11_list.append(Sigma_11)
            self.Sigma_12_list.append(Sigma_12)
            self.Sigma_21_list.append(Sigma_21)
            self.Sigma_22_list.append(Sigma_22)

        # Create lists of marginal probability distributions
        self.p_11 = []
        self.p_22 = []
        for c in range(n_components):
            self.p_11.append(
                Normal_PDF(mean=self.mu_11_list[c], cov=self.Sigma_11_list[c]))
            self.p_22.append(
                Normal_PDF(mean=self.mu_22_list[c], cov=self.Sigma_22_list[c]))
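This initialiser partitions each component's joint mean and covariance over (x1, x2) into blocks. As a sketch of the standard block decomposition the slicing above corresponds to (D1 and D2 being the dimensions of x1 and x2):

\[
\mu^{(c)} = \begin{pmatrix} \mu_{1}^{(c)} \\ \mu_{2}^{(c)} \end{pmatrix},
\qquad
\Sigma^{(c)} = \begin{pmatrix} \Sigma_{11}^{(c)} & \Sigma_{12}^{(c)} \\ \Sigma_{21}^{(c)} & \Sigma_{22}^{(c)} \end{pmatrix},
\]

where \(\mu_{1}^{(c)}\) is the first D1 entries of the component mean (stored in mu_11_list) and \(\mu_{2}^{(c)}\) the remaining D2 entries (mu_22_list); the covariance blocks are stored per component in the corresponding Sigma lists.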
Example #2
    def pdf_x1_cond_x2(self, x1, x2):
        """ Compute the probability density of x1, given x2.

        """

        # Find Gaussian components of p(x1 | x2)
        p_1_2 = []
        for c in range(self.n_components):

            mu = self.mu_1_2(x2, self.mu_11_list[c], self.Sigma_12_list[c],
                             self.Sigma_22_list[c], self.mu_22_list[c])

            Sigma = self.Sigma_1_2(x2, self.Sigma_11_list[c],
                                   self.Sigma_12_list[c],
                                   self.Sigma_21_list[c],
                                   self.Sigma_22_list[c])

            p_1_2.append(Normal_PDF(mean=mu, cov=Sigma))

        # Find weights of p(x1 | x2)
        weights_1_2 = self.w_1_2(x2)

        # Calculate pdf, p(x1 | x2)
        pdf = np.zeros(1)
        for c in range(self.n_components):
            pdf += weights_1_2[c] * p_1_2[c].pdf(x1)

        return pdf
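The helper methods mu_1_2, Sigma_1_2 and w_1_2 are not shown in this snippet; assuming they implement the standard Gaussian conditioning identities (an assumption, since their bodies are not part of the example), the quantities assembled for each component c are

\[
\mu_{1|2}^{(c)} = \mu_{1}^{(c)} + \Sigma_{12}^{(c)} \bigl(\Sigma_{22}^{(c)}\bigr)^{-1} \bigl(x_2 - \mu_{2}^{(c)}\bigr),
\qquad
\Sigma_{1|2}^{(c)} = \Sigma_{11}^{(c)} - \Sigma_{12}^{(c)} \bigl(\Sigma_{22}^{(c)}\bigr)^{-1} \Sigma_{21}^{(c)},
\]

so that the returned value is the mixture density

\[
p(x_1 \mid x_2) = \sum_{c=1}^{C} w_c(x_2)\, \mathcal{N}\bigl(x_1;\, \mu_{1|2}^{(c)}, \Sigma_{1|2}^{(c)}\bigr),
\]

with conditional weights \(w_c(x_2)\) computed by w_1_2.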
Example #3
    def __init__(self):
        """ Define prior pdfs over stiffness, damping, and
            noise std.
        """

        self.p_k = Normal_PDF(mean=3, cov=0.5)
        self.p_c = Gamma_PDF(a=1, scale=0.1)
        self.p_sigma = Gamma_PDF(a=1, scale=0.1)
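Normal_PDF and Gamma_PDF are imported elsewhere in the package; Example #10 suggests Normal_PDF is an alias for scipy.stats.multivariate_normal, and Gamma_PDF is presumably scipy.stats.gamma. A minimal sketch, under those assumptions, of how the frozen priors can be evaluated:

from scipy.stats import multivariate_normal as Normal_PDF
from scipy.stats import gamma as Gamma_PDF

# Frozen priors, mirroring the snippet above (assumed aliases)
p_k = Normal_PDF(mean=3, cov=0.5)      # stiffness
p_c = Gamma_PDF(a=1, scale=0.1)        # damping
p_sigma = Gamma_PDF(a=1, scale=0.1)    # noise std

# Joint log-prior at a hypothetical candidate (k, c, sigma)
log_prior = (p_k.logpdf(3.2)
             + p_c.logpdf(0.05)
             + p_sigma.logpdf(0.02))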
Example #4
    def __init__(self, D, means, vars, weights, n_components):
        """ Initiate with mean and standard deviation.

            Note that, with this class, it is assumed that the mean and
            variance is a function of 'x_cond' in the following.

        """

        # Assign variables to object instance
        self.means = means
        self.vars = vars
        self.weights = weights
        self.n_components = n_components
        self.D = D

        # Define each component as a separate normal pdf
        self.pdfs = []
        for c in range(n_components):
            self.pdfs.append(Normal_PDF(mean=self.means[c], cov=self.vars[c]))
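This appears to be the initialiser of the GMM_PDF class used as the target in Example #5. Assuming its pdf method sums the weighted component densities stored in self.pdfs (the method itself is not shown), the density being set up is the mixture

\[
p(x) = \sum_{c=1}^{C} w_c\, \mathcal{N}\bigl(x;\, \mu_c, \sigma_c^2\bigr),
\]

with C = n_components, means \(\mu_c\), variances \(\sigma_c^2\) (the vars argument) and weights \(w_c\).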
Example #5
def test_sampler():
    """ Test that we can sample from a multi-modal distribution

    """

    # Define target distribution
    p = GMM_PDF(D=1,
                means=[np.array(-3), np.array(3)],
                vars=[np.array(1), np.array(1)],
                weights=[0.5, 0.5],
                n_components=2)

    # Define initial proposal
    q0 = Normal_PDF(mean=0, cov=3)

    # Define proposal as being Gaussian, centered on x_cond, with variance
    # equal to 0.1
    q = Q_Proposal()
    q.var = 0.1
    q.std = np.sqrt(q.var)
    q.logpdf = lambda x, x_cond: -1 / (2 * q.var) * (x - x_cond)**2
    q.rvs = lambda x_cond: x_cond + q.std * np.random.randn()

    # Define L-kernel as being Gaussian, centered on x_cond, with variance
    # equal to 0.1
    L = L_Kernel()
    L.var = 0.1
    L.std = np.sqrt(L.var)
    L.logpdf = lambda x, x_cond: -1 / (2 * L.var) * (x - x_cond)**2

    # No. samples and iterations
    N = 5000
    K = 10

    # Run samplers
    smc_opt_gmm = SMC_OPT_GMM(N=N, D=1, p=p, q0=q0, K=K, q=q, L_components=2)
    smc_opt_gmm.generate_samples()

    assert np.allclose(smc_opt_gmm.mean_estimate_EES[-1], 0, atol=0.5)
    assert np.allclose(smc_opt_gmm.var_estimate_EES[-1], 10, atol=0.5)
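The assert targets follow from the moments of the target GMM: with means ±3, unit variances and equal weights,

\[
\mathbb{E}[x] = 0.5(-3) + 0.5(3) = 0,
\qquad
\operatorname{Var}[x] = \sum_c w_c\bigl(\sigma_c^2 + \mu_c^2\bigr) - \mathbb{E}[x]^2 = 0.5(1 + 9) + 0.5(1 + 9) - 0 = 10,
\]

so the estimated mean and variance are checked against 0 and 10 to within atol=0.5.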
Example #6
    def __init__(self):
        self.pdf = Normal_PDF(mean=np.zeros(2), cov=np.eye(2))
Example #7
    def __init__(self):
        self.pdf = Normal_PDF(mean=np.array([3.0, 2.0]), cov=np.eye(2))
Example #8
    def __init__(self):
        self.mean = np.array([3.0, 2.0])
        self.cov = np.eye(2)
        self.pdf = Normal_PDF(self.mean, self.cov)
Example #9
    def __init__(self):
        self.pdf = Normal_PDF(mean=0, cov=3)
Example #10
"""
Testing for SMC_BASE

P.L.Green
"""

import sys
import numpy as np

sys.path.append('..')  # noqa
from SMC_BASE import *
from scipy.stats import multivariate_normal as Normal_PDF

# Define target distribution
p = Normal_PDF(mean=np.array([3.0, 2.0]), cov=np.eye(2))

# Define initial proposal
q0 = Normal_PDF(mean=np.zeros(2), cov=np.eye(2))

# Define proposal as being Gaussian, centered on x_cond, with identity
# covariance matrix
q = Q_Proposal()
q.logpdf = lambda x, x_cond: -0.5 * (x - x_cond).T @ (x - x_cond)
q.rvs = lambda x_cond: x_cond + np.random.randn(2)

# Define L-kernel as being Gaussian, centered on x_cond, with identity
# covariance matrix
L = L_Kernel()
L.logpdf = lambda x, x_cond: -0.5 * (x - x_cond).T @ (x - x_cond)
L.rvs = lambda x_cond: x_cond + np.random.randn(2)

# Redefine the target distribution as a univariate, two-component GMM
p = Target()
p.pdf = GMM_PDF(D=1,
                means=[np.array(-3), np.array(3)],
                vars=[np.array(1), np.array(1)],
                weights=[0.5, 0.5],
                n_components=2)


def p_logpdf(x):
    return p.pdf.logpdf(x)


p.logpdf = p_logpdf

# Define initial proposal
q0 = Normal_PDF(mean=0, cov=3)

# Define proposal as being Gaussian, centered on x_cond, with variance
# equal to 0.1
q = Q_Proposal()
q.var = 0.1
q.std = np.sqrt(q.var)
q.logpdf = lambda x, x_cond: -1 / (2 * q.var) * (x - x_cond)**2
q.rvs = lambda x_cond: x_cond + q.std * np.random.randn()

# Define L-kernel as being Gaussian, centered on x_cond, with variance
# equal to 0.1
L = L_Kernel()
L.var = 0.1
L.std = np.sqrt(L.var)
L.logpdf = lambda x, x_cond: -1 / (2 * L.var) * (x - x_cond)**2
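Note that both logpdf lambdas drop the Gaussian normalising constant; the full log-density would be

\[
\log \mathcal{N}(x;\, x_{\text{cond}}, \sigma^2) = -\frac{(x - x_{\text{cond}})^2}{2\sigma^2} - \tfrac{1}{2}\log\bigl(2\pi\sigma^2\bigr).
\]

Since q and L are given the same variance (0.1 here), the omitted constant is identical in both and presumably cancels in the importance-weight update, which is why the truncated expressions suffice for this test setup.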
Example #11

    def __init__(self):
        self.pdf = Normal_PDF(mean=np.repeat(2, D), cov=0.1 * np.eye(D))