Code example #1
    def random(self, *phi, plates=None):
        """
        Draw a random sample from the distribution.
        """
        # Convert natural parameters to transition probabilities
        p0 = np.exp(phi[0] - misc.logsumexp(phi[0], axis=-1, keepdims=True))
        P = np.exp(phi[1] - misc.logsumexp(phi[1], axis=-1, keepdims=True))
        # Explicit broadcasting
        P = P * np.ones(plates)[..., None, None, None]
        # Allocate memory
        Z = np.zeros(plates + (self.N,), dtype=int)
        # Draw initial state
        Z[..., 0] = random.categorical(p0, size=plates)
        # Create [0,1,2,...,len(plate_axis)] indices for each plate axis and
        # make them broadcast properly
        nplates = len(plates)
        plates_ind = [
            np.arange(plate)[(Ellipsis, ) + (nplates - i - 1) * (None, )]
            for (i, plate) in enumerate(plates)
        ]
        plates_ind = tuple(plates_ind)
        # Draw next states iteratively
        for n in range(self.N - 1):
            # Select the transition probabilities for the current state but take
            # into account the plates.  This leads to complex NumPy
            # indexing.. :)
            time_ind = min(n, np.shape(P)[-3] - 1)
            ind = plates_ind + (time_ind, Z[..., n], Ellipsis)
            p = P[ind]
            # Draw next state
            z = random.categorical(p)
            Z[..., n + 1] = z

        return Z
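
The helper random.categorical used in these examples draws integer class indices from (possibly unnormalized) probability vectors. As a standalone reference, here is a minimal plain-NumPy sketch of the same idea; the name draw_categorical and its exact behavior are illustrative assumptions, not the BayesPy implementation.

import numpy as np

def draw_categorical(p, size=(), rng=None):
    """Draw integer indices in [0, len(p)) with probabilities proportional to p."""
    rng = np.random.default_rng() if rng is None else rng
    p = np.asarray(p, dtype=float)
    cdf = np.cumsum(p / p.sum())      # normalized cumulative distribution
    u = rng.random(size)              # uniform draws in [0, 1)
    return np.searchsorted(cdf, u)    # invert the CDF to get class indices

# Roughly one third of the draws should land in each of the three classes
z = draw_categorical([1/3, 1/3, 1/3], size=(100,))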
Code example #2
File: test_plot.py Project: pfjob09/bayespy
def _setup_bernoulli_mixture():
    """
    Setup code for the hinton tests.

    This code is from http://www.bayespy.org/examples/bmm.html
    """
    np.random.seed(1)
    p0 = [0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9]
    p1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.9, 0.9]
    p2 = [0.9, 0.9, 0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1, 0.1]
    p = np.array([p0, p1, p2])

    z = random.categorical([1 / 3, 1 / 3, 1 / 3], size=100)
    x = random.bernoulli(p[z])
    N = 100
    D = 10
    K = 10

    R = Dirichlet(K * [1e-5], name='R')
    Z = Categorical(R, plates=(N, 1), name='Z')

    P = Beta([0.5, 0.5], plates=(D, K), name='P')

    X = Mixture(Z, Bernoulli, P)

    Q = VB(Z, R, X, P)
    P.initialize_from_random()
    X.observe(x)
    Q.update(repeat=1000)

    return (R, P, Z)
Code example #3
    def random(self, *phi, plates=None):
        """
        Draw a random sample from the distribution.
        """
        # The first natural parameter holds the (unnormalized) log probabilities
        logp = phi[0]
        # Subtract the maximum for numerical stability before exponentiating
        logp -= np.amax(logp, axis=-1, keepdims=True)
        p = np.exp(logp)
        return random.categorical(p, size=plates)
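
Subtracting the maximum before exponentiating, as done above, is the standard guard against overflow when turning log-probabilities into probabilities. A quick numerical check (plain NumPy, illustrative only) that the shift leaves the normalized probabilities unchanged:

import numpy as np

logp = np.array([1000.0, 1001.0, 1002.0])   # np.exp(logp) on its own would overflow
p = np.exp(logp - np.amax(logp))            # shift by the max before exponentiating
p /= np.sum(p)                              # normalize; this equals softmax(logp)
# p is approximately [0.090, 0.245, 0.665], computed without huge intermediates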
Code example #4
File: categorical.py Project: Sandy4321/bayespy
    def random(self):
        """
        Draw a random sample from the distribution.
        """
        logp = self.phi[0]
        logp -= np.amax(logp, axis=-1, keepdims=True)
        p = np.exp(logp)
        return random.categorical(p, size=self.plates)
Code example #5
File: collapsed_cg.py Project: zehsilva/bayespy
def mixture_of_gaussians():
    """Collapsed Riemannian conjugate gradient demo

    This is similar although not exactly identical to an experiment in
    (Hensman et al 2012).
    """

    np.random.seed(41)

    # Number of samples
    N = 1000
    # Number of clusters in the model (five in the data)
    K = 10

    # Overlap parameter of clusters
    R = 2

    # Construct the model
    Q = mog.gaussianmix_model(N, K, 2, covariance='diagonal')

    # Generate data from five Gaussian clusters
    mu = np.array([[0, 0],
                   [R, R],
                   [-R, R],
                   [R, -R],
                   [-R, -R]])
    Z = random.categorical(np.ones(5), size=N)
    data = np.empty((N, 2))
    for n in range(N):
        data[n,:] = mu[Z[n]] + np.random.randn(2)
    Q['Y'].observe(data)

    # Take one update step (so that the natural parameters phi are initialized)
    Q.update(repeat=1)
    Q.save()

    # Run standard VB-EM
    Q.update(repeat=1000, tol=0)
    bpplt.pyplot.plot(np.cumsum(Q.cputime), Q.L, 'k-')

    # Restore the initial state
    Q.load()

    # Run Riemannian conjugate gradient
    Q.optimize('alpha', 'X', 'Lambda', collapsed=['z'], maxiter=300, tol=0)
    bpplt.pyplot.plot(np.cumsum(Q.cputime), Q.L, 'r:')

    bpplt.pyplot.xlabel('CPU time (in seconds)')
    bpplt.pyplot.ylabel('VB lower bound')
    bpplt.pyplot.legend(['VB-EM', 'Collapsed Riemannian CG'], loc='lower right')

    ## bpplt.pyplot.figure()
    ## bpplt.pyplot.plot(data[:,0], data[:,1], 'rx')
    ## bpplt.pyplot.title('Data')

    bpplt.pyplot.show()
Code example #6
File: test_plot.py Project: BayesianHuman/bayespy
def _setup_bernoulli_mixture():
    """
    Setup code for the hinton tests.

    This code is from http://www.bayespy.org/examples/bmm.html
    """
    np.random.seed(1)
    p0 = [0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9]
    p1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.9, 0.9]
    p2 = [0.9, 0.9, 0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1, 0.1]
    p = np.array([p0, p1, p2])

    z = random.categorical([1/3, 1/3, 1/3], size=100)
    x = random.bernoulli(p[z])
    N = 100
    D = 10
    K = 10

    R = Dirichlet(K*[1e-5],
                  name='R')
    Z = Categorical(R,
                    plates=(N,1),
                    name='Z')

    P = Beta([0.5, 0.5],
             plates=(D,K),
             name='P')

    X = Mixture(Z, Bernoulli, P)

    Q = VB(Z, R, X, P)
    P.initialize_from_random()
    X.observe(x)
    Q.update(repeat=1000)

    return (R,P,Z)
Code example #7
File: bmm-2.py Project: sumitsourabh/bayes-network
import numpy as np
import bayespy.plot as bpplt
from bayespy.utils import random
from bayespy.nodes import Categorical, Dirichlet, Beta, Mixture, Bernoulli
from bayespy.inference import VB

np.random.seed(1)

# Three prototype probability vectors and 100 binary observations drawn from them
p0 = [0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9, 0.1, 0.9]
p1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.9, 0.9]
p2 = [0.9, 0.9, 0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1, 0.1]
p = np.array([p0, p1, p2])
z = random.categorical([1 / 3, 1 / 3, 1 / 3], size=100)
x = random.bernoulli(p[z])

N = 100  # number of observations
D = 10   # dimensionality of each observation
K = 10   # number of mixture components in the model

# Bernoulli mixture model
R = Dirichlet(K * [1e-5], name='R')
Z = Categorical(R, plates=(N, 1), name='Z')
P = Beta([0.5, 0.5], plates=(D, K), name='P')
X = Mixture(Z, Bernoulli, P)

# Variational Bayesian inference
Q = VB(Z, R, X, P)
P.initialize_from_random()
X.observe(x)
Q.update(repeat=1000)

# Visualize the posterior of P as a Hinton diagram
bpplt.hinton(P)
bpplt.pyplot.show()
Code example #8
File: bayespyex.py Project: rdorado79/nb-al
# Imports needed to run this fragment on its own
import numpy as np
import bayespy.plot as bpplt
from bayespy.utils import random
from bayespy.nodes import Categorical, Dirichlet, Beta, Mixture, Bernoulli
from bayespy.inference import VB

# Earlier plotting of the mu and tau posteriors, disabled in the source file:
# bpplt.pyplot.subplot(2, 1, 1)
# bpplt.pdf(mu, np.linspace(-10, 20, num=100), color='k', name=r'\mu')
# bpplt.pyplot.subplot(2, 1, 2)
# bpplt.pdf(tau, np.linspace(1e-6, 0.08, num=100), color='k', name=r'\tau')
# bpplt.pyplot.tight_layout()
# bpplt.pyplot.show()

p0 = [0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1]
p1 = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.9, 0.9, 0.9, 0.9]
p2 = [0.9, 0.9, 0.9, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1]

p = np.array([p0, p1, p2])
z = random.categorical([1/3, 1/3, 1/3], size=100)
x = random.bernoulli(p[z])

N = 100
D = 10
K = 3

R = Dirichlet(K * [1e-5], name='R')
Z = Categorical(R, plates=(N, 1), name='Z')
P = Beta([0.5, 0.5], plates=(D, K), name='P')
X = Mixture(Z, Bernoulli, P)

Q = VB(Z, R, X, P)
P.initialize_from_random()
X.observe(x)
Code example #9
File: categorical.py Project: POkoroafor/bayespy
    def random(self):
        logp = self.phi[0]
        logp -= np.amax(logp, axis=-1, keepdims=True)
        p = np.exp(logp)
        return random.categorical(p, size=self.plates)
Code example #10
def run(N=100000, N_batch=50, seed=42, maxiter=100, plot=True):
    """
    Run stochastic variational inference demo for a multivariate Gaussian mixture.
    """

    if seed is not None:
        np.random.seed(seed)

    # Number of clusters in the model
    K = 20

    # Dimensionality of the data
    D = 5

    # Generate data
    K_true = 10
    spread = 5
    means = spread * np.random.randn(K_true, D)
    z = random.categorical(np.ones(K_true), size=N)
    data = np.empty((N, D))
    for n in range(N):
        data[n] = means[z[n]] + np.random.randn(D)

    #
    # Standard VB-EM algorithm
    #

    # Full model
    mu = Gaussian(np.zeros(D), np.identity(D), plates=(K, ), name='means')
    alpha = Dirichlet(np.ones(K), name='class probabilities')
    Z = Categorical(alpha, plates=(N, ), name='classes')
    Y = Mixture(Z, Gaussian, mu, np.identity(D), name='observations')

    # Break symmetry with random initialization of the means
    mu.initialize_from_random()

    # Put the data in
    Y.observe(data)

    # Run inference
    Q = VB(Y, Z, mu, alpha)
    Q.save(mu)
    Q.update(repeat=maxiter)
    if plot:
        bpplt.pyplot.plot(np.cumsum(Q.cputime), Q.L, 'k-')
    max_cputime = np.sum(Q.cputime[~np.isnan(Q.cputime)])

    #
    # Stochastic variational inference
    #

    # Construct smaller model (size of the mini-batch)
    mu = Gaussian(np.zeros(D), np.identity(D), plates=(K, ), name='means')
    alpha = Dirichlet(np.ones(K), name='class probabilities')
    Z = Categorical(alpha,
                    plates=(N_batch, ),
                    plates_multiplier=(N / N_batch, ),
                    name='classes')
    Y = Mixture(Z, Gaussian, mu, np.identity(D), name='observations')

    # Break symmetry with random initialization of the means
    mu.initialize_from_random()

    # Inference engine
    Q = VB(Y, Z, mu, alpha, autosave_filename=Q.autosave_filename)
    Q.load(mu)

    # Because mini-batches are used, the messages need to be scaled up appropriately
    print("Stochastic variational inference...")
    Q.ignore_bound_checks = True

    maxiter *= int(N / N_batch)
    delay = 1
    forgetting_rate = 0.7
    for n in range(maxiter):

        # Observe a mini-batch
        subset = np.random.choice(N, N_batch)
        Y.observe(data[subset, :])

        # Learn intermediate variables
        Q.update(Z)

        # Set step length
        step = (n + delay)**(-forgetting_rate)

        # Stochastic gradient for the global variables
        Q.gradient_step(mu, alpha, scale=step)

        if np.sum(Q.cputime[:n]) > max_cputime:
            break

    if plot:
        bpplt.pyplot.plot(np.cumsum(Q.cputime), Q.L, 'r:')

        bpplt.pyplot.xlabel('CPU time (in seconds)')
        bpplt.pyplot.ylabel('VB lower bound')
        bpplt.pyplot.legend(['VB-EM', 'Stochastic inference'],
                            loc='lower right')
        bpplt.pyplot.title('VB for Gaussian mixture model')

    return
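
The step length above decays polynomially with the iteration index, as is usual for stochastic variational inference. Evaluating the same formula for the first few iterations makes the decay concrete (values rounded, illustrative check only):

delay = 1
forgetting_rate = 0.7
steps = [(n + delay) ** (-forgetting_rate) for n in range(5)]
# steps ~= [1.000, 0.616, 0.463, 0.379, 0.324]: each stochastic gradient step
# for the global variables uses a smaller scale than the previous one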