Example #1
    def __init__(self):
        dim = 3
        self.dim = dim

        # p and q: two diagonal multivariate normals over `dim` visible units
        self.p = DiagonalMND(nvis=dim,
                             init_beta=1.,
                             init_mu=0.,
                             min_beta=1e-6,
                             max_beta=1e6)

        self.q = DiagonalMND(nvis=dim,
                             init_beta=1.,
                             init_mu=0.,
                             min_beta=1e-6,
                             max_beta=1e6)
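
For context: judging by its parameters, DiagonalMND represents a multivariate normal with mean init_mu and a diagonal precision matrix, where each beta_i is an inverse variance, beta_i = 1 / sigma_i**2. A minimal NumPy sketch of the density this parameterization describes (an illustration, not pylearn2's implementation):

import numpy as np

def diag_gaussian_log_prob(x, mu, beta):
    # log N(x; mu, diag(1 / beta)) for a single vector x
    d = x.shape[0]
    return (0.5 * np.sum(np.log(beta))
            - 0.5 * d * np.log(2.0 * np.pi)
            - 0.5 * np.sum(beta * np.square(x - mu)))

# standard normal in 3 dimensions, matching init_beta=1., init_mu=0. above
print(diag_gaussian_log_prob(np.zeros(3), np.zeros(3), np.ones(3)))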
Example #2
import numpy as np
from theano import function
from pylearn2.models.mnd import DiagonalMND


def test_log_partition_function():
    """Tests that the log partition function is right in the simple 1D case."""

    sigma = 2.3
    model = DiagonalMND(nvis=1, init_beta=1 / np.square(sigma),
                        min_beta=1e-6, max_beta=1e6, init_mu=17.0)

    log_Z = model.log_partition_function()
    log_Z = function([], log_Z)()

    ground = np.log(sigma * np.sqrt(2.0 * np.pi))

    print ground
    print log_Z

    assert np.allclose(ground, log_Z)
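
The ground truth here comes from the Gaussian integral: the unnormalized density exp(-(x - mu)**2 / (2 * sigma**2)) integrates to Z = sigma * sqrt(2 * pi), independent of mu. A quick standalone quadrature check of that value (an illustration, not part of the original test):

import numpy as np
from scipy.integrate import quad

sigma, mu = 2.3, 17.0
Z, _ = quad(lambda x: np.exp(-0.5 * ((x - mu) / sigma) ** 2),
            mu - 50 * sigma, mu + 50 * sigma)
print(np.log(Z))                             # ~1.7518
print(np.log(sigma * np.sqrt(2.0 * np.pi)))  # same value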
Example #3
# Generate the values of beta to consider
idxs = np.arange(num_beta)
pos = idxs / float(num_beta - 1)
scaled_shifted = pos * (max_exp - min_exp) + min_exp
betas = 10 ** scaled_shifted

kls = np.zeros((trials, num_beta))
ml_kls = np.zeros((trials, ))

for trial in xrange(trials):
    # generate the data
    data_distribution = MND(sigma=np.identity(dim) / true_beta,
                            mu=np.zeros((dim, )),
                            seed=17 * (trial + 1))
    true = DiagonalMND(nvis=dim,
                       init_beta=true_beta,
                       init_mu=0.,
                       min_beta=.1,
                       max_beta=10.)
    X = sharedX(function([], data_distribution.random_design_matrix(m))())

    # maximum likelihood estimates of mu and beta from the sample
    Xv = X.get_value()
    mu = Xv.mean(axis=0)
    print 'maximum likelihood mu: ', mu
    diff = Xv - mu
    var = np.square(diff).mean(axis=0)
    mlbeta = 1. / var
    print 'maximum likelihood beta: ', mlbeta
    ml_model = DiagonalMND(nvis=dim,
                           init_mu=mu,
                           init_beta=mlbeta,
                           min_beta=0.0,
                           max_beta=1e6)
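
The first four lines of this example build num_beta candidate precisions spaced uniformly in log domain between 10**min_exp and 10**max_exp, which is exactly what np.logspace does. A minimal equivalence check (the three constants are assumed values for illustration):

import numpy as np

num_beta, min_exp, max_exp = 30, -2.0, 2.0  # assumed for illustration
pos = np.arange(num_beta) / float(num_beta - 1)
betas = 10 ** (pos * (max_exp - min_exp) + min_exp)
assert np.allclose(betas, np.logspace(min_exp, max_exp, num_beta))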
Example #4
    assert ml_kl >= 0.0
    ml_kls[trial] = ml_kl
    print 'maximum likelihood kl divergence: ', ml_kl

    best_mse = None

    # Try each noise beta
    for idx1 in xrange(num_beta):
        beta = betas[idx1]

        print 'Running experiment for ', beta

        # Allocate a fresh model
        model = DiagonalMND(nvis=dim,
                            init_mu=0.,
                            init_beta=.1,
                            min_beta=.001,
                            max_beta=1e30)

        # Make the noise distribution
        noise_distribution = AdditiveDiagonalMND(init_beta=beta,
                                                 nvis=dim)

        # Generate the noise samples: noise_per_clean noisy copies of X
        noise_func = function([], noise_distribution.random_design_matrix(X))
        Y = []
        for i in xrange(noise_per_clean):
            Y.append(sharedX(noise_func()))
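
Judging from the class name and how it is used here, AdditiveDiagonalMND corrupts the clean design matrix X with zero-mean Gaussian noise of precision beta, so each of the noise_per_clean draws yields one noisy copy of every clean example. A plain NumPy sketch of that corruption step (an assumption from the usage above, not pylearn2's code):

import numpy as np

def additive_diagonal_noise(X, beta, rng):
    # add zero-mean Gaussian noise with precision beta (std = 1 / sqrt(beta))
    return X + rng.randn(*X.shape) / np.sqrt(beta)

rng = np.random.RandomState(0)
X = rng.randn(5, 3)  # 5 clean examples, dim = 3
Y = [additive_diagonal_noise(X, 10.0, rng) for _ in range(2)]  # noise_per_clean = 2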
Example #5
# Generate the values of beta to consider
idxs = np.arange(num_beta)
pos = idxs / float(num_beta - 1)
scaled_shifted = pos * (max_exp - min_exp) + min_exp
betas = 10 ** scaled_shifted


kls = np.zeros((trials, num_beta))
ml_kls = np.zeros((trials, ))

for trial in xrange(trials):
    # generate the data
    data_distribution = MND(sigma=np.identity(dim) / true_beta,
                            mu=np.zeros((dim, )),
                            seed=17 * (trial + 1))
    true = DiagonalMND(nvis=dim, init_beta=true_beta, init_mu=0.,
                       min_beta=.1, max_beta=10.)
    X = sharedX(function([], data_distribution.random_design_matrix(m))())

    Xv = X.get_value()
    mu = Xv.mean(axis=0)
    print 'maximum likelihood mu: ', mu
    diff = Xv - mu
    var = np.square(diff).mean(axis=0)
    mlbeta = 1. / var
    print 'maximum likelihood beta: ', mlbeta
    ml_model = DiagonalMND(nvis=dim, init_mu=mu, init_beta=mlbeta,
                           min_beta=0.0,
                           max_beta=1e6)
    ml_kl = kl_divergence(true, ml_model)
    ml_kl = function([], ml_kl)()
    assert ml_kl >= 0.0
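
kl_divergence(true, ml_model) has a closed form for a pair of diagonal Gaussians: with means mu_p, mu_q and precisions beta_p, beta_q, KL(p || q) = sum_i [0.5 * log(beta_p_i / beta_q_i) + 0.5 * beta_q_i * (1 / beta_p_i + (mu_p_i - mu_q_i)**2) - 0.5]. pylearn2 builds this as a Theano graph; a plain NumPy sketch of the same formula:

import numpy as np

def diag_gaussian_kl(mu_p, beta_p, mu_q, beta_q):
    # KL(p || q) for Gaussians with diagonal precision beta = 1 / sigma**2
    return np.sum(0.5 * np.log(beta_p / beta_q)
                  + 0.5 * beta_q * (1.0 / beta_p + np.square(mu_p - mu_q))
                  - 0.5)

# identical distributions, so the divergence is zero
print(diag_gaussian_kl(np.zeros(3), np.ones(3), np.zeros(3), np.ones(3)))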
Example #6
    print 'maximum likelihood beta: ', mlbeta
    ml_mse[trial] = np.abs(mlbeta - true_beta).mean()
    ml_betas[trial] = mlbeta

    best_mse = None

    # Try each noise beta
    for idx1 in xrange(num_beta):
        beta = betas[idx1]

        print 'Running experiment for ', beta

        # Allocate a fresh model
        model = DiagonalMND(nvis=dim,
                            init_mu=0.,
                            init_beta=.1,
                            min_beta=.001,
                            max_beta=1e30)

        # Make the noise distribution
        noise_distribution = AdditiveDiagonalMND(init_beta=beta, nvis=dim)

        # Generate the noise samples
        noise_func = function([], noise_distribution.random_design_matrix(X))
        Y = []
        for i in xrange(noise_per_clean):
            Y.append(sharedX(noise_func()))

        # Get the objective function
        nce = DNCE(noise_distribution)
        J = nce(model, X, Y)
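
DNCE builds the training objective from the noise distribution; the snippet does not show its exact form, but in standard noise-contrastive estimation (Gutmann & Hyvärinen, 2010) the model is fit by classifying data against noise samples with a logistic loss on the log-density ratio. A textbook sketch of that objective (not necessarily pylearn2's DNCE):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def nce_objective(log_ratio_data, log_ratio_noise):
    # log_ratio_* = log p_model(.) - log p_noise(.) evaluated on data
    # samples and on noise samples; one noise sample per data point
    return -(np.mean(np.log(sigmoid(log_ratio_data)))
             + np.mean(np.log(1.0 - sigmoid(log_ratio_noise))))

print(nce_objective(np.array([2.0, 1.5]), np.array([-1.0, -2.0])))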