Example #1
import numpy as np
import scipy as sp
import scipy.linalg  # makes sp.linalg available
from theta import rtbm


def rotate(M):
    """Rotates a 2d RTBM model by an angle of pi/2 in the counterclockwise direction."""
    nh = M._Nh
    nv = M._Nv

    # Rotation matrix R for a pi/2 counterclockwise rotation and its pseudo-inverse
    R = np.array([[0, -1], [1, 0]])
    Rt = R.T
    Rpinv = sp.linalg.inv(Rt.dot(R)).dot(Rt)

    # Rotate the visible-sector parameters: T -> (R T^-1 R^T)^-1, W -> (R+)^T W, bv -> (R+)^T bv
    t = sp.linalg.inv(R.dot(sp.linalg.inv(M.t).dot(R.T)))
    w = Rpinv.T.dot(M.w).reshape(1, nh * nv)
    bv = Rpinv.T.dot(M.bv)

    # New RTBM with the rotated parameters; T and Q enter through their upper-triangular parts
    rM = rtbm.RTBM(nv, nh)
    params = np.concatenate(
        (bv, M.bh, w, t[np.triu_indices(nv)], M.q[np.triu_indices(nh)]),
        axis=None)
    rM.set_parameters(params)
    return rM

def conditional(model, d):
    """Generates the conditional RTBM.

    Args:
        model (theta.rtbm.RTBM): RTBM modelling the original PDF
        d (numpy.array): column vector with the values on which we condition

    Returns:
        theta.rtbm.RTBM: RTBM modelling the conditional probability P(y|d)
    """
    nh = model._Nh
    nv = model._Nv

    assert nv > 1, "cannot take the conditional probability of a 1d distribution"
    assert d.size < nv, "d larger than Nv"

    # Number of free (non-conditioned) visible units
    k = int(nv - d.size)

    cmodel = rtbm.RTBM(k, nh)

    # Blocks of the parameter matrix restricted to the free visible units
    t = model.t[:k, :k]
    t = t[np.triu_indices(k)]
    q = model.q[np.triu_indices(nh)]
    w = model.w[:k]

    # Biases shifted by the conditioned values d
    bh = model.bh + np.dot(model.w[k:].T, d)
    bv = model.bv[:k] + np.dot(model.t[:k, k:], d)

    cparams = np.concatenate((bv, bh, w, t, q), axis=None)
    cmodel.set_parameters(cparams)

    return cmodel
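
Both helpers return a new theta.rtbm.RTBM instance, so they can be applied directly to an existing model. A minimal usage sketch, assuming a 2-dimensional RTBM M (initialised here only for illustration; in practice one would pass a trained model such as the one in Example #3):

import numpy as np
from theta import rtbm

M = rtbm.RTBM(2, 4, random_bound=1)      # stand-in for a trained 2d model

M90 = rotate(M)                          # the same density rotated by pi/2
cM = conditional(M, np.array([[0.5]]))   # 1d RTBM modelling P(y | x2 = 0.5)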
Example #3
import sys
sys.path.append('../../main')

import conditional as con
import utils as ut

import numpy as np
import matplotlib.pyplot as plt
from theta import rtbm, minimizer
from theta.costfunctions import logarithmic

# Loading data
n = 5000
data = np.load('gaussian-mix-sample.npy')

# RTBM model
M = rtbm.RTBM(2, 4, random_bound=1, init_max_param_bound=30)

# Training
minim = minimizer.CMA(True)
#solution = minim.train(logarithmic(), M, data.T, tolfun=1e-3)    # uncomment if you want training to take place,
                                                                  # otherwise use the pre-trained model below

# Pre-trained model
M.set_parameters(
    np.array([
        1.02735619e-05, -3.63860337e-04, 2.15571853e+00, -2.96561961e-01,
        1.47561952e+01, 9.63622840e-01, -1.03625774e+01, -8.31445704e+00,
        -2.55729017e-01, -7.01183003e+00, 4.09970744e+00, -3.38018523e+00,
        1.70457837e+00, 6.56773389e-01, 1.49908760e+01, 1.15771402e+00,
        1.50000000e+01, 1.50000000e+01, 4.59256632e+00, 3.45065143e-01,
        2.12470448e+00, 1.50000000e+01, 4.29228972e-02, -1.66464616e+00,
Example #4
import numpy as np
import matplotlib.pyplot as plt
from theta import rtbm
# CMA_es, KullbackLeibler and AMSGrad below are assumed to come from the
# project's own optimizer and cost-function modules.


def gaussian_mixture(n):
    # 1d mixture of three Gaussians (weights 0.6 / 0.1 / 0.3); this header and the
    # next two lines are reconstructed, since the original snippet starts mid-function
    u = np.random.uniform(0, 1, n)   # uniform draw selecting the mixture component
    v = np.zeros(n)
    for i in range(n):
        if u[i] < 0.6:
            v[i] = np.random.normal(-0.7, 0.3)
        elif u[i] < 0.7:
            v[i] = np.random.normal(0.2, 0.1)
        else:
            v[i] = np.random.normal(0.5, 0.5)
    return v


n = 10000
data = gaussian_mixture(n)
data = data.reshape(1, data.size)

# RTBM model
model = rtbm.RTBM(1, 2, random_bound=1)

# Training
minim = CMA_es(KullbackLeibler(model), model, data, empirical_cost=True)
#minim = AMSGrad(KullbackLeibler(model), model, data, empirical_cost=True)

solution = minim.train(popsize=30, m2=20)  # this is for CMA_es
#solution = minim.train()                  # this is for AMSGrad

# Plotting

figure = plt.axes()
figure.hist(data.flatten(), bins=50, density=True, color='grey')

x = np.linspace(figure.get_xlim()[0],
                figure.get_xlim()[1], 100).reshape(1, 100)
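
The snippet stops just before drawing the learned density. A minimal continuation sketch, under the assumption that the trained RTBM instance can be evaluated by calling it on an array of shape (1, Npoints):

# Overlay the learned density on the histogram (assumes model(x) evaluates the probability)
figure.plot(x.flatten(), model(x).flatten(), color='red', linewidth=2)
plt.show()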
Example #5
import numpy as np
from theta import rtbm, minimizer
from theta.costfunctions import logarithmic

# `student` is assumed to be a bivariate Student-t distribution object defined
# elsewhere in the project, exposing sampling(), pdf() and conditional() methods.
data = student.sampling()


# Building grid for evaluation
n = 50
xx = np.linspace(-5,5, n)
yy = np.linspace(-5,5, n)
X,Y = np.meshgrid(xx,yy)
grid = np.vstack([X,Y]).reshape(2,-1)

# Analytic pdf
pdf = np.asarray([student.pdf(grid.T[i]) for i in range(n**2)]).flatten()


# RTBM model
M = rtbm.RTBM(2,2, random_bound=1, init_max_param_bound=60)

# Training
minim = minimizer.CMA(True)
#solution = minim.train(logarithmic, M, data.T, tolfun=1e-3)    # uncomment if you want training to take place,
                                                                # otherwise use the pre-trained model below

# Pre-trained model
M.set_parameters(np.array([
     1.74587313e-03, -9.13477122e-04,  8.22063600e+00,  1.74029397e+01,
    -1.10959475e+00,  1.02118882e+00, -6.59149043e-01,  5.99301112e-01,
     5.57746556e-01,  1.84310133e-01,  3.04781259e-01,  2.41516805e+01,
    -4.40890318e-01,  4.15665779e+01]))



# Analytic conditional
x = np.array([-1, 0.1, 1.9]) # fixed x at which we calculate the conditional probability p(y|x)

cstudent1 = student.conditional(x[0].reshape(1,1))
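
The listing stops after building the first analytic conditional. A minimal continuation sketch comparing it with the RTBM conditional obtained via the conditional() helper from Example #1; it assumes that helper is available here and that the object returned by student.conditional exposes a pdf() method, like student itself:

yy1 = np.linspace(-5, 5, 200)

# RTBM conditional P(y | x = -1), built from the trained model M
cM1 = conditional(M, x[0].reshape(1, 1))

# Analytic conditional density evaluated on the same grid (assumes a pdf() method)
cpdf1 = np.asarray([cstudent1.pdf(np.array([y])) for y in yy1]).flatten()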