Example #1
from __future__ import division;
import Plot_Sampler_Tools as pst;
import pymc as pm;
import warnings;

def run_Bernoulli_Normal():
    #Cluster 1
    Uh1 = pm.Uniform('UnifH1', lower=-50, upper=50);  # @UndefinedVariable
    Nc1 = pm.Normal('NormC1', mu=Uh1, tau=1)#, observed=True, value=10);  # @UndefinedVariable
    #Cluster 2
    Uh2 = pm.Uniform('UnifH2', lower=-50, upper=50);  # @UndefinedVariable
    Nc2 = pm.Normal('NormC2', mu=Uh2, tau=1)#, observed=True, value=10);  # @UndefinedVariable
    #Points
    normalObs1 = pm.Normal('NormX1', mu=Nc1, tau=1, observed=True, value=-10);  # @UndefinedVariable
    normalObs2 = pm.Normal('NormX2', mu=Nc2, tau=1, observed=True, value=10);  # @UndefinedVariable
    return [Nc1,Nc2,Uh1,Uh2,normalObs1,normalObs2];

if __name__ == '__main__':
    warnings.filterwarnings("ignore");
    aNodes = run_Bernoulli_Normal();
    model = pst.get_Model(aNodes);    
    mcmc = pst.sample_MCMC(model, 10000);
    pst.plot_Samples(mcmc, nBins=500);
#     graph = pm.graph.graph(model);
#     graph.write_pdf("./graph4.pdf");
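Plot_Sampler_Tools is a local helper module (pst.get_Model, pst.sample_MCMC and pst.plot_Samples are not part of PyMC). If it is not available, the same model can be sampled and plotted with stock PyMC 2 calls; a minimal sketch, assuming PyMC 2.3 with matplotlib installed:

import pymc as pm

aNodes = run_Bernoulli_Normal()
model = pm.Model(aNodes)              # collect the stochastics into a Model
mcmc = pm.MCMC(model)                 # default step methods (Metropolis)
mcmc.sample(iter=10000, burn=1000)    # draw samples, discard a burn-in
pm.Matplot.plot(mcmc)                 # trace/histogram/autocorrelation per node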
    
Example #2
    aC = [pm.Categorical('Cat' + str(i), Dir) for i in range(nPts)]
    aL = [
        pm.Lambda('p_Norm' + str(i), lambda k=aC[i], aNcl=aNc: aNcl[int(k)])
        for i in range(nPts)
    ]
    #Points
    aN = [
        pm.Normal('NormX' + str(i),
                  mu=aL[i],
                  tau=1,
                  observed=True,
                  value=aD[i]) for i in range(nPts - 1)
    ]
    Nz = pm.Normal('NormZ', mu=aL[-1], tau=1)
    return np.concatenate([[Nz, Dir], aUh, aNc, aC, aN])


if __name__ == '__main__':
    warnings.filterwarnings("ignore")
    aNodes = run_Categorical_Normal()
    model = pst.get_Model(aNodes)
    mcmc = pst.sample_MCMC(model, 1000)
    pst.plot_Samples(mcmc, nBins=500, nCols=3)
#     graph = pm.graph.graph(model);
#     graph.write_pdf("./graph7.pdf");
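The Example #2 listing starts mid-way through run_Categorical_Normal(); the block that defines Dir, aUh, aNc, aD and nPts is not shown. A purely hypothetical sketch of what that missing head could look like, following the Uniform-hyperprior / Normal-cluster pattern of Example #1 (the cluster count nCl, the data values and the variable layout are assumptions, not the original code):

import pymc as pm

def run_Categorical_Normal(nCl=2):
    # Hypothetical reconstruction of the missing prior block (assumed, not original):
    aD = [-10, -10, 10, 10]                           # assumed observed points
    nPts = len(aD) + 1                                # last point is the unobserved query NormZ
    Dir = pm.Dirichlet('Dir', theta=[1.0] * nCl)      # mixture weights
    aUh = [pm.Uniform('UnifH' + str(j), lower=-50, upper=50) for j in range(nCl)]
    aNc = [pm.Normal('NormC' + str(j), mu=aUh[j], tau=1) for j in range(nCl)]
    # ... continues with the aC / aL / aN / Nz / return lines shown above.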
Example #3
from __future__ import division;
import Plot_Sampler_Tools as pst;
import numpy as np;
import pymc as pm;
import warnings;

def getModel():
    D = pm.Dirichlet('1-Dirichlet', theta=[3,2,4]); #@UndefinedVariable
    C1 = pm.Categorical('2-Cat', D); #@UndefinedVariable
    C2 = pm.Categorical('10-Cat', D); #@UndefinedVariable
    C3 = pm.Categorical('11-Cat', D); #@UndefinedVariable
    W0_0 = pm.WishartCov('4-Wishart0_1', n=5, C=np.eye(2)); #@UndefinedVariable
    N0_1 = pm.MvNormalCov('5-Norm0_1', mu=[-20,-20], C=np.eye(2)); #@UndefinedVariable
    N0_2 = pm.MvNormalCov('6-Norm0_2', mu=[0,0], C=np.eye(2)); #@UndefinedVariable
    N0_3 = pm.MvNormalCov('7-Norm0_3', mu=[20,20], C=np.eye(2)); #@UndefinedVariable
    aMu = [N0_1.value, N0_2.value, N0_3.value];
    fL1 = lambda n=C1: np.select([n==0, n==1, n==2], aMu);
    fL2 = lambda n=C2: np.select([n==0, n==1, n==2], aMu);
    fL3 = lambda n=C3: np.select([n==0, n==1, n==2], aMu);
    p_N1 = pm.Lambda('p_Norm1', fL1, doc='Pr[Norm|Cat]');
    p_N2 = pm.Lambda('p_Norm2', fL2, doc='Pr[Norm|Cat]');
    p_N3 = pm.Lambda('p_Norm3', fL3, doc='Pr[Norm|Cat]');
    N = pm.MvNormalCov('3-Norm', mu=p_N1, C=W0_0); #@UndefinedVariable
    obsN1 = pm.MvNormalCov('8-Norm', mu=p_N2, C=W0_0, observed=True, value=[-20,-20]); #@UndefinedVariable @UnusedVariable
    obsN2 = pm.MvNormalCov('9-Norm', mu=p_N3, C=W0_0, observed=True, value=[20,20]); #@UndefinedVariable @UnusedVariable
    return pm.Model([D,C1,C2,C3,N,W0_0,N0_1,N0_2,N0_3,obsN1,obsN2]);

if __name__ == '__main__':
    warnings.filterwarnings("ignore");
    mcmc = pst.sample_MCMC(getModel(), 5000);
    aBins = [100]*11;
    aBins[1] = 3;
    aBins[9] = 3;
    aBins[10] = 3;
    aHidden = [4,5,6,7,10,11];
    pst.plot_Samples(mcmc, aBins=aBins, aKDE=[], aRowCols=[3,1], aHidden=aHidden);
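pst.sample_MCMC presumably returns the underlying pm.MCMC object (it is passed straight to pst.plot_Samples). Under that assumption, the per-node traces can also be read directly with the stock trace() API, for example to estimate the posterior component probabilities of the latent point '3-Norm':

import numpy as np

cat_trace = np.ravel(mcmc.trace('2-Cat')[:]).astype(int)            # sampled component index of '3-Norm'
print(np.bincount(cat_trace, minlength=3) / float(len(cat_trace)))  # posterior P(component)
cov_trace = mcmc.trace('4-Wishart0_1')[:]                           # sampled 2x2 covariance matrices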
Example #4
from __future__ import division
import Plot_Sampler_Tools as pst
import numpy as np
import pymc as pm
import warnings


def getModel():
    D = pm.Dirichlet('1-Dirichlet', theta=[2, 1, 2, 4])
    #     p_B = pm.Lambda('p_Bern', lambda b=B: np.where(b==0, 0.9, 0.1), doc='Pr[Bern|Beta]');
    C = pm.Categorical('2-Cat', D)
    #     C = pm.Categorical('1-Cat', [0.2, 0.4, 0.1, 0.3], observed=True, value=3); #@UndefinedVariable
    p_N = pm.Lambda(
        'p_Norm',
        lambda n=C: np.select([n == 0, n == 1, n == 2, n == 3],
                              [[-5, -5], [0, 0], [5, 5], [10, 10]]),
        doc='Pr[Norm|Cat]')
    N = pm.MvNormal('3-Norm_2D', mu=p_N, tau=np.eye(2, 2))
    #     N = pm.MvNormal('2-Norm_2D', mu=p_N, tau=np.eye(2,2), observed=True, value=[2.5,2.5]); #@UndefinedVariable
    return pm.Model([D, C, N])


if __name__ == '__main__':
    warnings.filterwarnings("ignore")
    mcmc = pst.sample_MCMC(getModel(), 10000)
    pst.plot_Samples(mcmc, aBins=[100, 4, 100], aKDE=[])
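The p_Norm deterministic relies on np.select to map the sampled category index to a 2-D mean. A standalone check of just that mapping, outside any PyMC model, shows what the Lambda evaluates to for each index:

import numpy as np

# For each category index n, np.select picks the matching 2-D mean vector.
for n in range(4):
    mu = np.select([n == 0, n == 1, n == 2, n == 3],
                   [[-5, -5], [0, 0], [5, 5], [10, 10]])
    print(n, mu)   # 0 -> [-5 -5], 1 -> [0 0], 2 -> [5 5], 3 -> [10 10]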
Example #5
    N0_3 = pm.Normal('7-Norm0_3', mu=30, tau=1)
    N0_4 = pm.Normal('16-Norm0_4', mu=-30, tau=1)
    aMu = [N0_1.value, N0_2.value, N0_3.value, N0_4.value]
    p_N1 = pm.Lambda('p_Norm1', lambda n=C1: aMu[n], doc='Pr[Norm|Cat]')
    #     p_N2 = pm.Lambda('p_Norm2', lambda n=C2: aMu[n], doc='Pr[Norm|Cat]');
    #     p_N3 = pm.Lambda('p_Norm3', lambda n=C3: aMu[n], doc='Pr[Norm|Cat]');
    #     p_N4 = pm.Lambda('p_Norm4', lambda n=C4: aMu[n], doc='Pr[Norm|Cat]');
    #     p_N5 = pm.Lambda('p_Norm6', lambda n=C5: aMu[n], doc='Pr[Norm|Cat]');
    N = pm.Normal('3-Norm', mu=p_N1, tau=1)
    #     obsN1 = pm.Normal('8-Norm', mu=p_N2, tau=1, observed=True, value=40); #@UndefinedVariable @UnusedVariable
    #     obsN2 = pm.Normal('9-Norm', mu=p_N3, tau=1, observed=True, value=40); #@UndefinedVariable @UnusedVariable
    #     obsN3 = pm.Normal('12-Norm', mu=p_N4, tau=1, observed=True, value=-40); #@UndefinedVariable @UnusedVariable
    #     obsN4 = pm.Normal('13-Norm', mu=p_N5, tau=1, observed=True, value=-40); #@UndefinedVariable @UnusedVariable
    return pm.Model([D, C1, N, N0_1, N0_2, N0_3, N0_4])


#     return pm.Model([D,C1,C2,C3,N,G0_0,N0_1,N0_2,N0_3,N0_4,N,obsN1,obsN2,obsN3,obsN4]);

if __name__ == '__main__':
    warnings.filterwarnings("ignore")
    mcmc = pst.sample_MCMC(getModel(), 10000)
    aBins = [100] * len(mcmc.nodes)
    for i in [1]:
        aBins[i] = 4
    aShow = [1, 2, 3]
    #[4,5,6,7,10,11  ,12,13,14,15,16];
    pst.plot_Samples(mcmc, aBins=aBins, aKDE=[], aRowCols=[3, 1], aShow=aShow)
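Note that aMu is built from .value snapshots, so the mean selected by p_Norm1 is fixed at model-construction time and does not track the N0_* stochastics while sampling. Examples #2 and #6 instead pass the list of Normal nodes itself as a parent of the Lambda (PyMC 2 wraps the list in a Container, so the element values update at every iteration); the same pattern here would read as follows (a sketch, not the original code):

aNcl = [N0_1, N0_2, N0_3, N0_4]   # the stochastics themselves, not their .value snapshots
p_N1 = pm.Lambda('p_Norm1', lambda n=C1, aNcl=aNcl: aNcl[int(n)], doc='Pr[Norm|Cat]')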
Example #6
    aC = [pm.Categorical('Cat' + str(i), Dir) for i in range(nPts)]
    aL = [
        pm.Lambda('p_Norm' + str(i), lambda k=aC[i], aNcl=aNc: aNcl[int(k)])
        for i in range(nPts)
    ]
    #Points
    aN = [
        pm.Normal('NormX' + str(i),
                  mu=aL[i],
                  tau=1,
                  observed=True,
                  value=aD[i]) for i in range(nPts - 1)
    ]
    Nz = pm.Normal('NormZ', mu=aL[-1], tau=1)
    return np.concatenate([[Nz, Dir, Gam], aUh, aNc, aC, aN])


if __name__ == '__main__':
    warnings.filterwarnings("ignore")
    aNodes = run_DP()
    model = pst.get_Model(aNodes)
    mcmc = pst.sample_MCMC(model, 5000)
    pst.plot_Samples(mcmc, nBins=500, nCols=4, aShow=range(8))
#     graph = pm.graph.graph(model);
#     graph.write_pdf("./graph9.pdf");
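The Example #6 listing likewise starts mid-way through run_DP(). Since the returned node list includes Gam alongside Dir, the missing head presumably places a Gamma prior on a concentration parameter and feeds it into a symmetric Dirichlet over a fixed number of clusters, i.e. a finite truncation of a Dirichlet-process mixture. A hypothetical sketch of just that part (the names, the hyperparameters and the cluster count nCl are assumptions; cluster and data priors as in the Example #2 sketch above):

Gam = pm.Gamma('Gamma', alpha=1.0, beta=1.0)        # assumed concentration prior
Dir = pm.Dirichlet('Dir', theta=[Gam / nCl] * nCl)  # symmetric weights, each Gam/nCl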
Example #7
from __future__ import division;
import Plot_Sampler_Tools as pst;
import pymc as pm;
import warnings;
    
def getModel():
    nA, nB, nK = 5, 2, 10;
    B = pm.Beta('1-Beta', alpha=nA/nK, beta=nB*(nK-1)/nK); #@UndefinedVariable
#     p_B = pm.Lambda('p_Bern', lambda b=B: np.where(b==0, 0.9, 0.1), doc='Pr[Bern|Beta]');
    C = pm.Categorical('2-Cat', [1-B, B]); #@UndefinedVariable
#     C = pm.Categorical('1-Cat', [0.2, 0.4, 0.1, 0.3], observed=True, value=3); #@UndefinedVariable
    return pm.Model([B,C]);

if __name__ == '__main__':
    warnings.filterwarnings("ignore");
    mcmc = pst.sample_MCMC(getModel(), 1000);    
    pst.plot_Samples(mcmc, aBins=[100,2,100], aKDE=[0,0,0]);
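Passing [1-B, B] directly works because PyMC 2 turns the arithmetic on the stochastic and the enclosing list into deterministic containers. The same link can be written with an explicit pm.Lambda, matching the commented-out p_Bern pattern above; a small sketch (the names p_C and 'p_Cat' are assumptions):

import numpy as np;
p_C = pm.Lambda('p_Cat', lambda b=B: np.array([1.0 - b, b]), doc='Pr[Cat|Beta]');
C = pm.Categorical('2-Cat', p_C);  #@UndefinedVariable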