# Imports assumed by the PyMC2 snippets in this listing
import numpy as np
import scipy.stats as stats
import matplotlib.pyplot as plt
import pymc as pm
from pymc.Matplot import plot as mcplot


def main():
    # Code to create artificial data
    N = 100
    X = 0.025 * np.random.randn(N)
    Y = 0.5 * X + 0.01 * np.random.randn(N)

    std = pm.Uniform("std", 0, 100, trace=False)

    @pm.deterministic
    def prec(U=std):
        return 1.0 / (U) ** 2

    beta = pm.Normal("beta", 0, 0.0001)
    alpha = pm.Normal("alpha", 0, 0.0001)


    @pm.deterministic
    def mean(X=X, alpha=alpha, beta=beta):
        return alpha + beta * X

    obs = pm.Normal("obs", mean, prec, value=Y, observed=True)
    mcmc = pm.MCMC([obs, beta, alpha, std, prec])

    mcmc.sample(100000, 80000)
    mcplot(mcmc)
def main():
    data = np.loadtxt("data/mixture_data.csv", delimiter=",")

    p = pm.Uniform("p", 0, 1)

    assignment = pm.Categorical("assignment", [p, 1 - p], size=data.shape[0])

    taus = 1.0 / pm.Uniform("stds", 0, 100, size=2) ** 2
    centers = pm.Normal("centers", [120, 190], [0.01, 0.01], size=2)

    """
    The below deterministic functions map an assignment, in this case 0 or 1,
    to a set of parameters, located in the (1,2) arrays `taus` and `centers`.
    """

    @pm.deterministic
    def center_i(assignment=assignment, centers=centers):
        return centers[assignment]

    @pm.deterministic
    def tau_i(assignment=assignment, taus=taus):
        return taus[assignment]

    # and to combine it with the observations:
    observations = pm.Normal("obs", center_i, tau_i, value=data, observed=True)

    # below we create a model class
    model = pm.Model([p, assignment, observations, taus, centers])

    map_ = pm.MAP(model)
    map_.fit()  # stores each fitted variable's value in its .value attribute

    mcmc = pm.MCMC(model)
    # The second argument (50000) is the number of burn-in iterations: these
    # initial samples are discarded and do not contribute to the posterior.
    mcmc.sample(100000, 50000)

    p_trace = mcmc.trace("p")[:]
    center_trace = mcmc.trace("centers")[:]
    std_trace = mcmc.trace("stds")[:]
    x = 175

    v = ((p_trace *
          stats.norm.pdf(x, loc=center_trace[:, 0], scale=std_trace[:, 0])) >
         ((1 - p_trace) *
          stats.norm.pdf(x, loc=center_trace[:, 1], scale=std_trace[:, 1])))

    # If you try this without the 50000 burn-in iterations, the certainty that
    # the data point belongs to cluster 0 is much lower.
    print "Probability of belonging to cluster 1:", v.mean()
    print "Probability of belonging to cluster 0:", 1 - v.mean()

    mcmc.sample(25000, 0, 10)
    mcplot(mcmc.trace("centers", 2), common_scale=False)
Example No. 4
def main():
    # Code to create artificial data
    N = 100
    X = 0.025 * np.random.randn(N)
    Y = 0.5 * X + 0.01 * np.random.randn(N)

    ls_coef_ = np.cov(X, Y)[0, 1] / np.var(X)
    ls_intercept = Y.mean() - ls_coef_ * X.mean()

    plt.scatter(X, Y, c="k")
    plt.xlabel("trading signal")
    plt.ylabel("returns")
    plt.title("Empirical returns vs trading signal")
    plt.plot(X, ls_coef_ * X + ls_intercept, label="Least-squares line")
    plt.xlim(X.min(), X.max())
    plt.ylim(Y.min(), Y.max())
    plt.legend(loc="upper left")
    plt.show()

    std = pm.Uniform("std", 0, 100, trace=False)

    @pm.deterministic
    def prec(U=std):
        return 1.0 / (U) ** 2

    beta = pm.Normal("beta", 0, 0.0001)
    alpha = pm.Normal("alpha", 0, 0.0001)


    @pm.deterministic
    def mean(X=X, alpha=alpha, beta=beta):
        return alpha + beta * X

    obs = pm.Normal("obs", mean, prec, value=Y, observed=True)
    mcmc = pm.MCMC([obs, beta, alpha, std, prec])

    mcmc.sample(100000, 80000)
    mcplot(mcmc)
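    # Follow-up sketch (not part of the original example): compare the posterior
    # means of alpha and beta with the least-squares estimates computed above.
    beta_post = mcmc.trace("beta")[:].mean()
    alpha_post = mcmc.trace("alpha")[:].mean()
    print "Posterior means:  intercept =", alpha_post, " slope =", beta_post
    print "Least squares:    intercept =", ls_intercept, " slope =", ls_coef_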
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from params import *  # noqa
# import os
# import matplotlib.pyplot as plt
import pymc
from pymc.Matplot import plot as mcplot

if __name__ == '__main__':
    mcmc = pymc.database.pickle.load(PICKLE_PATH)
    mcplot(mcmc.trace('ws'), common_scale=False)
Example No. 8
# Load the pickled pymc database and attach it to the model to retrieve the saved traces
#----------------------------------------------------------------------
Mdb = pymc.database.pickle.load(Path + mcmcfile)
M = pymc.MCMC(model_objects, db=Mdb)
"""
A. Generate composite figure with trace plot, autocorrelation plot, and
   posterior distribution for the Global-specific parameters. The figure is
   displayed and also automatically saved in your current folder.

   For Example:
   The following few lines of code generate the composite figures for the
   globalprameters. You can add more lines for other parameters.
"""
print " Generating Trace plots ,autocorrelation plots, posterior distribution ..... "

mcplot(M.trace("mu_r_G"), common_scale=False)
mcplot(M.trace("mu_Lmax_G"), common_scale=False)
mcplot(M.trace("mu_L0_G"), common_scale=False)
mcplot(M.trace('tau_r_line'), common_scale=False)
mcplot(M.trace('tau_r_indv'), common_scale=False)
mcplot(M.trace('tau_Lmax_line'), common_scale=False)
mcplot(M.trace('tau_Lmax_indv'), common_scale=False)
mcplot(M.trace('tau_L0_line'), common_scale=False)
mcplot(M.trace('tau_L0_indv'), common_scale=False)
plt.show()

# NOTE: You can print and save the summary of the parameters' posterior stats.
#       To print the summary stats for a global parameter, "mu_r_0", here is the
#       example syntax:
#
#       M.mu_r_0.summary()
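#
#       A hedged sketch of one way to dump the posterior statistics for every
#       traced variable at once (assumes the loaded sampler M defined above;
#       the exact keys of the stats dictionary may vary by PyMC version):
#
#       stats = M.stats()
#       for name, s in stats.items():
#           print name, s["mean"], s["95% HPD interval"]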
# (This snippet begins mid-function: the enclosing @pm.deterministic, presumably
#  named `means`, combines the stochastics `mean_x` and `mean_y` into a 2-vector.)
    return np.array([mean_x, mean_y])

@pm.deterministic
def mean_i(means=means, category=category):
    return means[category]

# One Wishart-distributed precision matrix per Gaussian component
taus_ = []
for i in xrange(nGaussians):
    taus_.append(pm.Wishart('tau%d' % i, n=4, Tau=np.eye(2)))

taus_ = np.asarray(taus_)

@pm.deterministic
def taus(taus=taus_):
    return taus

@pm.deterministic
def tau_i(taus=taus, category=category):
    return taus[category]

data = pm.MvNormal("data", mean_i, tau_i, value=img_coords, observed=True)

model = pm.Model([means, mean_i, mean_x, mean_y, taus, tau_i, data])

mcmc = pm.MCMC(model)
mcmc.sample(20000, burn=2000, thin=2)

# The deterministic nodes are registered as "means" and "taus", so those are the
# trace names (the original snippet asked for "mean"/"tau", which do not exist).
mean_samples = mcmc.trace('means')[:]
tau_samples = mcmc.trace('taus')[:]

mcplot(mcmc, common_scale=False)
Example No. 10
def plot_parcorr(m):
    parnames = ["a","z","p"]
    for par in parnames:
        mcplot(m.trace(par), common_scale=False)
        plt.savefig(par + ".pdf", bbox_inches='tight', edgecolor='none')
        plt.close()
Example No. 11
    def auto_plots(self):
        mcplot(self.model.predictive_0, common_scale=False)
        mcplot(self.model.predictive_1, common_scale=False)
        # mcplot(self.model.predictive_2, common_scale=False)
        # mcplot(self.model.predictive_3, common_scale=False)
        # mcplot(self.model.predictive_4, common_scale=False)

        mcplot(self.model.mu_0, common_scale=False)
        mcplot(self.model.mu_1, common_scale=False)
        # mcplot(self.model.mu_2, common_scale=False)
        # mcplot(self.model.mu_3, common_scale=False)
        # mcplot(self.model.mu_4, common_scale=False)

        mcplot(self.model.sigma_0, common_scale=False)
        mcplot(self.model.sigma_1, common_scale=False)
        # mcplot(self.model.sigma_2, common_scale=False)
        # mcplot(self.model.sigma_3, common_scale=False)
        # mcplot(self.model.sigma_4, common_scale=False)

        # mcplot(self.model.theta_0, common_scale=False)
        # mcplot(self.model.theta_1, common_scale=False)
        # mcplot(self.model.theta_2, common_scale=False)
        # mcplot(self.model.theta_3, common_scale=False)
        # mcplot(self.model.theta_4, common_scale=False)

        mcplot(self.model.categ_0, common_scale=False)
        mcplot(self.model.categ_1, common_scale=False)
        mcplot(self.model.categ_45, common_scale=False)
        mcplot(self.model.categ_46, common_scale=False)
Example No. 12
import numpy as np
from matplotlib import pylab as plt
import dive_model
#import noba_model
import pymc
from pymc import MCMC
from pymc.Matplot import plot as mcplot

M = MCMC(dive_model)

M.sample(iter=2000000, burn=0, thin=10, verbose=0)
mcplot(M)

plt.hist([M.trace('intrinsic_rate')[:]], 500, label='intrinsic')
plt.hist([M.trace('social_rate')[:]], 500, label='social')
plt.legend(loc='upper left')
plt.xlim(0, 0.2)
plt.show()

d1 = M.trace('blind_angle')[:]
bc = d1 * 180 / np.pi  # convert radians to degrees
plt.hist(bc, 20)
plt.xlim(0, 380)
plt.show()

plt.hist([M.trace('lag')[:]])
plt.legend(loc='upper left')
plt.xlim(0, 5)
plt.show()

plt.hist([M.trace('dist')[:]], 100)
Example No. 13
#tester


from matplotlib import pylab as plt
import numpy as np

import moveModel
import pymc
from pymc import MCMC
from pymc.Matplot import plot as mcplot
M = MCMC(moveModel)
plt.close('all')
#M.use_step_method(pymc.AdaptiveMetropolis, [M.left_angle, M.right_angle, M.lag, M.dist],  delay=1000)
M.sample(iter=20000, burn=10, thin=10, verbose=0)
mcplot(M)
#from pylab import hist, show

#
#plt.hist(M.trace('reprho')[:])
#plt.xlim(0,1)

#plt.title('repulsion strength')

#plt.savefig('repulsion_strength.png')
#plt.show()
#plt.hist(M.trace('attrho')[:])
#plt.xlim(0,1)

#plt.title('attraction strength')

#plt.savefig('attraction_strength.png')
Example No. 14
mcmcA = bayes_lin_regr(data['Signal_A'].values, data['Distance_A'].values,
                       200000, 10000, 50)

mcmcB = bayes_lin_regr(data['Signal_B'].values, data['Distance_B'].values,
                       200000, 10000, 50)

mcmcC = bayes_lin_regr(data['Signal_C'].values, data['Distance_C'].values,
                       200000, 10000, 50)

# Spit out mcmc statistics

names = ["mcmcA", "mcmcB", "mcmcC"]
for i, mcmc in enumerate([mcmcA, mcmcB, mcmcC]):
    trc = mcmc.trace
    mcplot(trc('alpha'))
    plt.savefig("Plots/" + names[i] + "_alpha.png")
    mcplot(trc('beta'))
    plt.savefig("Plots/" + names[i] + "_beta.png")
    mcplot(trc('error'))
    plt.savefig("Plots/" + names[i] + "_error.png")

# Now we need a function that can take the 3 circles and estimate where they
# all cross (a hedged sketch follows at the end of this example)

data.iloc[1]  # notebook-style peek at one observation row (no effect in a plain script)

Signal_A_ = 1.201608
Signal_B_ = 1.031228
Signal_C_ = 1.893498
Position_X_ = 122
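# Hedged sketch of the circle-intersection step referred to above: given the
# three distances estimated by the regressions and the (here made-up) beacon
# coordinates, find the point whose distances to the beacons best match the
# estimated radii.  All names below are illustrative placeholders.
import numpy as np
from scipy.optimize import minimize


def estimate_position(beacons, distances):
    """Least-squares trilateration over three (or more) circles."""
    def sq_error(pos):
        return sum((np.hypot(pos[0] - bx, pos[1] - by) - d) ** 2
                   for (bx, by), d in zip(beacons, distances))
    x0 = (np.mean([b[0] for b in beacons]), np.mean([b[1] for b in beacons]))
    return minimize(sq_error, x0).x

# Example call with made-up beacon positions and radii:
# estimate_position([(0, 0), (100, 0), (50, 100)], [70.0, 80.0, 60.0])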
Example No. 15
# ====================  ==============
import numpy as np
import pymc as pm
data = np.array([[145,   1,   2,   1,   0],
                 [  5, 256,  22,   9,   6],
                 [  5,  24, 234,  36,  19],
                 [  1,  18,  32, 243,  25],
                 [  1,   5,   9,  38, 254]])
n_class = np.array([np.sum(i) for i in data])
D = np.sum(data)

# a simple demo of Beta-Binomial conjugacy
p = pm.Beta("p", alpha=1, beta=1)
n = pm.Binomial("Bino", n=19, p=p, value=5, observed=True)
mcmc = pm.MCMC([n,p])
mcmc.sample(25000)

# %matplotlib inline  (IPython notebook magic; not valid in a plain script)
from pymc.Matplot import plot as mcplot
mcplot(mcmc.trace("p"), common_scale=False)

# a simple demo of Dirichlet-Multinomial conjugacy
N = 5  # dimension
beta = np.ones(N)
mu = pm.Dirichlet("mu", theta=beta)
cmu = pm.CompletedDirichlet("cmu", D=mu)

n = pm.Multinomial('n', n=D, p=cmu, value=n_class, observed=True)

alpha = np.ones(N)

theta = pm.Container([pm.Dirichlet("theta_%s" % i, theta=alpha)
                      for i in range(N)])
ctheta = pm.Container([pm.CompletedDirichlet("ctheta_%s" % i, D=theta[i])
                       for i in range(N)])
# The next statement was truncated in the source; the trailing arguments
# (value=data[i], observed=True) are an assumed completion that follows the
# pattern of the Multinomial defined above.
c = pm.Container([pm.Multinomial("c_%s" % i, n=n_class[i], p=theta[i],
                                 value=data[i], observed=True)
                  for i in range(N)])
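# Hedged sketch of how the truncated demo might be finished: sample the joint
# model and inspect the completed Dirichlet posterior with mcplot.  Which nodes
# belong in the MCMC list is an assumption based on the variables defined above.
mcmc = pm.MCMC([mu, cmu, n, theta, ctheta, c])
mcmc.sample(25000)
mcplot(mcmc.trace("cmu"), common_scale=False)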
Example No. 16
    
# (Fragment: it assumes `mcmc`, `colors`, and the trace arrays `mus_samples`,
#  `stds_samples`, and `assignment_samples` were created earlier in the example.)
for i in range(2):
    plt.hist(mus_samples[:, i], histtype="stepfilled", color=colors[i],
             label=r"Posterior samples of $\mu_%d$" % i, alpha=0.7)

plt.legend()
plt.clf()
for i in range(2):
    plt.hist(stds_samples[:, i], histtype="stepfilled", color=colors[i],
             label=r"Posterior samples of $\sigma_%d$" % i, alpha=0.7)

plt.legend()

cluster1_freq = assignment_samples.sum(axis=1) / float(assignment_samples.shape[1])

plt.clf()
plt.plot(cluster1_freq, color="g", lw=3)
plt.ylim(0, 1)

# Continue sampling

mcmc.sample(100000)
mus_samples = mcmc.trace("mus", chain=1)[:]
prev_mus_samples = mcmc.trace("mus", chain=0)[:]

cluster1_probs = assignment_samples.mean(axis=0)

from pymc.Matplot import plot as mcplot

mcplot(mcmc.trace("mus"), common_scale=False)
Example No. 17
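# The bar plots below call an `autocorr` helper that is not shown in this
# snippet.  A minimal sketch of one common definition (normalized sample
# autocorrelation via np.correlate); the original helper may differ slightly:
import numpy as np


def autocorr(x):
    result = np.correlate(x, x, mode="full")
    result = result / np.max(result)
    return result[result.size // 2:]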
plt.bar(x,
        autocorr(y_t)[1:max_x],
        edgecolor=colors[0],
        label="no thinning",
        color=colors[0],
        width=1)
plt.bar(x,
        autocorr(y_t[::2])[1:max_x],
        edgecolor=colors[1],
        label="keeping every 2nd sample",
        color=colors[1],
        width=1)
plt.bar(x,
        autocorr(y_t[::3])[1:max_x],
        width=1,
        edgecolor=colors[2],
        label="keeping every 3rd sample",
        color=colors[2])

plt.autoscale(tight=True)
plt.legend(title="Autocorrelation plot for $y_t$", loc="lower left")
plt.ylabel("measured correlation \nbetween $y_t$ and $y_{t-k}$.")
plt.xlabel("k (lag)")
plt.title("Autocorrelation of $y_t$ (no thinning vs. thinning) \
at differing $k$ lags.")

# Using PyMC's built-in plotting, so we don't have to rebuild the plot in matplotlib every time.

mcmc.sample(25000, 0, 10)
mcplot(mcmc.trace("centers", 2), common_scale=False)