Example #1
import os
import sys

import numpy as np
import theano.tensor as th
from theano import shared
from pymc3 import (Model, Deterministic, Gamma, MvNormal, DensityDist,
                   NUTS, sample)


def doMCMC(n, nxx, nxy, nyy, x):

    # Optional setting for reproducibility
    use_seed = False

    d = nxx.shape[0]  # dimensionality of the regression coefficients
    ns = 2000         # number of posterior samples to draw
    if use_seed:
        seed = 42

    # Disable printing (suppress sampler output)
    sys.stdout = open(os.devnull, 'w')

    # Sufficient statistics
    NXX = shared(nxx)
    NXY = shared(nxy)
    NYY = shared(nyy)

    # Define model and perform MCMC sampling
    with Model() as model:

        # Fixed hyperparameters for priors
        b0 = Deterministic('b0', th.zeros((d), dtype='float64'))
        ide = Deterministic('ide', th.eye(d, m=d, k=0, dtype='float64'))

        # Priors for parameters
        l0 = Gamma('l0', alpha=2.0, beta=2.0)
        l = Gamma('l', alpha=2.0, beta=2.0)
        b = MvNormal('b', mu=b0, tau=l0 * ide, shape=d)

        # Custom log likelihood
        def logp(xtx, xty, yty):
            return (n / 2.0) * th.log(l / (2 * np.pi)) + (-l / 2.0) * (
                th.dot(th.dot(b, xtx), b) - 2 * th.dot(b, xty) + yty)

        # Likelihood
        delta = DensityDist('delta',
                            logp,
                            observed={
                                'xtx': NXX,
                                'xty': NXY,
                                'yty': NYY
                            })

        # Inference
        print('doMCMC: start NUTS')
        step = NUTS()
        if use_seed:
            trace = sample(ns, step, progressbar=True, random_seed=seed)
        else:
            trace = sample(ns, step, progressbar=True)

    # Enable printing
    sys.stdout = sys.__stdout__

    # Compute prediction over posterior
    return np.mean([np.dot(x, trace['b'][i]) for i in range(ns)], 0)
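A minimal sketch of how doMCMC might be invoked, assuming synthetic data; the shapes, names, and coefficients below are illustrative and not part of the original snippet.

# Hypothetical driver (illustrative only)
n, d = 100, 3
X = np.random.randn(n, d)
true_b = np.array([1.0, -2.0, 0.5])
y = X.dot(true_b) + np.random.randn(n)

# Pass the sufficient statistics X'X, X'y, y'y and the points to predict at
pred = doMCMC(n, X.T.dot(X), X.T.dot(y), float(y.dot(y)), X)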
Example #2
    def fit(self, base_models_predictions, true_targets,
            model_identifiers=None):

        ba = BayesianAverage()
        weight_vector = ba.fit(base_models_predictions, true_targets)
        default = True

        base_models_predictions = base_models_predictions.transpose()
        n_basemodels = base_models_predictions.shape[2]
        with Model() as basic_model:
            # Prior over the ensemble weights
            HalfNormal('weights', sd=1, shape=n_basemodels)
            # Likelihood: categorical over the weighted ensemble prediction
            ensemble_pred = np.dot(base_models_predictions, weight_vector)
            Categorical('likelihood', p=ensemble_pred.transpose(), observed=true_targets)

        with basic_model:
            start = find_MAP(model=basic_model)
            # NUTS by default; Metropolis only when explicitly requested
            if not default:
                step = Metropolis()
            else:
                step = NUTS()
            trace = sample(self.n_samples, step=step, start=start)
        # Discard burn-in draws
        trace = trace[5000:]
        self.sampled_weights = trace["weights"]
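A hypothetical follow-up, not part of the original class: one way the stored weight samples might be used at prediction time is to average them, as in the sketch below.

    def predict(self, base_models_predictions):
        # Hypothetical helper (assumed, not from the source): average the
        # posterior weight samples and weight the base model predictions.
        mean_weights = np.mean(self.sampled_weights, axis=0)
        combined = np.dot(base_models_predictions.transpose(), mean_weights)
        return combined.transpose()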
Example #3
File: mengpymc.py Project: crockct/meng
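The excerpt below assumes a basic_model with an 'alpha' free variable (it later reads trace['alpha']), plus find_MAP, scipy.optimize, and matplotlib already available. A minimal placeholder in that spirit, purely illustrative and not taken from the project:

import numpy as np
import matplotlib.pyplot as plt
from scipy import optimize
from pymc3 import Model, Normal, HalfNormal, find_MAP

# Placeholder data and model so the excerpt has something to sample from
x_obs = np.random.randn(100)
y_obs = 1.0 + 2.0 * x_obs + np.random.randn(100)

with Model() as basic_model:
    alpha = Normal('alpha', mu=0, sd=10)
    beta = Normal('beta', mu=0, sd=10)
    sigma = HalfNormal('sigma', sd=1)
    Normal('y', mu=alpha + beta * x_obs, sd=sigma, observed=y_obs)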
map_estimate = find_MAP(model=basic_model, fmin=optimize.fmin_powell)

print(map_estimate)


from pymc3 import NUTS, sample
from pymc3 import traceplot

with basic_model:

    # obtain starting values via MAP
    start = find_MAP(fmin=optimize.fmin_powell)

    # instantiate sampler
    step = NUTS(scaling=start)

    # draw 2000 posterior samples
    trace = sample(2000, step, start=start)
    # Peek at the last five posterior samples of alpha (displays only in a notebook)
    trace['alpha'][-5:]
    traceplot(trace)
    plt.show()




from pymc3 import summary
summary(trace)

n = 500
p = 0.3
Example #4
import numpy as np
import matplotlib.pyplot as plt
from scipy import optimize
from scipy.stats import norm
import pymc3 as pm
from pymc3 import NUTS, sample, summary


def gaith(datagaith):
    # datagaith=pd.DataFrame(datagaith)
    data = datagaith.iloc[:-1, 0].values
    # print(data)
    # Fit a normal distribution to the data:
    mu, std = norm.fit(data)

    # Plot the histogram.
    plt.hist(data, bins=25, density=True, alpha=0.6, color='g')

    # Plot the PDF.
    xmin, xmax = plt.xlim()
    x = np.linspace(xmin, xmax, 100)
    pdf = norm.pdf(x, mu, std)

    # plt.plot(x, pdf, 'k', linewidth=2)
    # title = "Fit results: mu = %.2f,  std = %.4f" % (mu, std)
    # plt.title(title)
    # plt.show()

    # prior

    prior = datagaith.iloc[-1, 0]
    stdprior = 0.002

    x = np.linspace(prior - .01, prior + .01, 100)

    p = norm.pdf(x, prior, stdprior)

    # plt.plot(x, p, 'k', linewidth=2)
    # title = "Fit results: mu = %.2f,  std = %.4f" % (prior, stdprior)
    # plt.title(title)
    # plt.show()

    # with pm.Model():
    #     mu1 =pdf
    #
    # niter = 20
    # with pm.Model():
    #     mu = mu
    #     sd = std
    #     y = pm.Normal('y', mu=mu, sd=sd, observed=data)
    #     start = pm.find_MAP(fmin=optimize.fmin_powell)
    #     print("************************")
    #     print(start)
    #     step = pm.NUTS(scaling=start)
    #     trace = pm.sample(niter, start=start, step=step)

    model = pm.Model()
    with model:
        mu1 = pm.Normal("mu1", mu=mu, sd=std, shape=10)
    with model:
        # step = pm.NUTS()
        # trace = pm.sample(2000, tune=1000, init=None, step=step, cores=2)

        # obtain starting values via MAP
        start = pm.find_MAP(fmin=optimize.fmin_powell)

        # instantiate sampler
        step = NUTS(scaling=start)

        # draw 2000 posterior samples
        trace = sample(10, step, start=start)

    # for i in summary(trace).iterrows():
    #     print(str(i))
    # if (i[1]) == (min(summary(trace)['mc_error'])):
    #     print(i)

    stats = summary(trace)
    a = stats.loc[stats['mc_error'] == stats['mc_error'].min()]
    return a, mu, prior
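A hypothetical call, assuming datagaith is a single-column DataFrame whose last row carries the prior mean; the values are made up for illustration.

# Hypothetical driver (illustrative only)
import pandas as pd

observations = np.random.normal(loc=0.5, scale=0.01, size=50)
frame = pd.DataFrame(np.append(observations, 0.5))  # last row is the prior mean
best_row, fitted_mu, prior_mu = gaith(frame)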
Example #5
start = {
    'Q_ratematrixoneway': Q_raw_log,
    'B_logodds': B_lo,
    'B0_logodds': B0_lo,
    'S': S_start,
    'X': X_start,
    'Z_logodds': Z_lo,
    'L_logodds': L_lo
}
#teststart = {'Q_ratematrixoneway': Q_raw_log, 'B_logodds':B_lo, 'B0_logodds':B0_lo, 'S':S_start, 'X':X_start, 'Z_logodds':Z_lo, 'L_logodds':L_lo, 'pi_stickbreaking':np.ones(M)/float(M)}
#start = {'Q_ratematrixoneway': Q_raw_log, 'B_logodds':B_lo, 'B0_logodds':B0_lo, 'S':S_start, 'X':X_start, 'Z_logodds':Z_lo, 'L_logodds':L_start}

with model:
    #import pdb; pdb.set_trace()

    steps = []
    steps.append(NUTS(vars=[pi]))
    #steps.append(NUTS(vars=[pi], scaling=np.ones(M-1)*0.058))
    #steps.append(Metropolis(vars=[pi], scaling=0.058, tune=False))
    steps.append(NUTS(vars=[Q], scaling=np.ones(M - 1, dtype=float) * 10.))
    #steps.append(Metropolis(vars=[Q], scaling=0.2, tune=False))
    steps.append(
        ForwardS(vars=[S], nObs=nObs, T=T, N=N, observed_jumps=obs_jumps))
    steps.append(NUTS(vars=[B0, B]))
    #steps.append(Metropolis(vars=[B0], scaling=0.2, tune=False))
    #steps.append(NUTS(vars=[B]))
    #steps.append(Metropolis(vars=[B], scaling=0.198, tune=False))
    steps.append(ForwardX(vars=[X], N=N, T=T, K=K, D=D, Dd=Dd, O=O, nObs=nObs))
    #steps.append(NUTS(vars=[Z], scaling=np.ones(K*D)))
    steps.append(Metropolis(vars=[Z], scaling=0.0132, tune=False))
    steps.append(NUTS(vars=[L], scaling=np.ones(D)))
    #steps.append(Metropolis(vars=[L],scaling=0.02, tune=False, ))
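    # Hypothetical continuation (not part of the original excerpt): the step
    # list and start dict would presumably be handed to sample(); the draw
    # count below is an arbitrary placeholder.
    trace = sample(1000, step=steps, start=start)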
Example #6
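This example assumes year and disaster_data arrays defined earlier; because the model reads disasters.missing_values[0], disaster_data must be a masked array with missing entries. A minimal placeholder with made-up values (not the real disaster counts):

import numpy as np
import matplotlib.pyplot as plt

year = np.arange(1851, 1962)
disaster_data = np.ma.masked_values(np.random.poisson(2, size=year.size), value=-999)
disaster_data[[40, 83]] = np.ma.masked  # mark two years as missing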
plt.ylabel("Disaster count")
plt.xlabel("Year")

plt.show()

from pymc3 import DiscreteUniform, Poisson, switch, Model, Exponential, NUTS, Metropolis, sample, traceplot

with Model() as disaster_model:

    switchpoint = DiscreteUniform('switchpoint',
                                  lower=year.min(),
                                  upper=year.max(),
                                  testval=1900)

    # Priors for the pre- and post-switch disaster rates
    early_rate = Exponential('early_rate', 1)
    late_rate = Exponential('late_rate', 1)

    # Allocate the appropriate Poisson rate to years before and after the switchpoint
    rate = switch(switchpoint >= year, early_rate, late_rate)

    disasters = Poisson('disasters', rate, observed=disaster_data)

    step1 = NUTS([early_rate, late_rate])

    # Use Metropolis for the switchpoint and the missing values, since it handles discrete variables
    step2 = Metropolis([switchpoint, disasters.missing_values[0]])

    trace = sample(10000, step=[step1, step2])

traceplot(trace)
Example #7
File: GHME_2013.py Project: zaczou/pymc3
    sd = StudentT('sd', 10, 2, 5**-2)

    vals = Normal('vals', p, sd=sd, observed=rate)

# <markdowncell>

# Model Fitting
# -------------

# <codecell>

with model:
    s = find_MAP(vars=[sd, y])

    step = NUTS(scaling=s)
    trace = sample(100, step, s)

    s = trace[-1]

    step = NUTS(scaling=s)


def run(n=3000):
    if n == "short":
        n = 150
    with model:
        trace = sample(n, step, s)
    # <codecell>

    for i, country in enumerate(countries):
Example #8
from pymc3 import traceplot

traceplot(trace)
plt.show()
'''
NUTS Sampler
'''

from pymc3 import NUTS, sample

with basic_model:

    # Use the maximum-likelihood point (MLpoint, found earlier in the file) as the start
    start = MLpoint

    # Hessian at the ML point (useToAs is defined earlier in the file)
    hess = hessian(useToAs)

    # Set scaling using the Hessian
    step = NUTS(scaling=hess)
    # draw 2000 posterior samples
    trace = sample(2000, step, start=start)

from pymc3 import traceplot

traceplot(trace)
plt.show()

# Fraction of draws where the sampled phase changed between successive samples
accept = np.float64(np.sum(trace['phase'][1:] != trace['phase'][:-1]))
print("Acceptance Rate: ", accept / trace['phase'].shape[0])
Example #9
    'B_logodds': B_lo,
    'B0_logodds': B0_lo,
    'S': S_start,
    'X': X_start,
    'Z_anchoredbeta': Z_lo,
    'L_logodds': L_lo
}
#start = {'Q_ratematrixoneway': Q_raw_log, 'B_logodds':B_lo, 'B0_logodds':B0_lo, 'S':S_start, 'X':X_start, 'Z_logodds':Z_lo, 'L_logodds':L_lo}

with model:

    steps = []
    if 'pi' in args.constantVars:
        steps.append(Constant(vars=[pi]))
    else:
        steps.append(NUTS(vars=[pi]))
        #steps.append(Metropolis(vars=[pi], scaling=0.058, tune=False))
    if 'Q' in args.constantVars:
        steps.append(Constant(vars=[Q]))
    else:
        steps.append(NUTS(vars=[Q], scaling=np.ones(M - 1, dtype=float) * 10.))
        #steps.append(Metropolis(vars=[Q], scaling=0.2, tune=False))
    if 'S' in args.constantVars:
        steps.append(Constant(vars=[S]))
    else:
        steps.append(
            ForwardS(vars=[S], nObs=nObs, T=T, N=N, observed_jumps=obs_jumps))
    if 'B0' in args.constantVars:
        steps.append(Constant(vars=[B0]))
        if 'B' in args.constantVars: