Example #1
# Assumed imports and context for this snippet: numpy/pymc3, plus the
# module-level sampler settings (nDraws, nTune, nChains, nCores, tuneBool,
# nTuneInter, tune_thr, tallyBool, scaAl, convChecksBool) and the
# project2diagonalisedError helper defined elsewhere in the original module.
import numpy as np
import pymc3 as pm


def getTrace(alMean, Sal, case):
    # prior bounds
    allLow = case.xAllT - case.allDelta
    allUpp = case.xAllT + case.allDelta

    # draw one start point per chain, centred on alMean with spread Sal
    alSeed = np.random.randn(case.nChains, case.nFree) * Sal
    alSeed += alMean
    start = [{'xAl': alSeed[i]} for i in range(case.nChains)]

    projectionModel = pm.Model()
    with projectionModel:
        # Priors for unknown model parameters
        xAl = pm.Uniform('xAl', lower=allLow, upper=allUpp, shape=allLow.shape,
                         transform=None)
        xAl.tag.test_value = case.xAllT
        xyMNor = project2diagonalisedError(xAl)

        Y_obs = pm.Normal('Y_obs', mu=xyMNor, sd=1, observed=case.observedNormed)

        step = pm.DEMetropolis(vars=[xAl], S=Sal, tune=tuneBool,
                               tune_interval=nTuneInter, tune_throughout=tune_thr,
                               tally=tallyBool, scaling=scaAl)
        # force the tuning attributes directly so the constructor
        # arguments take effect in this PyMC3 version
        step.tune = tuneBool
        step.lamb = scaAl
        step.scaling = scaAl

        trace = pm.sample(draws=nDraws, step=step, start=start,
                          tune=nTune, chains=nChains, cores=nCores,
                          progressbar=True, discard_tuned_samples=False,
                          compute_convergence_checks=convChecksBool)

    return trace
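
# A hypothetical usage sketch (an assumption, not part of the original module):
# `case` must expose the attributes getTrace reads; project2diagonalisedError
# and the module-level sampler settings must also be defined for this to run.
from types import SimpleNamespace

nFree = 3
case = SimpleNamespace(
    xAllT=np.zeros(nFree),           # central parameter values
    allDelta=np.ones(nFree),         # half-width of the uniform prior box
    nChains=8,
    nFree=nFree,
    observedNormed=np.zeros(nFree),  # normalised observations
)
trace = getTrace(alMean=case.xAllT, Sal=0.1 * np.ones(nFree), case=case)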
Example #2
    # Assumed imports for this fragment: theano.tensor as tt, pymc3 as pm,
    # scipy.stats as stats; `p` and `s` are built in the truncated code above.
    s = tt.stack(s, axis=0)

    # Log-likelihood function
    def logp(data):
        observedRenewed = data[0, :]
        observedReleased = data[1, :]

        # log-probability contribution of the entries released each year
        released = tt.mul(tt.log(p[1:]), observedReleased[1:])

        # log-probability contribution of the entries still renewed in the final year
        renewed = tt.log(s[-1]) * observedRenewed[-1]
        return released.sum() + renewed

    retention = pm.DensityDist('retention', logp, observed=data)
    step = pm.DEMetropolis()
    trace = pm.sample(10000, step=step, tune=2000)

# Maximum a posteriori estimators for the model
mapValues = pm.find_MAP(model=BdWwithcfromNorm)

# Extract alpha and beta MAP-estimators
betaParams = mapValues.get('alpha').item(), mapValues.get('beta').item()

theta = stats.beta.mean(betaParams[0], betaParams[1])
cHat = mapValues.get('c').item()
rvar = stats.beta.var(betaParams[0], betaParams[1])


# Define a Discrete Weibull distribution
def DiscreteWeibull(q, b, x):
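    # The original snippet is cut off at the signature. A plausible body, based
    # on the standard Discrete Weibull pmf, P(X = x) = q**(x**b) - q**((x+1)**b)
    # (an assumption, not recovered from the source):
    return tt.log(tt.power(q, tt.power(x, b)) - tt.power(q, tt.power(x + 1, b)))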
Example #3
})
#%%
plt.figure()
plt.plot(elbos1, alpha=.3, label='ELBO')  # label assumed from the variable name; elbos1 comes from an earlier, truncated cell
plt.legend()
#%% NUTS
with mixedEffect:
    trace = pm.sample(3000, njobs=2, tune=1000)

pm.traceplot(trace, lines={
    'w': w0,
    'z': z0
})
#%% DEMetropolis
with mixedEffect:
    tracede = pm.sample(5000, njobs=50, tune=1000, step=pm.DEMetropolis())

pm.traceplot(tracede, lines={
    'w': w0,
    'z': z0
})
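
#%% diagnostics
# A small follow-up sketch (an assumption, not part of the original script):
# comparing the NUTS and DEMetropolis runs with PyMC3's built-in diagnostics.
print(pm.gelman_rubin(trace))    # R-hat per variable for the NUTS run
print(pm.gelman_rubin(tracede))  # R-hat per variable for the DEMetropolis run
print(pm.effective_n(tracede))   # effective sample sizes for the DEMetropolis run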
#%% atmcmc
from tempfile import mkdtemp
from pymc3.step_methods import smc

test_folder = mkdtemp(prefix='ATMIP_TEST')

n_chains = 500
samples = 1000
tune_interval = 25
n_jobs = 1
Example #4
                      shape=len(thresh), observed=thresh_obs)
    zbeta0 = pm.Normal('zbeta0', mu=float(n_cat)/2., tau=1./n_cat**2)
    zbeta = pm.Normal('zbeta', mu=0., tau=1./n_cat**2, shape=Z_data.shape[1])
    zsigma = pm.Uniform('zsigma', n_cat/1000., n_cat*10.)
    # Linear model
    mu = pm.Deterministic('mu', zbeta0 + pm.math.dot(zbeta, Z_data.T))
    # Link function
    pr = outcome_probabilities(theta, mu, zsigma)
    # For the *robust* version of the ordered "probit" regression
    # comment the previous line and uncomment the following lines
    ##nu = pm.Exponential('nu', lam=1./30.)
    ##pr = outcome_probabilities_robust(theta, mu, zsigma, nu)
    # Likelihood
    y = pm.Categorical('y', pr, observed=y_train_log)
    # MCMC (it is not possible to use gradient-based samplers)
    step_M = pm.DEMetropolis()  # experimental sampler!
    chain = pm.sample(draws=32000, tune=4000, step=step_M, chains=4, cores=4)


# In[30]:


burnin = 2000
thin = 6
# Trace after burn-in and thinning
trace = chain[burnin::thin]
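
# A small follow-up sketch (an assumption, not part of the original notebook):
# summarising the thinned trace with PyMC3's built-in helpers.
pm.summary(trace)    # posterior means, standard deviations and credible intervals
pm.traceplot(trace)  # visual check of mixing after burn-in and thinning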


# In[31]:

Example #5
    def InitPyMCSampling(self, **kwargs):
        '''
        PyMC3 initialisation: Sampler
            N_tune : number of tuning steps (default 0)
            N_chains : number of MCMC chains (default 1)
            N_cores : number of cores to sample on (default min(4, N_chains))
            IsProgressbar : whether to show the progress bar (default 1)
            Sampler_Name : "Metropolis", "DEMetropolis" or "Hamiltonian" (default "Metropolis")
        '''
        # Checking if all the necessary stuff is loaded
        if not self.VarNames:
            self.InitPar(kwargs["ParFile"])
        try:
            self.Cov[0][0]
        except TypeError:
            self.SetCovMatrix(Scale=1.2)
        if not self.basic_model:
            self.InitPyMC()

        # Further initialisation
        print(' >> Logging calculation steps in {}'.format(self.log_file_name))
        open(self.log_file_name, 'w+').close()
        with self.basic_model:
            Sampler_Name = kwargs.get("Sampler_Name", "Metropolis")
            N_tune = kwargs.get("N_tune", 0)
            N_chains = kwargs.get("N_chains", 1)
            N_cores = kwargs.get("N_cores", min(4, N_chains))
            IsProgressbar = kwargs.get("IsProgressbar", 1)
            print(
                '\n >> using configuration :  {:12}, N_tune = {}, N_chains = {}, N_cores = {}'
                .format(Sampler_Name, N_tune, N_chains, N_cores))

            self.S = Storage_Container(
                3 * N_chains * len(self.VarNames))  # updating the cache size

            # Setting up the samplers
            #   Calling S = self.Cov[::-1, ::-1] is a necessary hack to avoid a problem in the PyMC3 code:
            #   the order of the variables is inverted (by accident?) during BlockStep().__init__() (see terminal prompts)
            if Sampler_Name == "DEMetropolis":
                step = pm.DEMetropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal)

            elif Sampler_Name == "Metropolis":
                step = pm.Metropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal,
                    blocked=True)

            elif Sampler_Name == "Hamiltonian":
                # The settings for HMC are very tricky: allowing adapt_step_size=True
                # may lead to very small step sizes, causing the method to get stuck.
                length = max(
                    0.3, 1.5 * np.sqrt(np.sum(np.array(self.STDs)**2))
                )  # this is the length in the parameter-space to travel between two points
                #length = np.sqrt(self.STDs) * np.mean(self.STDs)
                sub_l = length / 7  # setting substeps
                step = pm.HamiltonianMC(scaling=self.Cov[::-1, ::-1],
                                        adapt_step_size=0,
                                        step_scale=sub_l,
                                        path_length=length,
                                        is_cov=True)

                step.adapt_step_size = False  # workaround for a PyMC3 bug ('adapt_step_size=0' is ignored)

                print(
                    ' >> Hamiltonian settings: {:7.4f} / {:7.4f}  = {:4} substeps between points'
                    .format(length, sub_l / (len(self.STDs)**0.25),
                            int(length / (sub_l / (len(self.STDs)**0.25)))))

            else:
                print(
                    ' >> Unknown Sampler_Name = {:20}, Using Metropolis instead'
                    .format(Sampler_Name))
                step = pm.Metropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal,
                    blocked=True)

            self.Custom_sample_args = {
                "step": step,
                "progressbar": IsProgressbar,
                "chains": N_chains,
                "cores": N_cores,
                "tune": N_tune,
                #"parallelize" : True,
            }

        self.trace = None
        self.Prev_End = None
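
    # A hypothetical companion method (an assumption, not part of the original
    # class), showing how the Custom_sample_args prepared above would typically
    # be consumed:
    def RunPyMCSampling(self, N_draws=1000):
        with self.basic_model:
            self.trace = pm.sample(draws=N_draws, **self.Custom_sample_args)
        return self.trace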