Example #1
    def test_mixture_list_of_normals(self):
        with Model() as model:
            w = Dirichlet('w', floatX(np.ones_like(self.norm_w)))
            mu = Normal('mu', 0., 10., shape=self.norm_w.size)
            tau = Gamma('tau', 1., 1., shape=self.norm_w.size)
            Mixture('x_obs',
                    w, [
                        Normal.dist(mu[0], tau=tau[0]),
                        Normal.dist(mu[1], tau=tau[1])
                    ],
                    observed=self.norm_x)
            step = Metropolis()
            trace = sample(5000,
                           step,
                           random_seed=self.random_seed,
                           progressbar=False,
                           chains=1)

        assert_allclose(np.sort(trace['w'].mean(axis=0)),
                        np.sort(self.norm_w),
                        rtol=0.1,
                        atol=0.1)
        assert_allclose(np.sort(trace['mu'].mean(axis=0)),
                        np.sort(self.norm_mu),
                        rtol=0.1,
                        atol=0.1)
Example #2
    def test_mixture_list_of_normals(self):
        with Model() as model:
            w = Dirichlet("w",
                          floatX(np.ones_like(self.norm_w)),
                          shape=self.norm_w.size)
            mu = Normal("mu", 0.0, 10.0, shape=self.norm_w.size)
            tau = Gamma("tau", 1.0, 1.0, shape=self.norm_w.size)
            Mixture(
                "x_obs",
                w,
                [
                    Normal.dist(mu[0], tau=tau[0]),
                    Normal.dist(mu[1], tau=tau[1])
                ],
                observed=self.norm_x,
            )
            step = Metropolis()
            trace = sample(5000,
                           step,
                           random_seed=self.random_seed,
                           progressbar=False,
                           chains=1)

        assert_allclose(np.sort(trace["w"].mean(axis=0)),
                        np.sort(self.norm_w),
                        rtol=0.1,
                        atol=0.1)
        assert_allclose(np.sort(trace["mu"].mean(axis=0)),
                        np.sort(self.norm_mu),
                        rtol=0.1,
                        atol=0.1)
Example #3
    def test_normal_mixture(self):
        with Model() as model:
            w = Dirichlet('w', np.ones_like(self.norm_w))

            mu = Normal('mu', 0., 10., shape=self.norm_w.size)
            tau = Gamma('tau', 1., 1., shape=self.norm_w.size)

            x_obs = NormalMixture('x_obs',
                                  w,
                                  mu,
                                  tau=tau,
                                  observed=self.norm_x)

            step = Metropolis()
            trace = sample(5000,
                           step,
                           random_seed=self.random_seed,
                           progressbar=False)

        assert_allclose(np.sort(trace['w'].mean(axis=0)),
                        np.sort(self.norm_w),
                        rtol=0.1,
                        atol=0.1)
        assert_allclose(np.sort(trace['mu'].mean(axis=0)),
                        np.sort(self.norm_mu),
                        rtol=0.1,
                        atol=0.1)
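
These tests reference fixtures such as self.norm_w, self.norm_mu and self.norm_x that are defined elsewhere in the test class. A minimal sketch of how such mixture data could be generated (the component values here are illustrative assumptions, not the suite's actual fixtures):

import numpy as np

norm_w = np.array([0.75, 0.25])   # true mixture weights
norm_mu = np.array([0.0, 5.0])    # true component means
n = 1000
# draw a component index per observation, then sample from that component
component = np.random.choice(norm_w.size, size=n, p=norm_w)
norm_x = np.random.normal(loc=norm_mu[component], scale=1.0)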
Example #4
    def test_mixture_list_of_poissons(self):
        with Model() as model:
            w = Dirichlet('w',
                          floatX(np.ones_like(self.pois_w)),
                          shape=self.pois_w.shape)
            mu = Gamma('mu', 1., 1., shape=self.pois_w.size)
            Mixture(
                'x_obs',
                w,
                [Poisson.dist(mu[0]), Poisson.dist(mu[1])],
                observed=self.pois_x)
            step = Metropolis()
            trace = sample(5000,
                           step,
                           random_seed=self.random_seed,
                           progressbar=False,
                           chains=1)

        assert_allclose(np.sort(trace['w'].mean(axis=0)),
                        np.sort(self.pois_w),
                        rtol=0.1,
                        atol=0.1)
        assert_allclose(np.sort(trace['mu'].mean(axis=0)),
                        np.sort(self.pois_mu),
                        rtol=0.1,
                        atol=0.1)
Example #5
    def init_sampler(self, hypers=False):
        """
        Initialise the Sampling algorithm as defined in the configuration file.
        """

        if hypers:
            sc = self.config.hyper_sampler_config
        else:
            sc = self.config.sampler_config

        if self.model is None:
            raise Exception(
                'Model has to be built before initialising the sampler.')

        with self.model:
            if sc.name == 'Metropolis':
                logger.info(
                    '... Initiate Metropolis ... \n'
                    ' proposal_distribution %s, tune_interval=%i,'
                    ' n_jobs=%i \n' % (
                        sc.parameters.proposal_dist,
                        sc.parameters.tune_interval,
                        sc.parameters.n_jobs))

                t1 = time.time()
                if hypers:
                    step = Metropolis(
                        tune_interval=sc.parameters.tune_interval,
                        proposal_dist=atmcmc.proposal_dists[
                            sc.parameters.proposal_dist])
                else:
                    step = atmcmc.ATMCMC(
                        n_chains=sc.parameters.n_jobs,
                        tune_interval=sc.parameters.tune_interval,
                        likelihood_name=self._like_name,
                        proposal_name=sc.parameters.proposal_dist)
                t2 = time.time()
                logger.info('Compilation time: %f' % (t2 - t1))

            elif sc.name == 'ATMCMC':
                logger.info(
                    '... Initiate Adaptive Transitional Metropolis ... \n'
                    ' n_chains=%i, tune_interval=%i, n_jobs=%i \n' % (
                        sc.parameters.n_chains, sc.parameters.tune_interval,
                        sc.parameters.n_jobs))

                t1 = time.time()
                step = atmcmc.ATMCMC(
                    n_chains=sc.parameters.n_chains,
                    tune_interval=sc.parameters.tune_interval,
                    coef_variation=sc.parameters.coef_variation,
                    proposal_dist=sc.parameters.proposal_dist,
                    likelihood_name=self._like_name)
                t2 = time.time()
                logger.info('Compilation time: %f' % (t2 - t1))

        if self._seismic_flag:
            self.engine.close_cashed_stores()

        return step
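
For reference, a hypothetical sketch of the configuration shape this method expects, reconstructed only from the attributes it reads above (the class names and values are assumptions, not the project's real config API):

class SamplerParameters:
    proposal_dist = 'Normal'   # assumed: must be a key of atmcmc.proposal_dists
    tune_interval = 50
    n_jobs = 4
    n_chains = 1000
    coef_variation = 1.0

class SamplerConfig:
    name = 'ATMCMC'            # or 'Metropolis'
    parameters = SamplerParameters()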
Example #6
    def test_poisson_mixture(self):
        with Model() as model:
            w = Dirichlet("w", floatX(np.ones_like(self.pois_w)), shape=self.pois_w.shape)
            mu = Gamma("mu", 1.0, 1.0, shape=self.pois_w.size)
            Mixture("x_obs", w, Poisson.dist(mu), observed=self.pois_x)
            step = Metropolis()
            trace = sample(5000, step, random_seed=self.random_seed, progressbar=False, chains=1)

        assert_allclose(np.sort(trace["w"].mean(axis=0)), np.sort(self.pois_w), rtol=0.1, atol=0.1)
        assert_allclose(
            np.sort(trace["mu"].mean(axis=0)), np.sort(self.pois_mu), rtol=0.1, atol=0.1
        )
Example #7
def test_plots_multidimensional():

    # Test single trace
    from .models import multidimensional_model

    start, model, _ = multidimensional_model()
    with model:
        h = np.diag(find_hessian(start))
        step = Metropolis(model.vars, h)
        trace = sample(3000, step, start)

        traceplot(trace)
Example #8
def test_multichain_plots():

    from pymc3.examples import disaster_model as dm

    with dm.model as model:
        # Run sampler
        step1 = Slice([dm.early_mean, dm.late_mean])
        step2 = Metropolis([dm.switchpoint])
        start = {'early_mean': 2., 'late_mean': 3., 'switchpoint': 50}
        ptrace = sample(1000, [step1, step2], start, njobs=2)

    forestplot(ptrace, varnames=['early_mean', 'late_mean'])

    autocorrplot(ptrace, varnames=['switchpoint'])
Example #9
def test_plots():

    # Test single trace
    from pymc3.examples import arbitrary_stochastic as asmod

    with asmod.model as model:

        start = model.test_point
        h = find_hessian(start)
        step = Metropolis(model.vars, h)
        trace = sample(3000, step, start)

        traceplot(trace)
        forestplot(trace)

        autocorrplot(trace)
Example #10
    def fit(self, base_models_predictions, true_targets,
            model_identifiers=None):

        ba = BayesianAverage()
        weight_vector = ba.fit(base_models_predictions, true_targets)
        default = True

        base_models_predictions = base_models_predictions.transpose()
        n_basemodels = base_models_predictions.shape[2]
        with Model() as basic_model:
            # define prior over the ensemble weights
            HalfNormal('weights', sd=1, shape=n_basemodels)
            # define likelihood function
            ensemble_pred = np.dot(base_models_predictions, weight_vector)
            Categorical('likelihood', p=ensemble_pred.transpose(),
                        observed=true_targets)

        with basic_model:
            start = find_MAP(model=basic_model)
            # NUTS by default; Metropolis only when default is False
            if default:
                step = NUTS()
            else:
                step = Metropolis()
            trace = sample(self.n_samples, step=step, start=start)
        # discard burn-in samples
        trace = trace[5000:]
        self.sampled_weights = trace["weights"]
Example #11
    C = Dirichlet('mixture_coeff',
                  dirichlet_scale * dirichlet_shape,
                  shape=nclusters)
    S = HalfNormal('S', sd=sd_halfnormal, shape=nclusters)
    U = Normal('mu', mu=mean_prior_mean, sd=mean_prior_sd, shape=nclusters)
    Y = Categorical('labels', p=C, shape=nsamples)
    X = Normal('X', mu=U[Y], sd=S[Y], observed=X_obs)

from pymc3 import find_MAP
map_estimate = find_MAP(model=gmm)
print(map_estimate)

from pymc3 import NUTS, sample, Slice, Metropolis, ElemwiseCategorical, HamiltonianMC

modified_map_estimate = copy.deepcopy(map_estimate)
modified_map_estimate['mu'] = [
    1 if x < 0.001 else x for x in modified_map_estimate['mu']
]

with gmm:
    # step = Slice(vars=[Y])
    # step = Metropolis(var=)
    start = copy.deepcopy(map_estimate)
    step1 = ElemwiseCategorical(vars=[Y])
    step2 = Metropolis(vars=[S, C, U])
    # trace = sample(100, step=step, start=map_estimate)
    trace = sample(20000, step=[step1, step2], start=start)

from pymc3 import traceplot
traceplot(trace)
plt.show()
Example #12
from pymc3 import Metropolis, sample, find_MAP
from scipy import optimize

trace_copy = {}
with basic_model:

    # obtain starting values via MAP
    start = find_MAP(fmin=optimize.fmin_powell)

    # draw 100 posterior samples with a Metropolis sampler
    trace = sample(100, step=Metropolis(), start=start)
    trace_copy = trace

# thin the trace by keeping every second sample
thin_factor = 2

print(trace['c'][0:9])
trace = trace[0::thin_factor]
print(trace['c'][0:9])

#summary(trace)
#traceplot(trace);
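
The same thinning can also be applied directly to arrays pulled out of the unthinned copy; a one-line sketch (assuming NumPy is imported as np):

thinned_c = np.asarray(trace_copy['c'])[::thin_factor]  # keep every 2nd draw of 'c'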
Example #13
'''
Metropolis Hastings Sampler
'''

MLpoint = ML(useToAs)
hess = hessian(useToAs)

from pymc3 import Metropolis, sample

with basic_model:

    # Use starting ML point
    start = MLpoint

    #hess = hessian(useToAs)

    step1 = Metropolis(vars=[amplitude, offset, noise, phase],
                       h=np.diag(basic_model.dict_to_array(hess)))

    # draw 10000 posterior samples
    trace = sample(10000, start=start, step=step1)

from pymc3 import traceplot

traceplot(trace)
plt.show()

accept = np.float64(np.sum(trace['phase'][1:] != trace['phase'][:-1]))
print "Acceptance Rate: ", accept / trace['phase'].shape[0]
'''
HMC Sampler
'''
Example #14
    sigma = HalfNormal('sigma', sd=1)

    # you can print searched values after every iteration
    lf_print = T.printing.Print('lf')(lf)
    # deterministic value, found in the model
    mu = model(rule_firing, lf_print)

    # the same deterministic value, without the printing:
    # mu = model(rule_firing, lf)

    # Likelihood (sampling distribution) of observations
    Normal('Y_obs', mu=mu, sd=sigma, observed=Y)

    # Slice should be used for continuous variables, but it sometimes gets
    # stuck - you can also use Metropolis
    step = Metropolis(basic_model.vars)

    # step = Slice(basic_model.vars)

    trace = sample(10, step, njobs=2, init='auto')

print(summary(trace))
traceplot(trace)
pp.savefig("plot_u8_estimating_using_pymc3.png")
print(trace['lf'], trace['rule_firing'])
print(gelman_rubin(trace))

print(
    "Of course, many more things can be explored this way: more parameters "
    "could be studied, their priors could be better adjusted, etc."
)
Example #15
def simple_init():
    start, model, moments = simple_model()
    step = Metropolis(model.vars, np.diag([1.0]), model=model)
    return model, start, step, moments
Example #16
    mu_temp = c * T * ((T - T0) * (T0 < T)) * np.sqrt((Tm - T) * (Tm > T))
    mu = 0 * (mu_temp < 0) + mu_temp * (mu_temp > 0)

    Y_obs = Normal('Y_obs', mu=mu, sd=tau, observed=Y)

from pymc3 import Metropolis, sample, find_MAP
from scipy import optimize

with basic_model_GCR:

    # obtain starting values via MAP
    start = find_MAP(fmin=optimize.fmin_powell)

    # draw posterior samples
    trace = sample(sample_size, step=Metropolis(), start=start)

# thin the samples by keeping every 5th sample
thin_factor = 5

#summary(trace)
#traceplot(trace);

# PLOTTING THE HISTOGRAM

figure_count = mua.create_2x2_histograms(trace, figure_count)

#Create the Brier Function
Temps = np.arange(0, 50, 0.1)
a_samps = mua.make_sims_temp_resp("briere", trace, Temps, thin_factor)
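
The masked expression for mu in this example implements the Brière thermal response, which is zero outside the interval (T0, Tm); an equivalent standalone sketch (assuming NumPy arrays):

def briere(T, c, T0, Tm):
    # c * T * (T - T0) * sqrt(Tm - T) inside (T0, Tm), zero elsewhere
    resp = c * T * (T - T0) * np.sqrt(np.clip(Tm - T, 0.0, None))
    return np.where((T > T0) & (T < Tm), resp, 0.0)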
Example #17
def do_mcmc_bmlingam(xs, hparams, mcmc_params):
    """Do MCMC for sampling posterior of bmlingam coefficient.

    Example: 

    .. code:: python

        mcmc_params = MCMCParams(
            n_burn=10000,     # Samples in burn-in period
            n_mcmc_samples=10000, # Samples in MCMC (after burn-in)
            seed_burn=1, # Random seed for burn-in period
            seed=2 # Random seed for MCMC
        ) 
        trace = do_mcmc_bmlingam(data['xs'], hparams, mcmc_params)
        b_post = np.mean(trace['b'])

    :code:`xs` is the numpy.ndarray containing samples. 

    :param xs: Data array. 
    :type xs: numpy.ndarray, shape=(n_samples, 2)

    :code:`hparams` is a dict including hyperparameters. 
    See :func:`bmlingam.hparam.define_hparam_searchspace`. 

    :param hparams: Set of hyperparameters.
    :type hparams: dict

    :code:`mcmc_params` includes parameters for MCMC. 

    :param mcmc_params: Parameters for MCMC. 
    :type mcmc_params: :class:`bmlingam.MCMCParams`
    """
    assert isinstance(mcmc_params, MCMCParams)

    # ---- Import PyMC3 modules when required ----
    from pymc3 import Metropolis, sample

    # ---- Standardization ----
    scale_ratio = np.std(xs[:, 1]) / np.std(xs[:, 0])
    xs = standardize_samples(xs, hparams['standardize'])

    model = get_pm3_model_bmlingam(xs, hparams, mcmc_params.verbose)

    # ---- MCMC sampling ----
    with model:
        # Burn-in
        # start = find_MAP()
        step = Metropolis()
        trace = sample(
            mcmc_params.n_burn, step, random_seed=mcmc_params.seed_burn, 
            progressbar=False
        )

        # Sampling
        trace = sample(
            mcmc_params.n_mcmc_samples, step, start=trace[-1], 
            random_seed=mcmc_params.seed, progressbar=False
        )

    trace_b = np.array(trace['b'])
    if hparams['standardize']:
        if hparams['causality'] == [1, 2]:
            trace_b *= scale_ratio
        elif hparams['causality'] == [2, 1]:
            trace_b /= scale_ratio
        else:
            raise ValueError("Invalid value of causality: %s" %
                hparams['causality'])

    return {'b': trace_b}
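
The scale_ratio correction undoes the standardization: if x2 = b * x1, the coefficient estimated on standardized data is b * std(x1) / std(x2), so multiplying by scale_ratio = std(x2) / std(x1) recovers b. A minimal numeric check of that identity (illustrative values, assuming NumPy):

import numpy as np

x1 = np.random.normal(size=1000)
b_true = 2.0
x2 = b_true * x1 + np.random.normal(scale=0.1, size=1000)

b_std = b_true * np.std(x1) / np.std(x2)   # coefficient in standardized space
scale_ratio = np.std(x2) / np.std(x1)
print(np.allclose(b_std * scale_ratio, b_true))  # True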
Example #18
    steps = []
    steps.append(NUTS(vars=[pi]))
    #steps.append(NUTS(vars=[pi], scaling=np.ones(M-1)*0.058))
    #steps.append(Metropolis(vars=[pi], scaling=0.058, tune=False))
    steps.append(NUTS(vars=[Q], scaling=np.ones(M - 1, dtype=float) * 10.))
    #steps.append(Metropolis(vars=[Q], scaling=0.2, tune=False))
    steps.append(
        ForwardS(vars=[S], nObs=nObs, T=T, N=N, observed_jumps=obs_jumps))
    steps.append(NUTS(vars=[B0, B]))
    #steps.append(Metropolis(vars=[B0], scaling=0.2, tune=False))
    #steps.append(NUTS(vars=[B]))
    #steps.append(Metropolis(vars=[B], scaling=0.198, tune=False))
    steps.append(ForwardX(vars=[X], N=N, T=T, K=K, D=D, Dd=Dd, O=O, nObs=nObs))
    #steps.append(NUTS(vars=[Z], scaling=np.ones(K*D)))
    steps.append(Metropolis(vars=[Z], scaling=0.0132, tune=False))
    steps.append(NUTS(vars=[L], scaling=np.ones(D)))
    #steps.append(Metropolis(vars=[L],scaling=0.02, tune=False, ))

    ## 22 minutes per step with all NUTS set

    #import pdb; pdb.set_trace()
    #model.dlogp()
    trace = sample(1001, steps, start=start, random_seed=111, progressbar=True)
    #trace = sample(11, steps, start=start, random_seed=111,progressbar=True)
    #trace = sample(11, steps, start=start, random_seed=[111,112,113],progressbar=False,njobs=3)

pi = trace[pi]
Q = trace[Q]
S = trace[S]
#S0 = S[:,0]    #now pibar
Example #19
plt.ylabel("Disaster count")
plt.xlabel("Year")

plt.show()

from pymc3 import DiscreteUniform, Poisson, switch, Model, Exponential, NUTS, Metropolis, sample, traceplot

with Model() as disaster_model:

    switchpoint = DiscreteUniform('switchpoint',
                                  lower=year.min(),
                                  upper=year.max(),
                                  testval=1900)

    # Priors for the pre- and post-switch disaster rates
    early_rate = Exponential('early_rate', 1)
    late_rate = Exponential('late_rate', 1)

    # Allocate the appropriate Poisson rate to years before and after the switchpoint
    rate = switch(switchpoint >= year, early_rate, late_rate)

    disasters = Poisson('disasters', rate, observed=disaster_data)

    step1 = NUTS([early_rate, late_rate])

    # Use Metropolis for switchpoint, and missing values since it accommodates discrete variables
    step2 = Metropolis([switchpoint, disasters.missing_values[0]])

    trace = sample(10000, step=[step1, step2])

traceplot(trace)
Example #20
stimuli_csv = load_file(SENTENCES, sep=",")  # sentences with frequencies
sentences = stimuli_csv.groupby(['item', 'label'], sort=False)

parser_with_bayes = pm.Model()
with parser_with_bayes:
    lf = HalfNormal('lf', sd=0.3)
    le = HalfNormal('le', sd=0.5)
    rf = HalfNormal('rf', sd=0.05)
    emap = HalfNormal('emap', sd=1.0)
    # latency likelihood -- this is where pyactr is used
    pyactr_rt = actrmodel_latency(lf, le, rf, emap)
    subj_mu_rt = Deterministic('subj_mu_rt', pyactr_rt[0])
    subj_rt_observed = Normal('subj_rt_observed',
                              mu=subj_mu_rt,
                              sd=10,
                              observed=subj_extraction['rt'])
    obj_mu_rt = Deterministic('obj_mu_rt', pyactr_rt[1])
    obj_rt_observed = Normal('obj_rt_observed',
                             mu=obj_mu_rt,
                             sd=10,
                             observed=obj_extraction['rt'])
    # we start the sampling
    step = Metropolis()
    db = Text('subj_obj_extraction/')
    trace = sample(draws=100, trace=db, step=step, init='auto', tune=10)
    traceplot(trace)
    plt.savefig("subj_obj_extraction_posteriors.pdf")
    plt.savefig("subj_obj_extraction_posteriors.png")
Example #21
def run_phi(data, **kwargs):
    if isinstance(data, str):
        data = csv(data)
    data = np.array(data)

    # Read options from **kwargs, falling back to defaults
    limits = kwargs.get("limits")
    if limits is None:
        limits = (np.nanmin(list(itertools.chain.from_iterable(data))),
                  np.nanmax(list(itertools.chain.from_iterable(data))))

    verbose = kwargs.get("verbose", False)

    if (kwargs.get("binning") is not None) and not kwargs.get("binning"):
        print("removing binning on borders")
        binning_multiplier = 2
    else:
        binning_multiplier = 1

    seed = kwargs.get("seed", 123)
    table = kwargs.get("table", False)
    N = kwargs.get("N", 1000)
    keep_missing = kwargs.get("keep_missing")  # None means AUTO
    fast = kwargs.get("fast", True)
    njobs = kwargs.get("njobs", 2)
    sd = kwargs.get("sd", 1000000)

    # Ground truth per document, if provided
    gt = kwargs.get("gt")
    if gt is None:
        gt = [None] * len(data)

    if verbose: print("Computing Phi")
    idx_of_gt = np.array([x is not None for x in gt])
    idx_of_not_gt = np.array([x is None for x in gt])
    num_of_gt = np.sum(idx_of_gt)

    basic_model = Model()

    for i, g in enumerate(gt):
        if g is not None:
            gt[i] = scale_mat(np.array([[gt[i]] * len(data[i])]),
                              limits,
                              binning_multiplier=binning_multiplier)[0][0]

    num_of_docs = len(data)  # number of documents

    rectangular = True
    sparse = False
    if np.isnan(data).any():
        sparse = True
        data = np.ma.masked_invalid(data)
        data = minimal_matrix(data)

    scaled = scale_mat(data, limits, binning_multiplier=binning_multiplier)

    if (np.count_nonzero(np.isnan(scaled)) /
            scaled.size) > 0.2:  # a lot of nans
        if verbose:
            print(
                "WARNING: a lot of missing values: we are going to set keep_missing=False to improve convergence (if not manually overridden)"
            )
        if keep_missing is None:
            keep_missing = False

    if sparse and keep_missing is False:
        rectangular = False
        scaled = [doc[~np.isnan(doc)].tolist()
                  for doc in scaled]  #make data a list of lists

    NUM_OF_ITERATIONS = N

    with basic_model:
        precision = Normal('precision', mu=2, sd=sd)
        #precision = Gamma('precision',mu=2,sd=1)

        if num_of_docs - num_of_gt == 1:
            mu = Normal('mu', mu=1 / 2, sd=sd)
        else:
            mu = Normal('mu', mu=1 / 2, sd=sd, shape=num_of_docs - num_of_gt)
        alpha = mu * precision
        beta = precision * (1 - mu)

        if rectangular:
            masked = pd.DataFrame(
                scaled[idx_of_not_gt])  #needed to keep nan working
            if num_of_docs - num_of_gt == 1:
                Beta('beta_obs', observed=masked, alpha=alpha, beta=beta)
            else:
                Beta('beta_obs',
                     observed=masked.T,
                     alpha=alpha,
                     beta=beta,
                     shape=num_of_docs - num_of_gt)
        else:
            for i, doc in enumerate(scaled):
                Beta('beta_obs' + str(i),
                     observed=doc,
                     alpha=alpha[i],
                     beta=beta[i])

        for i, g in enumerate(gt):
            if g is not None:
                mu = Normal('mu' + str(i), mu=gt[i], sd=1)
                alpha = mu * precision
                beta = precision * (1 - mu)
                Beta('beta_obs_g' + str(i),
                     observed=scaled[i],
                     alpha=alpha,
                     beta=beta)  #alpha=a,beta=b,observed=beta)

        try:
            if fast:
                # deliberately fail to skip the NUTS warm-up below and fall
                # through to the faster Metropolis sampler in the except block
                assert False
            stds = np.ones(basic_model.ndim)
            for _ in range(5):
                args = {'scaling': stds**2, 'is_cov': True}
                trace = pm.sample(round(NUM_OF_ITERATIONS / 10),
                                  tune=round(NUM_OF_ITERATIONS / 10),
                                  init=None,
                                  nuts_kwargs=args,
                                  chains=10,
                                  progressbar=verbose,
                                  random_seed=seed)
                samples = [basic_model.dict_to_array(p) for p in trace]
                stds = np.array(samples).std(axis=0)

            step = pm.NUTS(scaling=stds**2, is_cov=True, target_accept=0.9)
            start = trace[0]
            trace = sample(NUM_OF_ITERATIONS,
                           tune=round(NUM_OF_ITERATIONS / 2),
                           njobs=njobs,
                           chains=8,
                           init=None,
                           step=step,
                           start=start,
                           progressbar=verbose,
                           random_seed=seed)
            # Statistical inference
            beg = time()
            #start = find_MAP()
            bef_slice = time()
            #step = NUTS()# Metropolis()
            #step = Metropolis()
            aft_slice = time()
            bef_trace = time()
            #trace = sample(NUM_OF_ITERATIONS, progressbar=verbose,random_seed=123, njobs=njobs,start=start,step=step)
    #        trace = sample(NUM_OF_ITERATIONS, progressbar=verbose,random_seed=123, njobs=njobs,init=None,tune=100)
        except Exception:
            beg = time()
            step = Metropolis()
            #start = find_MAP()
            trace = sample(NUM_OF_ITERATIONS,
                           progressbar=verbose,
                           random_seed=seed,
                           njobs=njobs,
                           step=step)  #,start=start)
        #pm.summary(trace,include_transformed=True)
        if float(pymc3.__version__) <= 3.3:
            res = pm.stats.df_summary(trace, include_transformed=True)
        else:
            res = pm.summary(trace, include_transformed=True)
        res.drop(["sd", "mc_error"], axis=1, inplace=True)
        res = res.transpose()
        res["agreement"] = agreement(res['precision'])
        # ----

        #sub_res = res.copy()

        # Mu rescaling

        col_agreement = res["agreement"]
        col_precision = res["precision"]

        res.drop("agreement", inplace=True, axis=1)
        res.drop("precision", inplace=True, axis=1)

        if table:
            col_names = res.columns[0:len(data) - 1]
            for i, name in enumerate(col_names):
                l = len(scaled[i]) * binning_multiplier
                for j in range(3):

                    b = res[name].iloc[j]
                    mu_res = (b * l - 0.5) / (l - 1)
                    res[name].iloc[j] = np.clip(mu_res, 0,
                                                1) * (limits[1] - limits[0])

        res["agreement"] = col_agreement
        res.insert(0, "precision", col_precision)
        aft_trace = time()
    computation_time = time() - beg
    if verbose: print("Elapsed time for computation: ", computation_time)

    convergence = True
    rhat = res.loc['Rhat']['precision']
    if np.isnan(rhat) or np.abs(rhat - 1) > 1e-1:
        print("Warning! You need more iterations!")
        convergence = False
    if table:
        return {
            'agreement': col_agreement['mean'],
            'interval': col_agreement[['hpd_2.5', 'hpd_97.5']].values,
            "computation_time": computation_time,
            "convergence_test": convergence,
            'table': res
        }
    else:
        return {
            'agreement': col_agreement['mean'],
            'interval': col_agreement[['hpd_2.5', 'hpd_97.5']].values,
            "computation_time": computation_time,
            "convergence_test": convergence
        }
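
A hypothetical invocation of run_phi, using only keyword arguments the function actually reads above (the rating data is made up):

ratings = [[7.0, 6.5, 7.5],
           [2.0, 3.0, 2.5],
           [9.0, 8.5, 9.5]]
result = run_phi(ratings, limits=(0, 10), N=2000, verbose=True)
print(result['agreement'], result['interval'])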