Example #1
    def sample_hypers(self,
                      start=None,
                      samples=1000,
                      chains=1,
                      trace=None,
                      method='Slice'):
        if start is None:
            start = self.get_params_current()
        if self.outputs.get_value() is None:
            print('sample_hypers requires observations to be set')
            return start
        with self.model:
            if len(self.fixed_vars) > 0:
                # ConstantStep was removed from pymc3 by mistake
                step = [ConstantStep(vars=self.fixed_vars)]
                if len(self.sampling_vars) > 0:
                    if method == 'HMC':
                        step += [pm.HamiltonianMC(vars=self.sampling_vars)]
                    else:
                        # the original Slice sampler hangs if started from the optimum
                        step += [RobustSlice(vars=self.sampling_vars)]
            else:
                if method == 'HMC':
                    step = pm.HamiltonianMC()
                else:
                    step = RobustSlice()
            trace = pm.sample(samples,
                              step=step,
                              start=start,
                              njobs=chains,
                              trace=trace)
        return trace
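ConstantStep and RobustSlice above are project-local step classes, not pymc3 built-ins; what is pymc3's own is the compound-step mechanism they plug into. A minimal sketch of that mechanism using only built-in samplers (the toy model is hypothetical):

import numpy as np
import pymc3 as pm

with pm.Model() as model:
    a = pm.Normal('a', mu=0, sd=1)
    b = pm.Normal('b', mu=0, sd=1)
    pm.Normal('obs', mu=a + b, sd=1, observed=np.random.randn(20))
    # A list of step methods assigns each sampler its own subset of
    # variables, exactly as in sample_hypers above.
    step = [pm.HamiltonianMC(vars=[a]), pm.Metropolis(vars=[b])]
    trace = pm.sample(500, step=step, chains=1)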
Example #2
    def test_run(self):
        model = self.build_model()
        with model:
            # move the chain to the MAP which should be a good starting point
            start = pm.find_MAP()
            H = model.fastd2logp()  # find a good orientation using the hessian at the MAP
            h = H(start)

            step = pm.HamiltonianMC(model.vars, h)
            pm.sample(50, step=step, start=start)
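The same MAP-plus-Hessian initialisation, written out against a self-contained toy model (the model is hypothetical; pm.find_hessian stands in for the fastd2logp helper used above):

import numpy as np
import pymc3 as pm

with pm.Model() as model:
    mu = pm.Normal('mu', mu=0, sd=10)
    pm.Normal('y', mu=mu, sd=1, observed=np.random.randn(30))
    start = pm.find_MAP()
    h = pm.find_hessian(start, model=model)  # curvature at the MAP
    step = pm.HamiltonianMC(model.vars, h)   # scale HMC with that curvature
    trace = pm.sample(50, step=step, start=start, chains=1)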
Example #3
    def sample(self,
               prior='umvt',
               method='NUTS',
               res=25.0,
               retrain=False,
               train=True,
               **step_kwargs):
        model = self.get_model(prior)
        if retrain:
            self.train_model()
        variances = self.read_cov(method, prior)
        #step_scale = res*(np.pi/180) / (self.n_d)**(0.25)
        step_scale = res * np.pi / 180 / (1 / self.n_d)**0.25
        if method == 'NUTS':
            with model:
                step = pm.NUTS(scaling=variances,
                               is_cov=True,
                               step_scale=step_scale,
                               adapt_step_size=False,
                               **step_kwargs)
        elif method == 'HMC':
            scaling = variances
            with model:
                step = pm.HamiltonianMC(step_scale=step_scale,
                                        adapt_step_size=False,
                                        **step_kwargs)
        elif method == 'MH':
            S = variances
            with model:
                step = pm.Metropolis()
        with model:
            trace = pm.sample(step=step, cores=1, **self.mckwargs)
        self.pickle(trace, prior, method, model, dict(**step_kwargs))
        myfile = 'corner_{ns}_{method}_{prior}.png'.format(
            ns=self.mckwargs['draws'], method=method, prior=prior)
        results = 'results' if self.hpc else 'test-results'
        mypath = os.path.join(self.samp_obj.output_directory, results,
                              self.samp_obj.label, method, prior, myfile)
        save_plot(trace, self.tmodes, self.T, mypath)
        if not self.hpc:
            plot_MC_torsion_result(trace, self.tmodes, self.T)

            #full_cov = self.priors_dict[prior]['full_cov']
            def flat_t(var):
                x = trace[str(var)]
                return x.reshape((x.shape[0], np.prod(x.shape[1:], dtype=int)))

        return trace
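The step_scale arithmetic above compensates for the (1/n)^(1/4) factor that pymc3 applies to step_scale internally, so the requested angular resolution is what the integrator actually uses. A quick check with hypothetical values:

import numpy as np

n_d, res = 3, 25.0  # hypothetical dimensionality and resolution in degrees
step_scale = res * np.pi / 180 / (1 / n_d)**0.25
# pymc3's effective leapfrog step is step_scale / n**(1/4):
print(step_scale / n_d**0.25)  # ~0.436 rad, i.e. the requested 25 degrees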
Example #4
    def too_slow(self):
        model = self.build_model()
        with model:
            start = pm.Point({
                "groupmean": self.obs_means.mean(),
                "groupsd_interval__": 0,
                "sd_interval__": 0,
                "means": np.array(self.obs_means),
                "u_m": np.array([0.72]),
                "floor_m": 0.0,
            })

            start = pm.find_MAP(start, model.vars[:-1])
            H = model.fastd2logp()
            h = np.diag(H(start))

            step = pm.HamiltonianMC(model.vars, h)
            pm.sample(50, step=step, start=start)
Example #5
    def too_slow(self):
        model = self.build_model()
        with model:
            start = pm.Point({
                'groupmean': self.obs_means.mean(),
                'groupsd_interval__': 0,
                'sd_interval__': 0,
                'means': np.array(self.obs_means),
                'u_m': np.array([.72]),
                'floor_m': 0.,
            })

            start = pm.find_MAP(start, model.vars[:-1])
            H = model.fastd2logp()
            h = np.diag(H(start))

            step = pm.HamiltonianMC(model.vars, h)
            pm.sample(50, step=step, start=start)
Example #6
def create_pp_sampler(sampler, context, priors, pp_engine='pymc3'):

    s = None
    if pp_engine == 'pymc3':

        with context['model']:
            if sampler == 'nuts':
                s = pymc3.NUTS(
                    vars=[priors[k] for k in sorted(priors.keys())],
                    max_treedepth=10,
                    early_max_treedepth=8,
                )
            elif sampler == 'hmc':
                s = pymc3.HamiltonianMC(
                    vars=[priors[k] for k in sorted(priors.keys())],
                    path_length=2.0,
                    adapt_step_size=True,
                    gamma=0.05,
                    k=0.75,
                    t0=10,
                    target_accept=0.8)
            elif sampler == 'mh':
                s = pymc3.Metropolis(
                    vars=[priors[k] for k in sorted(priors.keys())],
                    S=None,
                    proposal_dist=None,
                    scaling=1.0,
                    tune=True,
                    tune_interval=100,
                    model=None,
                    mode=None,
                )
            else:
                raise ValueError('Unrecognized sampler: {}'.format(sampler))
    else:
        raise ValueError('Unrecognized PP engine: {}'.format(pp_engine))

    return s
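A hedged usage sketch for the factory above; the one-variable model and the dict layout are hypothetical stand-ins for whatever the caller builds:

import pymc3

with pymc3.Model() as model:
    priors = {'mu': pymc3.Normal('mu', mu=0, sd=1)}
context = {'model': model}

step = create_pp_sampler('hmc', context, priors)
with context['model']:
    trace = pymc3.sample(500, step=step, chains=1)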
Example #7
def test_leapfrog_reversible():
    n = 3
    start, model, _ = models.non_normal(n)

    with model:
        h = pm.find_hessian(start, model=model)
        step = pm.HamiltonianMC(model.vars, h, model=model)

    bij = DictToArrayBijection(step.ordering, start)

    logp, dlogp = list(map(bij.mapf, step.fs))
    H = Hamiltonian(logp, dlogp, step.potential)

    q0 = bij.map(start)
    p0 = np.ones(n)*.05
    for e in [.01, .1, 1.2]:
        for L in [1, 2, 3, 4, 20]:

            q, p = q0, p0
            q, p = leapfrog(H, q, p, L, e)
            q, p = leapfrog(H, q, -p, L, e)

            close_to(q, q0, 1e-8, str((L, e)))
            close_to(-p, p0, 1e-8, str((L, e)))
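The property under test: a leapfrog trajectory followed by the same trajectory with negated momentum returns to the starting point. A self-contained sketch of that check for a standard normal (the hand-rolled integrator is illustrative, not pymc3's):

import numpy as np

def leapfrog(q, p, grad_logp, step, n_steps):
    # kick-drift-kick leapfrog with a unit mass matrix
    p = p + 0.5 * step * grad_logp(q)
    for _ in range(n_steps - 1):
        q = q + step * p
        p = p + step * grad_logp(q)
    q = q + step * p
    p = p + 0.5 * step * grad_logp(q)
    return q, p

grad_logp = lambda q: -q  # gradient of log N(0, I)
q0, p0 = np.ones(3), 0.05 * np.ones(3)
q, p = leapfrog(q0, p0, grad_logp, 0.1, 10)
q, p = leapfrog(q, -p, grad_logp, 0.1, 10)  # flip momentum, integrate again
assert np.allclose(q, q0) and np.allclose(-p, p0)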
Example #8
import scipy.stats as ss

xdata = np.array([-27.020, 3.570, 8.191, 9.898, 9.603, 9.945, 10.056])
n = len(xdata)

model = pm.Model()
with model:
    # priors
    mu = pm.Normal('mu', mu=0, tau=0.001)
    lmbda = pm.Gamma('lambda', alpha=0.001, beta=0.001, shape=n)
    sigma = pm.Deterministic('sigma', tt.sqrt(1/lmbda))
    # data come from Gaussians with common mean, but n different precisions
    for ii in range(n):
        pm.Normal('x%d' % ii, mu=mu, tau=lmbda[ii], observed=xdata[ii])
    # instantiate sampler
    stepFunc = pm.HamiltonianMC() # pm.NUTS() isn't working well here..?
    # draw posterior samples (in parallel running chains)
    Nsample = 5000
    Nchains = 4
    traces = pm.sample(Nsample, step=stepFunc, njobs=Nchains)

plotVars = ('mu','sigma','lambda')
ax = pm.traceplot(traces, vars=plotVars, combined=False)

#%% separately plot each sigma[i] (sigma consists of Nchains x Nsample x n)
sigma = traces.get_values('sigma', combine=False, burn=1000)
sigma = np.squeeze(sigma)
#sigma = sigma[:,burnin:,:]
fig, axs = plt.subplots(n, 2, squeeze=False, figsize=(10, n*2))
for ii in range(n):
    chains = sigma[:,:,ii].transpose()
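The per-observation loop above can be collapsed into a single vectorized likelihood, since tau accepts an array; a sketch reusing the names defined above:

with pm.Model() as model_vec:
    mu = pm.Normal('mu', mu=0, tau=0.001)
    lmbda = pm.Gamma('lambda', alpha=0.001, beta=0.001, shape=n)
    sigma = pm.Deterministic('sigma', tt.sqrt(1 / lmbda))
    # one vectorized likelihood in place of the n scalar ones
    pm.Normal('x', mu=mu, tau=lmbda, observed=xdata)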
Example #9
    def InitPyMCSampling(self, **kwargs):
        '''
        PyMC3 initialisation: Sampler
            N_tune :
            N_chains :
            N_cores :
            IsProgressbar :
            Sampler_Name :
        '''
        #Checking if all the necessary stuff is loaded
        if not self.VarNames:
            self.InitPar(kwargs["ParFile"])
        try:
            self.Cov[0][0]
        except TypeError:
            self.SetCovMatrix(Scale=1.2)
        if not self.basic_model:
            self.InitPyMC()

        # Further initialisation
        print(' >> Logging calculation steps in {}'.format(self.log_file_name))
        open(self.log_file_name, 'w+').close()
        with self.basic_model:
            Sampler_Name = kwargs.get("Sampler_Name", "Metropolis")
            N_tune = kwargs.get("N_tune", 0)
            N_chains = kwargs.get("N_chains", 1)
            N_cores = kwargs.get("N_cores", min(4, N_chains))
            IsProgressbar = kwargs.get("IsProgressbar", 1)
            print(
                '\n >> using configuration :  {:12}, N_tune = {}, N_chains = {}, N_cores = {}'
                .format(Sampler_Name, N_tune, N_chains, N_cores))

            self.S = Storage_Container(
                3 * N_chains * len(self.VarNames))  # updating the cache size

            # Setting up the samplers
            #   Calling S = self.Cov[::-1,::-1] is a necessary hack to avoid a problem in the PyMC3 code:
            #   the order of the variables is inverted (by accident?) during BlockStep().__init__() (see terminal prompts)
            if Sampler_Name == "DEMetropolis":
                step = pm.DEMetropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal)

            elif Sampler_Name == "Metropolis":
                step = pm.Metropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal,
                    blocked=True)

            elif Sampler_Name == "Hamiltonian":
                # The settings for HMC are very tricky: allowing adapt_step_size=True may lead to very small step sizes, causing the method to get stuck.
                length = max(
                    0.3, 1.5 * np.sqrt(np.sum(np.array(self.STDs)**2))
                )  # this is the length in the parameter-space to travel between two points
                #length = np.sqrt(self.STDs) * np.mean(self.STDs)
                sub_l = length / 7  # setting substeps
                step = pm.HamiltonianMC(scaling=self.Cov[::-1, ::-1],
                                        adapt_step_size=0,
                                        step_scale=sub_l,
                                        path_length=length,
                                        is_cov=True)

                step.adapt_step_size = False  # workaround for PyMC3 bug ('adapt_step_size=0' is ignored)

                print(
                    ' >> Hamiltonian settings: {:7.4f} / {:7.4f}  = {:4} substeps between points'
                    .format(length, sub_l / (len(self.STDs)**0.25),
                            int(length / (sub_l / (len(self.STDs)**0.25)))))

            else:
                print(
                    ' >> Unknown Sampler_Name = {:20}, Using Metropolis instead'
                    .format(Sampler_Name))
                step = pm.Metropolis(
                    S=self.Cov[::-1, ::-1],
                    proposal_dist=pm.MultivariateNormalProposal,
                    blocked=True)

            self.Custom_sample_args = {
                "step": step,
                "progressbar": IsProgressbar,
                "chains": N_chains,
                "cores": N_cores,
                "tune": N_tune,
                #"parallelize" : True,
            }

        self.trace = None
        self.Prev_End = None
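For reference, the substep count printed above follows from pymc3 dividing step_scale by n^(1/4); with hypothetical STDs the arithmetic looks like this:

import numpy as np

STDs = [0.1, 0.2, 0.3]  # hypothetical per-parameter standard deviations
length = max(0.3, 1.5 * np.sqrt(np.sum(np.array(STDs)**2)))
sub_l = length / 7
step_size = sub_l / len(STDs)**0.25  # the step pymc3 actually takes
print(int(length / step_size))       # ~9 substeps between points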
Example #10
def main():
    '''
    This python function is a wrapper used to fit behavioral models to data.

    Examples of how to call it:

    # Fitting Main Model to Exp 1
    python fit_model_to_dataset.py --modelname "11" --exp 1 --steps 2000 --steps_tune 100 --covariate Bi3itemCDM --seed 3

    # Fitting Main Model to Exp 2
    python fit_model_to_dataset.py --modelname "11" --exp 2 --steps 2000 --steps_tune 100 --covariate Bi3itemCDM --seed 3

    # Fitting Other Models
    python fit_model_to_dataset.py --modelname "1" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "2" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "3" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "4" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "5" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "6" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "7" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "8" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "9" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "10" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "11" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "12" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3
    python fit_model_to_dataset.py --modelname "13" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3

    # Interaction model
    python fit_model_to_dataset.py --modelname "11trip" --exp 1 --steps 1000 --steps_tune 100 --covariate Bi3itemCDM --seed 3

    '''

    parser = argparse.ArgumentParser()
    parser.add_argument('--seed', type=int, default=4)
    parser.add_argument('--modelname', '-m', type=str, default=None)
    parser.add_argument('--steps', '-st', type=int, default=1000)
    parser.add_argument('--steps_tune', '-stt', type=int, default=100)
    parser.add_argument('--covariate', '-c', type=str, default='None')
    parser.add_argument('--exp', '-e', type=int, default=1)
    parser.add_argument('--hierarchical', '-hh', type=str, default='True')
    parser.add_argument('--task', '-tt', type=str, default='both')
    parser.add_argument('--subset', '-sub', type=str, default='all')
    parser.add_argument('--covariatemask', '-cm', type=str, default='None')

    args = parser.parse_args()
    print(args.modelname)
    print(args.steps)
    print(args.steps_tune)
    print(args.exp)
    print(args.hierarchical)
    print(args.subset)
    print(args.covariatemask)
    args.hierarchical = (args.hierarchical == 'True')

    save_state_variables = False

    B_max = 10
    nonlinear_indicator = 0  # mag diff scaled

    if args.task == 'both':

        if args.modelname == '1':
            # Model 1 #
            import models_1_flex as model_specific
            params = [
                'lr_baseline',
                'lr_stabvol',
                'Gamma_baseline',
                'Gamma_stabvol',
                'Binv_baseline',
                'Binv_stabvol',
                'lr_rewpain',
                'lr_rewpain_stabvol',
                'Gamma_rewpain',
                'Gamma_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '2':
            # Model 2 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline',
                'lr_stabvol',
                'Amix_baseline',
                'Amix_stabvol',
                'Binv_baseline',
                'Binv_stabvol',
                'lr_rewpain',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '3':
            # Model 3 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'Amix_baseline',
                'Amix_stabvol',
                'Binv_baseline',
                'Binv_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '4':
            # Model 4 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline',
                'lr_stabvol',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'lr_rewpain',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '5':
            # Model 5 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '6':
            # Model 6 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'lr_rewpain_goodbad_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Amix_rewpain_goodbad_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
                'Binv_rewpain_goodbad_stabvol',
            ]
            B_max = 10

        if args.modelname == '7':
            # Model 7 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline', 'lr_goodbad', 'lr_stabvol',
                'lr_goodbad_stabvol', 'Amix_baseline', 'Amix_goodbad',
                'Amix_stabvol', 'Amix_goodbad_stabvol', 'Binv_baseline',
                'Binv_goodbad', 'Binv_stabvol', 'Binv_goodbad_stabvol',
                'mag_baseline', 'lr_rewpain', 'lr_rewpain_goodbad',
                'lr_rewpain_stabvol', 'Amix_rewpain', 'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol', 'Binv_rewpain', 'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol', 'mag_rewpain'
            ]
            B_max = 10

        if args.modelname == '8':
            # Model 8 #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline', 'lr_goodbad', 'lr_stabvol',
                'lr_goodbad_stabvol', 'Amix_baseline', 'Amix_goodbad',
                'Amix_stabvol', 'Amix_goodbad_stabvol', 'Binv_baseline',
                'Binv_goodbad', 'Binv_stabvol', 'Binv_goodbad_stabvol',
                'mag_baseline', 'eps_baseline', 'lr_rewpain',
                'lr_rewpain_goodbad', 'lr_rewpain_stabvol', 'Amix_rewpain',
                'Amix_rewpain_goodbad', 'Amix_rewpain_stabvol', 'Binv_rewpain',
                'Binv_rewpain_goodbad', 'Binv_rewpain_stabvol', 'mag_rewpain',
                'eps_rewpain'
            ]
            B_max = 10

        if args.modelname == '9':
            # Model 9 #
            import models_9_12 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'mag_baseline',
                'decay_baseline',
                'decay_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
                'mag_rewpain',
                'decay_rewpain',
                'decay_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '10':
            # Model 10 #
            import models_10_13 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'mag_baseline',
                'decay_baseline',
                'decay_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
                'mag_rewpain',
                'decay_rewpain',
                'decay_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '11':
            # Model 11 MAIN MODEL #
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline', 'lr_goodbad', 'lr_stabvol',
                'lr_goodbad_stabvol', 'lr_c_baseline', 'Amix_baseline',
                'Amix_goodbad', 'Amix_stabvol', 'Amix_goodbad_stabvol',
                'Binv_baseline', 'Binv_goodbad', 'Binv_stabvol',
                'Binv_goodbad_stabvol', 'Bc_baseline', 'mag_baseline',
                'lr_rewpain', 'lr_rewpain_goodbad', 'lr_rewpain_stabvol',
                'Amix_rewpain', 'Amix_rewpain_goodbad', 'Amix_rewpain_stabvol',
                'Binv_rewpain', 'Binv_rewpain_goodbad', 'Binv_rewpain_stabvol',
                'Bc_rewpain', 'mag_rewpain'
            ]
            B_max = 10

        if args.modelname == '12':
            # Model 12 #
            import models_9_12 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'lr_c_baseline',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'Bc_baseline',
                'mag_baseline',
                'decay_baseline',
                'decay_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
                'Bc_rewpain',
                'mag_rewpain',
                'decay_rewpain',
                'decay_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '13':
            # Model 13 #
            import models_10_13 as model_specific
            params = [
                'lr_baseline',
                'lr_goodbad',
                'lr_stabvol',
                'lr_goodbad_stabvol',
                'lr_c_baseline',
                'Amix_baseline',
                'Amix_goodbad',
                'Amix_stabvol',
                'Amix_goodbad_stabvol',
                'Binv_baseline',
                'Binv_goodbad',
                'Binv_stabvol',
                'Binv_goodbad_stabvol',
                'Bc_baseline',
                'mag_baseline',
                'decay_baseline',
                'decay_stabvol',
                'lr_rewpain',
                'lr_rewpain_goodbad',
                'lr_rewpain_stabvol',
                'Amix_rewpain',
                'Amix_rewpain_goodbad',
                'Amix_rewpain_stabvol',
                'Binv_rewpain',
                'Binv_rewpain_goodbad',
                'Binv_rewpain_stabvol',
                'Bc_rewpain',
                'mag_rewpain',
                'decay_rewpain',
                'decay_rewpain_stabvol',
            ]
            B_max = 10

        if args.modelname == '11trip':
            # Model 11 with triple interaction
            import models_2thr9_11 as model_specific
            params = [
                'lr_baseline', 'lr_goodbad', 'lr_stabvol',
                'lr_goodbad_stabvol', 'lr_c_baseline', 'Amix_baseline',
                'Amix_goodbad', 'Amix_stabvol', 'Amix_goodbad_stabvol',
                'Binv_baseline', 'Binv_goodbad', 'Binv_stabvol',
                'Binv_goodbad_stabvol', 'Bc_baseline', 'mag_baseline',
                'lr_rewpain', 'lr_rewpain_goodbad', 'lr_rewpain_stabvol',
                'lr_rewpain_goodbad_stabvol', 'Amix_rewpain',
                'Amix_rewpain_goodbad', 'Amix_rewpain_stabvol', 'Binv_rewpain',
                'Binv_rewpain_goodbad', 'Binv_rewpain_stabvol', 'Bc_rewpain',
                'mag_rewpain'
            ]
            B_max = 10

    # load data
    if args.exp == 1:
        dftmp = pd.read_csv('../data/participant_table_exp1.csv')
        data = get_data(dftmp)
    else:
        dftmp = pd.read_csv('../data/participant_table_exp2.csv')
        data = get_data_online(dftmp)

    u_covariate_mask = None
    mask_name = ''

    if args.task == 'both':

        if args.subset == 'all':

            if args.exp == 1:
                includes_subjs_with_one_task = True

                # prepare for model code
                subj_indices = slice(0, 157)
                Nboth = data['Nboth']

                Y = {}
                Y['participants_choice'] = data[
                    'participants_choice'][:, subj_indices]

                X = {}
                for var in [
                        'outcomes_c_flipped', 'mag_1_c', 'mag_0_c', 'stabvol',
                        'rewpain'
                ]:
                    X[var] = data[var][:, subj_indices]
                X['NN'] = X[var].shape[1]
                X['Nboth'] = data['Nboth']
                X['Nrewonly'] = data['Nrewonly']
                X['Npainonly'] = data['Npainonly']

                C = {}
                for stem in [
                        'Bi1item_w_j_scaled', 'Bi2item_w_j_scaled',
                        'Bi3item_w_j_scaled', 'PSWQ_scaled_residG',
                        'MASQAA_scaled_residG', 'MASQAD_scaled_residG',
                        'BDI_scaled_residG', 'STAIanx_scaled_residG',
                        'STAI_scaled_residG', 'PSWQ_scaled', 'MASQAA_scaled',
                        'MASQAD_scaled', 'BDI_scaled', 'STAIanx_scaled',
                        'STAI_scaled'
                ]:
                    for tail in ['both', 'pain_only', 'rew_only']:
                        C[stem + '_' + tail] = data[stem + '_' + tail]
            elif args.exp == 2:
                includes_subjs_with_one_task = False

                # prepare for model code
                subj_indices = slice(
                    0, data['participants_choice'].shape[1]
                )  #list(np.where(np.array(data['MID_combined'])=='cb100')[0])
                Nboth = data['Nboth']

                Y = {}
                Y['participants_choice'] = data[
                    'participants_choice'][:, subj_indices]

                X = {}
                for var in [
                        'outcomes_c_flipped', 'mag_1_c', 'mag_0_c', 'stabvol',
                        'rewpain'
                ]:
                    X[var] = data[var][:, subj_indices]
                X['NN'] = X[var].shape[1]
                X['Nboth'] = data['Nboth']

                C = {}
                C['STAI_scaled_both'] = data['STAI_scaled_both']
                for trait in [
                        'Bi1item_w_j_scaled', 'Bi2item_w_j_scaled',
                        'Bi3item_w_j_scaled', 'PSWQ_scaled_residG',
                        'MASQAA_scaled_residG', 'MASQAD_scaled_residG',
                        'BDI_scaled_residG', 'STAIanx_scaled_residG',
                        'STAI_scaled_residG', 'PSWQ_scaled', 'MASQAA_scaled',
                        'MASQAD_scaled', 'BDI_scaled', 'STAIanx_scaled',
                        'STAI_scaled'
                ]:
                    C[trait + '_both'] = np.array(list(data[trait + '_both']))

        # Create base model (i.e. prior), embedding factors into the priors
        idx_first_reward_pain = np.min(
            [pi for (pi, p) in enumerate(params) if 'rew' in p])
        print('compiling base model')
        model = create_model_base(
            X,
            Y,
            C,  # Changed here
            params=params,
            K=len(params),
            Konetask=idx_first_reward_pain,
            rew_slice=slice(0, idx_first_reward_pain),
            pain_slice=slice(0, idx_first_reward_pain),
            split_by_reward=True,
            includes_subjs_with_one_task=includes_subjs_with_one_task,
            covariate=args.covariate,
            hierarchical=args.hierarchical,
            covv='diag',
            coding='deviance',
            u_covariate_mask=u_covariate_mask)

    # Create likelihood model
    print('compiling specific model')
    model = model_specific.combined_prior_model_to_choice_model(
        X,
        Y,
        param_names=params,
        model=model,
        save_state_variables=save_state_variables,
        B_max=B_max,
        nonlinear_indicator=nonlinear_indicator)

    # Save name
    print('saving')
    now = datetime.datetime.now()
    filename = ('model=' + args.modelname + '_covariate=' + args.covariate +
                '_date=' + str(now.year) + '_' + str(now.month) + '_' +
                str(now.day) + '_samples=' + str(args.steps) + '_seed=' +
                str(args.seed) + '_exp=' + str(args.exp))

    # Save empty placeholder
    with open('./model_fits/' + filename + '.pkl', "wb") as buff:
        pickle.dump({}, buff)

    # Fitting model
    with model:

        MAP = {}

        step = pm.HamiltonianMC(target_accept=.95)

        print('sampling ...')
        trace = pm.sample(args.steps,
                          step=step,
                          chains=4,
                          tune=args.steps_tune,
                          random_seed=args.seed)  # cores = 4

        ppc = pm.sample_ppc(trace, 500)

    with open('./model_fits/' + filename + '.pkl', "wb") as buff:
        pickle.dump({
            'model': model,
            'trace': trace,
            'ppc': ppc,
            'MAP': MAP
        }, buff)
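pm.sample_ppc is the older spelling of posterior-predictive sampling; on newer pymc3 releases the equivalent call is:

ppc = pm.sample_posterior_predictive(trace, samples=500)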
Example #11
#X_train = (X_train - X_train.mean(axis=0)) / X_train.std(axis=0)
#X_test = (X_test - X_train.mean(axis=0)) / X_train.std(axis=0)

#X_train = (X_train - X_train.min(axis=0)) / (X_train.max(axis=0)- X_train.min(axis=0))
#X_test = (X_test - X_train.min(axis=0)) / (X_train.max(axis=0)- X_train.min(axis=0))

n_iter = 100
step_size = 0.1
dim = X_train.shape[1]
classes = len(Y_train[0].unique())
print("model:")
with pm.Model() as iris_model:
    sd = pm.Gamma('sd', alpha=10, beta=1)
    alfa = pm.Normal('alfa', mu=0, sd=sd, shape=classes)
    beta = pm.Normal('beta', mu=0, sd=sd, shape=(dim, classes))
    mu = alfa + pm.math.dot(X_train, beta)
    theta = pm.Deterministic('theta', tt.nnet.softmax(mu))
    yl = pm.Categorical('yl', p=theta, observed=Y_train)
    step = pm.HamiltonianMC(step_scale=step_size, path_length=1.0, is_cov=True)
    iris_trace = pm.sample(n_iter, step=step)

print("post_pred:")

with iris_model:
    post_pred = pm.sample_ppc(iris_trace, samples=10)


traceplot(iris_trace)
plt.show()
plt.close()
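A possible follow-up, turning the ten posterior-predictive draws into point predictions; Y_train is whatever label vector the script loaded, so the scoring line is illustrative only:

import numpy as np
import scipy.stats as ss

y_draws = np.asarray(post_pred['yl'])          # shape (10, n_train)
y_hat = ss.mode(y_draws, axis=0).mode.ravel()  # most frequent class per observation
print('train accuracy:', np.mean(y_hat == np.asarray(Y_train)))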
Example #12
def fitbayesianmodel(bayesian_model,
                     ytrain,
                     method=1,
                     n_=3000,
                     MAP=True,
                     chains=1,
                     jobs=1,
                     star='rrlyr',
                     classifier='RL',
                     PCA=False):
    print('chains: ', chains)
    print('jobs: ', jobs)
    if method == 4:
        print('------- Slice Sampling--------')
        with bayesian_model as model:
            map = 0
            step = pm.Slice()
            trace = pm.sample(n_, step=step, njobs=jobs)
        return trace, model, map

    if method == 5:
        print('------- HamiltonianMC--------')
        with bayesian_model as model:
            map = 0
            step = pm.HamiltonianMC()
            trace = pm.sample(n_,
                              chain=chains,
                              tune=2000,
                              njobs=jobs,
                              step=step,
                              init=None)
        return trace, model, map
    if method == 6:
        print('------- Default--------')
        with bayesian_model as model:
            map = 0
            trace = pm.sample(n_,
                              chain=chains,
                              njobs=jobs,
                              callbacks=[CheckParametersConvergence()])
        return trace, model, map

    if method == 7:
        print('------- Metropolis--------')
        with bayesian_model as model:
            map = 0
            step = pm.Metropolis()
            trace = pm.sample(n_,
                              step=step,
                              chain=chains,
                              njobs=jobs,
                              callbacks=[CheckParametersConvergence()],
                              tune=1000,
                              step_size=100)
            pm.traceplot(trace)
            name = star + '_' + classifier + '_PCA_' + str(PCA) + '2.png'
            plt.savefig(name)
            plt.clf()

        return trace, model, map

    if method == 8:
        print('------- NUTS--------')

        with bayesian_model as model:
            stds = np.ones(model.ndim)
            for _ in range(5):
                args = {'is_cov': True}
                trace = pm.sample(500,
                                  tune=1000,
                                  chains=1,
                                  init='advi+adapt_diag_grad',
                                  nuts_kwargs=args)
                samples = [model.dict_to_array(p) for p in trace]
                stds = np.array(samples).std(axis=0)
            traces = []
            for i in range(1):
                step = pm.NUTS(scaling=stds**2, is_cov=True,
                               target_accept=0.8)  #
                start = trace[-10 * i]

                trace_ = pm.sample(n_,
                                   cores=4,
                                   step=step,
                                   tune=1000,
                                   chain=chains,
                                   njobs=1,
                                   init='advi+adapt_diag_grad',
                                   start=start,
                                   callbacks=[CheckParametersConvergence()])
            trace = trace_
            map = 0
        return trace, model, map
Example #13
y = [5, 1, 5, 14, 3, 19, 1, 1, 4, 22]  # Number of failures
t = [94, 16, 63, 126, 5, 31, 1, 1, 2, 10]  # Observation time length

# Define hyperparameters
alpha = 1.8
gam = 0.01
delta = 1.0
Nobs = len(y)
''' Model '''
HansModel = pm.Model()
with HansModel:
    beta = pm.Gamma('beta_est', alpha=delta, beta=gam)
    lamb = pm.Gamma('lamb_est', alpha=alpha, beta=beta, shape=Nobs)

    # Model param
    poi_mu = t * lamb

    # likelihood
    data = pm.Poisson('data', mu=poi_mu, observed=y)
''' Model fitting '''
with HansModel:
    start = pm.find_MAP(fmin=sp.optimize.fmin_powell)
    Method = pm.HamiltonianMC(vars=[beta, lamb])
    trace = pm.sample(10000, step=Method, start=start)

burnin = 5000
pm.traceplot(trace[burnin:])
print(pm.summary(trace[burnin:]))
plt.show()
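The hierarchy being fit, written out (pymc3's Gamma beta is a rate parameter): beta ~ Gamma(delta=1.0, gam=0.01), lambda_i ~ Gamma(alpha=1.8, beta), and y_i ~ Poisson(t_i * lambda_i), so each unit gets its own failure rate shrunk toward a shared Gamma prior.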
Example #14
    def train_model(self, method, prior, res=22.):
        test_model = self.get_model(prior, test=True)
        with test_model:
            # True step size is scaled down
            # by (1/n)^(1/4), so that a res of 20º -> 15º sampling in 3-d
            # therefore, we scale UP by (1/n)^(1/4) so when we specify
            # 20º, we get 20º!
            step_scale = res * np.pi / 180 / (1 / self.n_d)**0.25
            if method == 'MH':
                step = pm.Metropolis()
                test_trace = pm.sample(step=step,
                                       cores=1,
                                       chains=1,
                                       draws=1,
                                       tune=8000,
                                       discard_tuned_samples=False)
                cov = pm.trace_cov(test_trace)
                self.write_(cov, method, prior)
                return
            elif method == 'HMC':
                step = pm.HamiltonianMC(step_scale=step_scale)
                test_trace = pm.sample(step=step,
                                       cores=1,
                                       chains=1,
                                       draws=1,
                                       tune=8000,
                                       discard_tuned_samples=False)
                cov = pm.trace_cov(test_trace)
                self.write_(cov, method, prior)
                return
            elif method == 'NUTS':
                step = pm.NUTS(step_scale=step_scale,
                               adapt_step_size=False,
                               target_accept=0.8)
                test_trace = pm.sample(step=step,
                                       cores=1,
                                       chains=1,
                                       draws=1,
                                       tune=8000,
                                       discard_tuned_samples=False)
                cov = pm.trace_cov(test_trace)
                accept = test_trace.get_sampler_stats("mean_tree_accept")
                print("Mean acceptance for method", method, "and prior",
                      prior, "is", accept.mean())
                print("Step size is",
                      test_trace.get_sampler_stats("step_size_bar"))
                t_a = accept.mean() if accept.mean() > 0.5 else 0.8
        training_model = self.get_model(prior)
        # Method is NUTS
        # NUTS tuning will be a little more intense:
        with training_model:
            step = pm.NUTS(scaling=cov,
                           is_cov=True,
                           step_scale=step_scale,
                           adapt_step_size=False,
                           target_accept=t_a)
            train_trace = pm.sample(step=step,
                                    cores=1,
                                    chains=1,
                                    draws=1,
                                    tune=500,
                                    discard_tuned_samples=False)
            cov = pm.trace_cov(train_trace)
        self.write_(cov, method, prior)
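The tune-then-reuse pattern above in miniature: one tuning pass, then the empirical covariance of the kept tuning samples becomes the mass matrix of a fresh NUTS sampler (toy model; sketch only):

import pymc3 as pm

with pm.Model():
    pm.Normal('x', mu=0, sd=1, shape=3)
    warm = pm.sample(draws=1, tune=1000, chains=1, cores=1,
                     discard_tuned_samples=False)
    cov = pm.trace_cov(warm)
    step = pm.NUTS(scaling=cov, is_cov=True)
    trace = pm.sample(500, step=step, chains=1, cores=1)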
Example #15
def main():
    '''Example:
    python fit_model_to_generated_dataset.py --modelname "11" --exp 1 --steps 1000 --steps_tune 100 --seed 3

    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('--seed', '-se', type=int, default=3)
    parser.add_argument('--modelname', '-m', type=str, default=None)
    parser.add_argument('--steps', '-st', type=int, default=1000)
    parser.add_argument('--steps_tune', '-stt', type=int, default=100)
    parser.add_argument('--task', '-tt', type=str, default='both')
    parser.add_argument('--exp', '-e', type=int, default=1)

    args = parser.parse_args()
    print(args.steps)
    print(args.steps_tune)
    print(args.seed)
    print(type(args.seed))
    print(args.exp)

    # load behavioral data
    if args.exp == 1:
        dftmp = pd.read_csv('../data/participant_table_exp1.csv')
        data = get_data(dftmp)
    else:
        dftmp = pd.read_csv('../data/participant_table_exp2.csv')
        data = get_data_online(dftmp)

    # set up data for model fitting (extract relevant behavioral data)
    X = {}
    Y = {}
    C = {}
    subj_indices = slice(0, 157)
    subj_indices_86 = slice(0, 86)
    X['NN'] = data['outcomes_c_flipped'].shape[1]
    X['Nboth'] = data['Nboth']
    X['Nrewonly'] = data['Nrewonly']
    X['Npainonly'] = data['Npainonly']
    subj_indices_both = slice(0, X['Nboth'])
    subj_indices_rew_only = slice(0, X['Nrewonly'])
    subj_indices_pain_only = slice(0, X['Npainonly'])

    Y['participants_choice'] = data['participants_choice'][:, subj_indices]
    for var in [
            'outcomes_c_flipped', 'mag_1_c', 'mag_0_c', 'stabvol', 'rewpain'
    ]:
        X[var] = data[var][:, subj_indices]

    C['Bi1item_w_j_scaled_both'] = data['Bi1item_w_j_scaled_both'][
        subj_indices_both]
    C['Bi2item_w_j_scaled_both'] = data['Bi2item_w_j_scaled_both'][
        subj_indices_both]
    C['Bi3item_w_j_scaled_both'] = data['Bi3item_w_j_scaled_both'][
        subj_indices_both]
    C['Bi1item_w_j_scaled_rew_only'] = data['Bi1item_w_j_scaled_rew_only'][
        subj_indices_rew_only]
    C['Bi2item_w_j_scaled_rew_only'] = data['Bi2item_w_j_scaled_rew_only'][
        subj_indices_rew_only]
    C['Bi3item_w_j_scaled_rew_only'] = data['Bi3item_w_j_scaled_rew_only'][
        subj_indices_rew_only]
    C['Bi1item_w_j_scaled_pain_only'] = data['Bi1item_w_j_scaled_pain_only'][
        subj_indices_pain_only]
    C['Bi2item_w_j_scaled_pain_only'] = data['Bi2item_w_j_scaled_pain_only'][
        subj_indices_pain_only]
    C['Bi3item_w_j_scaled_pain_only'] = data['Bi3item_w_j_scaled_pain_only'][
        subj_indices_pain_only]

    # load estimated parameters from actual dataset
    if args.modelname == '11':

        # some specifications for fitting
        covariate = 'Bi3itemCDM'
        hierarchical = True
        B_max = 10
        import models_2thr9_11 as model_specific

        # load previous fit / parameters
        # this file path might need to be changed, depending on how the main model was run.
        model_name = 'model=11_covariate=Bi3itemCDM_date=2021_1_5_samples=1000_seed=3_exp=1.pkl'
        with open('../fitting_behavioral_model/model_fits/' + model_name,
                  "rb") as buff:
            model_output = pickle.load(buff)
        trace = model_output['trace']
        ppc = model_output['ppc']
        model = model_output['model']
        params = model.params

        # extract previous parameters, these are the ground truth parameters that we want to recover
        Theta_est = trace['Theta'].mean(axis=0)

        #subset participants (not implemented right now)
        Theta_est = Theta_est[subj_indices, :]

    if args.modelname == '11trip':

        covariate = 'Bi3itemCDM'
        hierarchical = True
        B_max = 10
        import models_2thr9_11 as model_specific

        # load previous fit / parameters
        model_name = 'model=11trip_covariate=Bi3itemCDM_date=2021_1_5_samples=1000_seed=3_exp=1.pkl'
        with open('../fitting_behavioral_model/model_fits/' + model_name,
                  "rb") as buff:
            model_output = pickle.load(buff)
        trace = model_output['trace']
        ppc = model_output['ppc']
        model = model_output['model']
        params = model.params

        # extract previous parameters, these are the ground truth parameters that we want to recover
        Theta_est = trace['Theta'].mean(axis=0)
        Theta_est = Theta_est[subj_indices, :]

    # specify generative model
    f = model_specific.create_gen_choice_model(X,
                                               Y,
                                               param_names=params,
                                               B_max=B_max,
                                               seed=int(args.seed))

    # generate new data using ground truth parameters
    gen_choice, gen_outcome_valence, *_ = f(Theta_est)

    # replace participants choices with generative choices
    Y_gen = {}
    Y_gen['participants_choice'] = gen_choice
    X_gen = copy.deepcopy(X)
    X_gen[
        'outcome_valence'] = gen_outcome_valence  # only used for visualization

    idx_first_reward_pain = np.min(
        [pi for (pi, p) in enumerate(params) if 'rew' in p])

    # compile base model
    model = create_model_base(
        X_gen,
        Y_gen,
        C,
        params=params,
        K=len(params),
        Konetask=idx_first_reward_pain,
        rew_slice=slice(0, idx_first_reward_pain),
        pain_slice=slice(0, idx_first_reward_pain),
        split_by_reward=True,
        includes_subjs_with_one_task=True,
        covariate=covariate,
        hierarchical=hierarchical,
        covv='diag',
        coding='deviance',
    )

    # compile specific model
    model = model_specific.combined_prior_model_to_choice_model(
        X_gen,
        Y_gen,
        param_names=params,
        model=model,
        save_state_variables=False,
        B_max=B_max)

    # save name
    now = datetime.datetime.now()
    filename = ('model=' + args.modelname + '_date=' + str(now.year) + '_' +
                str(now.month) + '_' + str(now.day) + '_samples=' +
                str(args.steps) + '_seed=' + str(args.seed) + '_exp=' +
                str(args.exp))

    # save empty placeholder
    with open('./model_fits/' + filename + '.pkl', "wb") as buff:
        print('saving placeholder')
        pickle.dump({}, buff)

    # sample (fit)
    with model:
        print('sampling from posterior')
        MAP = {}
        step = pm.HamiltonianMC(target_accept=.95)
        trace = pm.sample(args.steps,
                          step=step,
                          chains=4,
                          tune=args.steps_tune,
                          random_seed=args.seed)
        ppc = pm.sample_ppc(trace, 500)

    if hierarchical:
        hier = 'hier'
    else:
        hier = 'nonhier'

    # save completed results
    with open('./model_fits/' + filename + '.pkl', "wb") as buff:
        pickle.dump(
            {
                'model': model,
                'trace': trace,
                'ppc': ppc,
                'MAP': MAP,
                'Theta_est': Theta_est,
                'X_gen': X_gen,
                'Y_gen': Y_gen,
                'C': C,
                'subj_indices': subj_indices,
                'subj_indices_both': subj_indices_both,
                'subj_indices_rew_only': subj_indices_rew_only,
                'subj_indices_pain_only': subj_indices_pain_only
            }, buff)
Example #16
def NUTS_run(samp_obj,
             T,
             nsamples=1000,
             tune=200,
             nchains=1,
             ncpus=4,
             hpc=False,
             method='NUTS',
             prior='umvt'):
    """
    Run the sampling job according to the given protocol (default: NUTS)
    """
    # Define the model and model parameters
    beta = 1 / (constants.kB * T) * constants.E_h
    logpE = lambda E: -E  # E must be dimensionless
    logp, Z, tmodes = generate_logprior(samp_obj, T, prior)
    syms = np.array([mode.get_symmetry_number() for mode in tmodes])
    Ks = np.array([beta * mode.get_spline_fn()(0, 2) for mode in tmodes])
    variances = get_initial_mass_matrix(tmodes, T)
    #variances = 1/Ks
    geom = Geometry(samp_obj, samp_obj.torsion_internal, syms)
    energy_fn = Energy(geom, beta)
    n_d = len(tmodes)
    resolution = 10.0  #degrees
    step_scale = resolution * (np.pi / 180) / (1 / n_d)**(0.25)
    L = 2 * np.pi / syms
    center_mod = lambda x: \
        ((x.transpose() % L) - L*((x.transpose() % L) // (L/2))).transpose()
    if not hpc:
        with pm.Model() as model:
            xy = pmx.UnitDisk('xy', shape=(2, n_d))
            x = pm.Deterministic('x', tt.arctan2(xy[1], xy[0]))
            phi = pm.DensityDist('phi', logp, observed=x)
            bE = pm.Deterministic('bE', -logp(phi)+\
                    (np.random.rand()-0.5)/10000)
            DeltaE = bE - (-logp(phi))
            alpha = pm.Deterministic('a', np.exp(-DeltaE))
            E_obs = pm.Potential('E_obs', logpE(DeltaE))
            #E_obs = pm.DensityDist('E_obs', lambda E: logpE(E),
            #        observed={'E':DeltaE})
        with model:
            print("Method is", method)
            if method == 'NUTS':
                #nuts_kwargs = dict(target_accept=0.5,
                #        step_scale=step_scale, early_max_treedepth=5,
                #        max_treedepth=6, adapt_step_size=False)
                #step = pm.NUTS(scaling=variances, is_cov=True,
                #        **nuts_kwargs)
                step = pm.NUTS(target_accept=0.6,
                               step_scale=step_scale,
                               adapt_step_size=False)
                trace = pm.sample(1000, step=step, cores=1)
            elif method == 'HMC':
                return
                hmc_kwargs = dict(target_accept=0.5, step_scale=step_scale)
                step = pm.HamiltonianMC(scaling=variances,
                                        is_cov=True,
                                        **hmc_kwargs)
                trace = pm.sample(nsamples,
                                  tune=tune,
                                  step=step,
                                  chains=nchains,
                                  cores=1,
                                  discard_tuned_samples=False)
            elif method == 'MH':
                mh_kwargs = dict(S=variances)
                step = pm.Metropolis(**mh_kwargs)
                #step = pm.NUTS(
                #        target_accept=0.65, scaling=variances, is_cov=True,
                #        step_scale=step_scale, early_max_treedepth=6,
                #        max_treedepth=6, adapt_step_size=False)
                trace = pm.sample(nsamples,
                                  tune=tune,
                                  step=step,
                                  chains=nchains,
                                  cores=1,
                                  discard_tuned_samples=False)
    else:
        with pm.Model() as model:
            #x = pm.DensityDist('x', logp, shape=n_d, testval=np.random.rand(n_d)*2*np.pi)
            x = pm.DensityDist('x',
                               logp,
                               shape=n_d,
                               testval=get_initial_values(tmodes, T))
            xmod = pm.Deterministic('xmod', center_mod(x))
            bE = pm.Deterministic('bE', energy_fn(x))
            DeltaE = (bE) - (-logp(x))
            alpha = pm.Deterministic('a', np.exp(-DeltaE))
            E_obs = pm.DensityDist('E_obs',
                                   lambda E: logpE(E),
                                   observed={'E': DeltaE})
        with model:
            if method == 'NUTS':
                nuts_kwargs = dict(target_accept=0.5,
                                   step_scale=step_scale,
                                   early_max_treedepth=5,
                                   max_treedepth=6,
                                   adapt_step_size=False)
                step = pm.NUTS(scaling=variances, is_cov=True, **nuts_kwargs)
            elif method == 'HMC':
                step = pm.HamiltonianMC()
            elif method == 'MH':
                step = pm.Metropolis()
            trace = pm.sample(nsamples,
                              tune=tune,
                              step=step,
                              chains=nchains,
                              cores=1)
            #<-  Indent the following lines to here after debugging:
    pickle_the_model(trace,
                     model,
                     Z,
                     samp_obj,
                     geom,
                     tmodes,
                     ncpus,
                     nchains,
                     tune,
                     nsamples,
                     T,
                     prior=prior,
                     method=method)
    #thermo_obj = MCThermoJob(trace, T, sampT=T, samp_obj=samp_obj, model=model)
    #a,b = thermo_obj.execute()
    if not hpc:
        plot_MC_torsion_result(trace, tmodes, T)
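The xy/arctan2 construction above is a circular reparameterization: sample a point in the unit disk (pymc3-ext's UnitDisk) and keep only its angle, which yields a periodic coordinate with no hard boundary at +/- pi. Stripped to its essentials (a sketch, assuming pymc3-ext is installed):

import pymc3 as pm
import pymc3_ext as pmx
import theano.tensor as tt

with pm.Model():
    xy = pmx.UnitDisk('xy', shape=(2, 1))
    pm.Deterministic('angle', tt.arctan2(xy[1], xy[0]))
    trace = pm.sample(500, chains=1, cores=1)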
Example #17
            n=1,
            p=[type1success, type2success, type3success, type4success],
            shape=4).random()
        indexCancer = np.where(cancerType == 1)[0]
        cancerTypeGeneratedSample = cancerTypeValues[indexCancer[0]]
        age = pm.Normal('age',
                        mu=meanValues[-1],
                        sigma=(maxAge - meanValues[-1])).random()
        cov = np.cov(DataAccessProcessedSymptopmsOnly.T)  #
        featurenumber = 41

        x = pm.math.stack(cancerTypeGeneratedSample, genderType, age)
        allMu = pm.math.concatenate([sympsTheano, x], axis=0)
        test = pm.MvNormal('out', mu=allMu, cov=cov, shape=featurenumber)
        returns = test
        step = pm.HamiltonianMC()
        trace = pm.sample(5000, step=step,
                          chains=2)  #,init='adapt_diag' # #, cores=1, chains=1

    plt.figure()
    # plot the trace file of the algorithm
    traceArray = trace['out']
    pm.traceplot(trace, var_names=['age', 'gender', 'symp1'], compact=False)
    plt.show()
    traceArray = trace['out']

    # plot the covariance fo original and generated data
    covv = np.cov(traceArray.T)
    # plt.show()
    fig = plt.figure()
    ax = sns.heatmap(covv[1:-3, 1:-3], center=0)  #
Example #18
                        total_size=total_size)

    return model, out


# build BNN
BNN, out = build_model(ann_output)

# run inference with neural_network
print(
    '\nStarting sampling. If it hangs early on (<20 samples) try restarting script or reducing path_length.\n'
)
step = pm.HamiltonianMC(path_length=0.4,
                        adapt_step_size=True,
                        step_scale=0.04,
                        gamma=0.05,
                        k=0.9,
                        t0=1,
                        target_accept=0.9,
                        model=BNN)
trace = pm.sample(n_inf_samples,
                  step=step,
                  model=BNN,
                  chains=1,
                  n_jobs=1,
                  tune=300)

# make predictions - need to update inputs in this way with validation data
ann_input.set_value(X_grid.astype('float32'))
ann_input_per.set_value(X_grid_per.astype('float32'))
init_rbf_pred = np.repeat(init_rbf_orig, X_grid.shape[0], axis=1)
rbf_input.set_value(init_rbf_pred.astype('float32'))
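With the shared inputs now pointing at the grid, posterior-predictive draws for the new inputs would follow the usual pattern (a sketch; the sample count is arbitrary):

ppc = pm.sample_ppc(trace, model=BNN, samples=200)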
Example #19
import pymc3.math
import time

#%pdb

basic_model = pymc3.Model()

with basic_model:
    pymc_beta = pymc3.Normal('beta', mu=0, sd=1, shape=D)
    pymc_y = pymc3.Bernoulli('y',
                             p=pymc3.math.sigmoid(pymc3.math.dot(x,
                                                                 pymc_beta)),
                             observed=y)

    step = pymc3.HamiltonianMC(step_scale=step_size * (1. / D)**(-1. / 4),
                               path_length=step_size * n_steps,
                               step_rand=lambda x: x,
                               scaling=np.ones(D))  #, np.float32))
    t0 = time.time()
    #pymc_samples = pymc3.sample(n_iterations, step=step, init=None, njobs=1)
    pymc_samples = pymc3.sample(n_iterations,
                                step=step,
                                init=None,
                                njobs=1,
                                chains=1,
                                tune=0)
pymc_time = time.time() - t0
print('pymc (CPU multithreaded) took %.3f seconds' % pymc_time)

#print 'PyMC3 (CPU, multi-threaded) took %.3f seconds (%.3f Edward time)' % (pymc_time, pymc_time / ed_time)

#plot(pymc_samples.get_values('beta')[:, 0])