Example 1
    # calculate the weights with a normal kernel whose scale is the
    # delta parameter (fourth column of pop)
    weights = np.log(dists.normal(mean=0.0, std=pop[:, 3]).pdf(sse))

    # check whether to return both the weights and the predicted values
    if save_posts:
        return weights, pred
    else:
        return weights


# set up the data
xData = np.array([5.357, 9.861, 5.457, 5.936, 6.161, 6.731])
yData = np.array([0.376, 7.104, 0.489, 1.049, 1.327, 2.077])

# set up the parameters
params = [
    Param(name='a', prior=dists.uniform(-100, 100)),
    Param(name='b', prior=dists.uniform(-100, 100)),
    Param(name='c', prior=dists.uniform(-100, 100)),
    Param(
        name='delta',
        display_name=r'$\mathbf{\delta}$',
        prior=dists.exp(20),
        init_prior=dists.uniform(0, 10),
    ),
]

# set up the model
mod = Model(name='fun',
            params=params,
            like_fun=eval_fun,
            like_args=(xData, yData),
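
The excerpt above starts partway through eval_fun, so the model prediction (pred) and error term (sse) are not shown, and the Model(...) call is cut off. A rough standalone sketch of the weighting step, using SciPy in place of dists and a made-up quadratic model (the model form, the pop values, and the imports are assumptions, not part of the original):

import numpy as np
from scipy.stats import norm

# same data as above
xData = np.array([5.357, 9.861, 5.457, 5.936, 6.161, 6.731])
yData = np.array([0.376, 7.104, 0.489, 1.049, 1.327, 2.077])

# two hypothetical proposals; columns are a, b, c, delta
pop = np.array([[1.0, -2.0, 0.5, 0.3],
                [0.8, -1.5, 0.2, 0.5]])

# hypothetical predictions (the real model function is not shown above);
# here a simple quadratic a*x**2 + b*x + c, one row per proposal
pred = (pop[:, 0, None] * xData**2 +
        pop[:, 1, None] * xData +
        pop[:, 2, None])

# sum of squared errors between prediction and data for each proposal
sse = ((pred - yData)**2).sum(axis=1)

# normal kernel centered at zero with scale delta, matching
# np.log(dists.normal(mean=0.0, std=pop[:, 3]).pdf(sse))
weights = norm.logpdf(sse, loc=0.0, scale=pop[:, 3])
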
Example 2
def eval_fun(abc, pop, *args):

    res = Parallel(n_jobs=n_jobs)(delayed(eval_prop)(indiv, args[0])
                                  for indiv in pop)

    weights = np.asarray(res)

    if abc._save_posts:
        return weights, None
    else:
        return weights


# set up the parameters
params = [
    Param(name='mu', prior=dists.uniform(-20, 20)),
    Param(name='sd', prior=dists.uniform(0, 20)),
]

burnin = 50
iterations = 500

# set up abc
do_true = True
abc_true = DEMC(params,
                eval_fun,
                eval_args=(do_true, ),
                num_groups=1,
                group_size=25,
                proposal_gen=DE(gamma_best=None, rand_base=True),
                migration_prob=0.0,
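
Both eval_prop and n_jobs are defined outside this excerpt. A minimal sketch of a per-proposal evaluator that could be plugged into the joblib call above, assuming the two parameters (mu, sd) define a normal likelihood over some observed data; the data, the eval_prop body, and passing the observations instead of the do_true flag are all assumptions:

import numpy as np
from joblib import Parallel, delayed
from scipy.stats import norm

n_jobs = 2
data = np.random.default_rng(0).normal(loc=1.0, scale=3.0, size=100)  # stand-in observations


def eval_prop(indiv, obs):
    # hypothetical evaluator: indiv is one row of pop, i.e. (mu, sd)
    mu, sd = indiv
    if sd <= 0:
        return -np.inf
    return norm.logpdf(obs, loc=mu, scale=sd).sum()


# two example proposals, evaluated in parallel as in eval_fun above
# (the original instead passes args[0], the do_true flag, as the second argument)
pop = np.array([[0.5, 2.0], [1.5, 3.5]])
res = Parallel(n_jobs=n_jobs)(delayed(eval_prop)(indiv, data) for indiv in pop)
weights = np.asarray(res)
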
Example 3
        # set up the params
        params = [
            Param(name='a',
                  prior=dists.trunc_normal(5.0, 20.0, lower=0.0, upper=50.0)),
            Param(name='b', prior=dists.normal(0.0, 5.0)),
            Param(name='c',
                  prior=dists.trunc_normal(5.0, 10.0, lower=0.0, upper=30.0)),
            Param(name='kappa',
                  prior=dists.normal(0.0, 1.4),
                  transform=dists.invlogit),
            Param(name='beta',
                  prior=dists.normal(0.0, 1.4),
                  transform=dists.invlogit),
            Param(name='alpha',
                  prior=dists.trunc_normal(2.5, 10.0, lower=0.0, upper=30.0)),
            Param(name='t0', prior=dists.uniform(0., min_rt))
        ]
        pnames = [p.name for p in params]
        # instantiate model object
        m = Model('urdm_lca',
                  params=params,
                  like_fun=eval_fun_lca,
                  like_args=(s, ),
                  init_multiplier=4,
                  verbose=True,
                  purify_every=5)

        # set up the run name
        output_name = 'rdm_lca_tcv_both_cb_afrl_mri_subj_' + str(
            int(s)) + '.tgz'
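
The trunc_normal priors above are specified by a mean, standard deviation, and lower/upper bounds. A comparable density can be built with scipy.stats.truncnorm, which expects the truncation points rescaled into standard-deviation units; the values below mirror the 'a' prior above, and whether RunDEMC's pdf matches SciPy's exactly is an assumption:

from scipy.stats import truncnorm

mean, std, lower, upper = 5.0, 20.0, 0.0, 50.0

# SciPy parameterizes the bounds as (bound - mean) / std
a, b = (lower - mean) / std, (upper - mean) / std
prior = truncnorm(a, b, loc=mean, scale=std)

samples = prior.rvs(size=5, random_state=0)
densities = prior.pdf(samples)
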
Example 4

def eval_fun(abc, pop, *args):

    res = Parallel(n_jobs=n_jobs)(delayed(eval_prop)(indiv, args[0]) for indiv in pop)

    weights = np.asarray(res)

    if abc._save_posts:
        return weights, None
    else:
        return weights


# set up the parameters
params = [Param(name="mu", prior=dists.uniform(-20, 20)), Param(name="sd", prior=dists.uniform(0, 20))]

burnin = 50
iterations = 500

# set up abc
do_true = True
abc_true = DEMC(
    params,
    eval_fun,
    eval_args=(do_true,),
    num_groups=1,
    group_size=25,
    proposal_gen=DE(gamma_best=None, rand_base=True),
    migration_prob=0.0,
    initial_zeros_ok=False,
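
This is essentially the same setup as Example 2, and n_jobs is again defined outside the excerpt. One way to choose it is from the available cores (a sketch; the all-but-one-core policy is an assumption, and joblib also accepts n_jobs=-1 to use every core):

import os
from joblib import Parallel, delayed

# leave one core free for the main process
n_jobs = max(1, (os.cpu_count() or 2) - 1)

# quick check that the worker pool runs
squares = Parallel(n_jobs=n_jobs)(delayed(pow)(i, 2) for i in range(4))
# squares == [0, 1, 4, 9]
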
Example 5
    ])

    for s in dat.keys():
        # Append a new model, note the use of the hyperpriors
        params = [

            # sig_b is the sigmoid transition point
            #Param(name='sig_b',
            #                prior=dists.uniform(0., 15.0), # change to number of tau stars
            #                ),

            # new item strength
            Param(
                name='alpha',
                display_name=r'$\alpha$',
                prior=dists.uniform(0, 10.0),
                #init_prior=dists.trunc_normal(mean=.25,std=.5,lower=0,upper=5)
            ),
            Param(
                name='nu',
                display_name=r'$\nu$',
                prior=dists.uniform(0, 10.0),
                #init_prior=dists.trunc_normal(mean=.25,std=.5,lower=0,upper=5)
            ),
            # threshold
            Param(
                name='a',
                display_name=r'$a$',
                prior=dists.uniform(0, 10.0),
                #init_prior=dists.trunc_normal(mean=.25,std=.5,lower=0,upper=5)
            ),
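
The alpha, nu, and a parameters above share the same uniform(0, 10) prior and differ only in their name and LaTeX display name, so the list can also be built from a short spec. A sketch that only reuses the Param and dists calls shown in these excerpts; the RunDEMC import path is an assumption:

from RunDEMC import Param, dists

spec = [('alpha', r'$\alpha$'),
        ('nu', r'$\nu$'),
        ('a', r'$a$')]

params = [Param(name=name,
                display_name=disp,
                prior=dists.uniform(0, 10.0))
          for name, disp in spec]
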
Example 6
    # calculate the weights with a normal kernel whose scale is the
    # delta parameter (fourth column of pop)
    weights = np.log(norm.pdf(sse, scale=pop[:, 3]))
    # weights = np.log(norm.pdf(sse, scale=.1))

    # check whether to return both the weights and the predicted values
    if abc._save_posts:
        return weights,pred
    else:
        return weights

# set up the data
xData = np.array([5.357, 9.861, 5.457, 5.936, 6.161, 6.731])
yData = np.array([0.376, 7.104, 0.489, 1.049, 1.327, 2.077])

# set up the parameters
params = [Param(name='a',prior=dists.uniform(-100,100)),
          Param(name='b',prior=dists.uniform(-100,100)),
          Param(name='c',prior=dists.uniform(-100,100)),
          Param(name='delta',display_name=r'$\mathbf{\delta}$',
                prior=dists.exp(20),
                init_prior=dists.uniform(0,10),
                ),
          ]

# set up abc
abc = DEMC(params, eval_fun, eval_args = (xData,yData),
            num_groups=4, group_size=30,
            proposal_gen=DE_LOCAL_TO_BEST(),
            migration_prob=0.1, initial_zeros_ok=False,
            use_priors=True, save_posts=True)
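
Here the kernel scale comes from the fourth column of pop, i.e. the estimated delta parameter, while the commented-out line pins it at 0.1. A toy comparison of the two weightings on made-up error values (pure NumPy/SciPy, not RunDEMC code):

import numpy as np
from scipy.stats import norm

sse = np.array([0.05, 0.5, 5.0])    # hypothetical sums of squared error
delta = np.array([0.2, 0.3, 0.1])   # hypothetical per-proposal kernel scales

w_estimated = np.log(norm.pdf(sse, scale=delta))  # scale taken from delta
w_fixed = np.log(norm.pdf(sse, scale=0.1))        # fixed tolerance of 0.1
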
Example 7
        Param(
            name='nu',
            display_name=r'$\nu$',
            prior=dists.trunc_normal(mean=2., std=10., lower=0., upper=10.),
        ),
        Param(name='a',
              display_name=r'$a$',
              prior=dists.trunc_normal(mean=2., std=10., lower=0., upper=10.)),
        Param(name='w',
              display_name=r'$w$',
              prior=dists.normal(mean=0, std=1.4),
              transform=dists.invlogit),
        Param(
            name='t0',
            display_name=r'$t_0$',
            prior=dists.uniform(0, min_RT),
        ),
    ]

    # grab the param names
    pnames = [p.name for p in params]
    # initialize the model
    m = Model(s,
              params=params,
              like_fun=eval_fun,
              like_args=(s, data_sub, pnames),
              num_chains=80,
              init_multiplier=4,
              verbose=True)

    # set number of desired burn-in trials
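
The w parameter is sampled on an unbounded scale under a normal(0, 1.4) prior and only then squashed into (0, 1) by the inverse-logit transform, which keeps proposals unconstrained while giving a broad prior over the unit interval. A quick check of that mapping with SciPy's expit (sample size and seed are arbitrary):

import numpy as np
from scipy.special import expit

rng = np.random.default_rng(0)
raw = rng.normal(loc=0.0, scale=1.4, size=100_000)  # draws on the unbounded scale
w = expit(raw)                                      # inverse logit into (0, 1)
print(w.min(), w.max())
print(np.histogram(w, bins=10, range=(0, 1))[0])
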
Example 8
             display_name=r'$\beta$',
             prior=dists.CustomDist(
                 pdf=lambda x: np.exp(-1.5 * np.log(1 + x**2)),
                 rvs=dists.laplace(0, 5).rvs))
# Fixed noise across subjects
# Using a custom Jeffreys' prior
sigma = Param(name='sigma',
              display_name=r'$\sigma$',
              prior=dists.CustomDist(pdf=lambda x: np.exp(-np.log(x)),
                                     rvs=dists.invgamma(1, 1).rvs))

# Hyperprior over intercept using a normal distribution
halpha = HyperPrior('alpha',
                    dists.normal,
                    params=[
                        Param(name='mu', prior=dists.uniform(-50, 50)),
                        Param(name='sig', prior=dists.invgamma(1, 1))
                    ])

# set up the submodels for each participant
smods = []
for j in range(nsubj):
    # Append a new model, note the use of the hyperprior for setting
    # up the intercept param, and the fixed beta and sigma across
    # participants
    smods.append(
        Model(name=str(j),
              params=[
                  Param(name='alpha', display_name=r'$\alpha$', prior=halpha),
                  beta, sigma
              ],
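
The two CustomDist pdfs above are written as exp(log ...) expressions: for sigma, exp(-log(x)) is simply 1/x (the Jeffreys prior for a scale parameter), and for beta, exp(-1.5 * log(1 + x**2)) is (1 + x**2)**-1.5. A small numerical check of those equivalences, plus inverse-gamma draws like the ones supplied as the rvs argument (SciPy's shape/scale parameterization here is an assumption about what dists.invgamma(1, 1) denotes):

import numpy as np
from scipy.stats import invgamma

x = np.array([0.5, 1.0, 2.0, 4.0])

jeffreys = np.exp(-np.log(x))                 # identical to 1.0 / x
beta_pdf = np.exp(-1.5 * np.log(1 + x**2))    # identical to (1 + x**2) ** -1.5
assert np.allclose(jeffreys, 1.0 / x)
assert np.allclose(beta_pdf, (1 + x**2) ** -1.5)

# proposal draws for sigma, assuming shape a=1 and scale=1
draws = invgamma(a=1, scale=1).rvs(size=3, random_state=0)
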