log_freq_model = pm.Model()
with log_freq_model:
    # priors
    intercept = pm.Normal('intercept', mu=0, sd=300)
    slope = pm.Normal('slope', mu=0, sd=300)
    sigma = pm.HalfNormal('sigma', sd=300)
    # likelihood
    mu = pm.Deterministic('mu', intercept + slope * np.log(freq))
    observed_rt = pm.Normal('observed_rt', mu=mu, sd=sigma,
                            observed=rt)

#with log_freq_model:
#db = Text('./data/log_freq_model_trace')
#trace = pm.sample(draws=5000, trace=db, tune=15000,\
#n_init=200000, njobs=4)

with log_freq_model:
    trace = load('./data/log_freq_model_trace')
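The commented-out block above and the load call rely on PyMC3's plain-text trace backend (pm.backends.Text plus pymc3.backends.text.load), which exists in the PyMC3 3.x series used throughout this page but is no longer available in recent PyMC releases. A minimal, self-contained sketch of that save/load round trip; the toy model, the observed values and the directory name below are placeholders, not the data or paths used here:

# --- Sketch: PyMC3 Text-backend round trip (toy model; values and paths are placeholders) ---
import numpy as np
import pymc3 as pm
from pymc3.backends import Text
from pymc3.backends.text import load

toy_model = pm.Model()
with toy_model:
    mean = pm.Normal('mean', mu=0, sd=10)
    pm.Normal('y', mu=mean, sd=1, observed=np.array([0.1, -0.3, 0.7]))
    db = Text('./data/toy_trace')                # the trace is written as CSV files in this directory
    toy_trace = pm.sample(draws=1000, trace=db)  # sampling also populates the backend on disk

# in a later session, the saved trace can be reloaded inside the model context:
with toy_model:
    toy_trace = load('./data/toy_trace')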

mu = trace["mu"]


def generate_log_freq_figure():
    fig, (ax1, ax2) = plt.subplots(ncols=1, nrows=2)
    fig.set_size_inches(5.5, 5.5)
    # plot 1
    ax1.plot(freq, rt, marker='o', linestyle='')
    ax1.plot(freq, mu.mean(axis=0), color='red', linestyle='-')
    ax1.set_title('Observed (blue) \\& predicted (red) RTs '
                  'against log frequency')
    ax1.set_xlabel('Log frequency (log of \\# tokens/1 million words)')
    ax1.set_xscale('log', basex=10)
    ax1.set_ylabel('RTs (s)')
Example #2
    # accuracy likelihood
    odds_reciprocal = tt.exp(-(activation_from_time - threshold) / noise)
    mu_prob = Deterministic('mu_prob', 1 / (1 + odds_reciprocal))
    prob_observed = Normal('prob_observed',
                           mu=mu_prob,
                           sd=0.01,
                           observed=ACCURACY)
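The deterministic node above is the standard ACT-R logistic mapping from activation to retrieval probability, P = 1 / (1 + exp(-(activation - threshold) / noise)). A quick stand-alone numeric check with made-up toy values (purely illustrative, not parameters from this model):

# Toy numeric check of the logistic retrieval-probability mapping
import numpy as np
activation, threshold, noise = 0.5, 0.0, 0.2
prob = 1 / (1 + np.exp(-(activation - threshold) / noise))
print(round(prob, 3))   # 0.924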

#with lex_decision_with_bayes:
#step = pm.SMC(parallel=True)
#trace = pm.sample(draws=5000, step=step, njobs=1, cores=25)

#dump('../data/lex_dec_pyactr_no_imaginal', trace)

with lex_decision_with_bayes:
    trace = load('../data/lex_dec_pyactr_no_imaginal')

pm.diagnostics.gelman_rubin(trace)
pm.traceplot(trace)
plt.savefig('../figures/lex_dec_model_pyactr_no_imaginal_trace.eps')
plt.savefig('../figures/lex_dec_model_pyactr_no_imaginal_trace.png')
plt.savefig('../figures/lex_dec_model_pyactr_no_imaginal_trace.pdf')
#plt.show()

mu_rt = pd.DataFrame(trace['mu_rt']) * 1000
RT = RT * 1000
yerr_rt = [(mu_rt.mean() - mu_rt.quantile(0.025)),
           (mu_rt.quantile(0.975) - mu_rt.mean())]

mu_prob = pd.DataFrame(trace['mu_prob'])
yerr_prob = [(mu_prob.mean() - mu_prob.quantile(0.025)),
             (mu_prob.quantile(0.975) - mu_prob.mean())]
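These quantile columns give the distance from the posterior mean down to the 2.5% quantile and up to the 97.5% quantile, i.e. asymmetric error bars. A hedged sketch of how yerr_rt would typically be handed to matplotlib's errorbar; the observed-vs-predicted layout and the axis labels are assumptions, not the original plotting code:

# Sketch: posterior mean RTs with 95% credible intervals against the observed RTs
import matplotlib.pyplot as plt
fig, ax = plt.subplots(ncols=1, nrows=1)
ax.errorbar(RT, mu_rt.mean(), yerr=yerr_rt, fmt='o', label='posterior mean and 95% CRI')
ax.plot(RT, RT, linestyle='--', color='gray', label='perfect fit')
ax.set_xlabel('Observed RTs (ms)')
ax.set_ylabel('Predicted RTs (ms)')
ax.legend()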
Example #3
    def setUpClass(cls):
        super(TestTextDumpFunction, cls).setUpClass()
        text.dump(cls.name1, cls.mtrace1)
        with cls.model:
            cls.mtrace1 = text.load(cls.name1)
Example #4
np.min(every_each["logRTresid"])
np.max(every_each["logRTresid"])

every_each_model = pm.Model()
with every_each_model:
    normal_density = pm.Normal('normal_density', mu=0, sd=10)

from pymc3.backends import Text
from pymc3.backends.text import load
#with every_each_model:
#db = Text('./data/normal_trace')
#trace = pm.sample(draws=5000, trace=db, n_init=500)

# we load the results/trace of the previous run
with every_each_model:
    trace = load('./data/normal_trace')


def generate_normal_prior_figure():
    fig, ax = plt.subplots(ncols=1, nrows=1)
    fig.set_size_inches(5.5, 3.5)
    sns.distplot(trace['normal_density'], hist=True, ax=ax)
    ax.set_xlabel('Normal density, mean = 0, standard deviation = 10')
    plt.tight_layout(pad=0.5, w_pad=0.2, h_pad=0.7)
    plt.savefig('./figures/normal_prior.eps')
    plt.savefig('./figures/normal_prior.png')
    plt.savefig('./figures/normal_prior.pdf')


generate_normal_prior_figure()
Example #6
exponential_model = pm.Model()
with exponential_model:
    # priors
    intercept = pm.Normal('intercept', mu=0, sd=100)
    slope = pm.Normal('slope', mu=0, sd=100)
    sigma = pm.HalfNormal('sigma', sd=100)
    # likelihood
    mu = pm.Deterministic('mu', intercept + slope * delay)
    log_savings = pm.Normal('log_savings',
                            mu=mu,
                            sd=sigma,
                            observed=np.log(savings))

#with exponential_model:
#db = Text('./data/exponential_model_trace')
#trace = pm.sample(draws=5000, trace=db, n_init=50000, njobs=4)

with exponential_model:
    trace = load('./data/exponential_model_trace')

mu = trace["mu"]
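A brief note on the back-transformation used in the figure function below: the likelihood is placed on log(savings), so the linear predictor intercept + slope * delay corresponds to exponential decay on the raw savings scale, and posterior predictions have to be exponentiated before plotting. A short sketch using the mu draws loaded above (the variable names introduced here are just illustrative):

posterior_savings = np.exp(mu)                        # back-transform every posterior draw
median_curve = np.median(posterior_savings, axis=0)   # one predicted savings value per delay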


def generate_ebbinghaus_data_figure_2():
    fig, (ax1, ax2) = plt.subplots(ncols=1, nrows=2)
    fig.set_size_inches(5.5, 4.5)
    # plot 1
    ax1.plot(delay, savings, marker='o', linestyle='--')
    ax1.plot(delay, np.median(np.exp(mu), axis=0), color='red', linestyle='-')
    ax1.set_title(
        'b. Log performance (blue) and exponential model estimates (red)')
    ax1.set_xlabel('Delay (hours)')
    ax1.set_ylabel('Savings (log \\%)')
    ax1.set_yscale('log', basey=10)
Example #7
    # accuracy likelihood
    odds_reciprocal = tt.exp(-(activation_from_time - threshold) / noise)
    mu_prob = Deterministic('mu_prob', 1 / (1 + odds_reciprocal))
    prob_observed = Normal('prob_observed',
                           mu=mu_prob,
                           sd=0.01,
                           observed=ACCURACY)

# with lex_decision_with_bayes:
# step = pm.SMC(parallel=True)
# trace = pm.sample(draws=5000, step=step, njobs=1, cores=25)

# dump('../data/lex_dec_pyactr_with_imaginal_delay_0', trace)

with lex_decision_with_bayes:
    trace = load('../data/lex_dec_pyactr_with_imaginal_delay_0')

pm.diagnostics.gelman_rubin(trace)
pm.traceplot(trace)
plt.savefig('../figures/lex_dec_model_pyactr_with_imaginal_delay_0_trace.eps')
plt.savefig('../figures/lex_dec_model_pyactr_with_imaginal_delay_0_trace.png')
plt.savefig('../figures/lex_dec_model_pyactr_with_imaginal_delay_0_trace.pdf')
#plt.show()

mu_rt = pd.DataFrame(trace['mu_rt']) * 1000
RT = RT * 1000
yerr_rt = [(mu_rt.mean() - mu_rt.quantile(0.025)),
           (mu_rt.quantile(0.975) - mu_rt.mean())]

mu_prob = pd.DataFrame(trace['mu_prob'])
yerr_prob = [(mu_prob.mean() - mu_prob.quantile(0.025)),
             (mu_prob.quantile(0.975) - mu_prob.mean())]
Example #8
    def setup_class(cls):
        super().setup_class()
        text.dump(cls.name1, cls.mtrace1)
        with cls.model:
            cls.mtrace1 = text.load(cls.name1)
Example #9
        # Priors
        latency_exponent = HalfNormal('le', sd=0.3)
        # Likelihood
        pyactr_rt = actrmodel_latency(latency_exponent)
        mu_rt = Deterministic('mu_rt', pyactr_rt)
        rt_observed = Normal('rt_observed', mu=mu_rt, sd=30, observed=RT)
        # Compute posteriors
        #step = pm.SMC()
        #trace = sample(draws=NDRAWS, step=step, njobs=1)
else:
    run_exp(rank)

#dump('parser_model_'+str(NDRAWS)+'_iterations', trace)

with parser_model:
    trace = load('../../data/parser_model_' + str(NDRAWS) + '_iterations')

pm.diagnostics.gelman_rubin(trace)
traceplot(trace)
plt.savefig('../../figures/parser_' + str(NDRAWS) + '_trace.eps')
plt.savefig('../../figures/parser_' + str(NDRAWS) + '_trace.png')
plt.savefig('../../figures/parser_' + str(NDRAWS) + '_trace.pdf')
#plt.show()

mu_rt = pd.DataFrame(trace['mu_rt'])
yerr_rt = [(mu_rt.mean() - mu_rt.quantile(0.025)),
           (mu_rt.quantile(0.975) - mu_rt.mean())]


def generate_parser_model_figure():
    fig, ax1 = plt.subplots(ncols=1, nrows=1)
Example #10
    # Likelihood
    pyactr_rt = actrmodel_latency(rule_firing, latency_factor,
                                  buffer_spreading_activation,
                                  strength_of_association)
    mu_rt = Deterministic('mu_rt', pyactr_rt)
    rt_observed = Normal('rt_observed', mu=mu_rt, sd=10, observed=RT)
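actrmodel_latency is a Python-level simulator (a pyactr run) whose output is used as a deterministic node. One common way to make such a plain Python function callable inside a PyMC3 model is to wrap it as a Theano op with as_op; the sketch below is an assumption about that setup, not the original wrapper, and its two-parameter signature and returned values are placeholders (the real function takes the four parameters sampled above):

# Hypothetical sketch: exposing a Python simulator to PyMC3 via Theano's as_op
import numpy as np
import theano.tensor as tt
from theano.compile.ops import as_op

@as_op(itypes=[tt.dscalar, tt.dscalar], otypes=[tt.dvector])
def simulated_latency(rule_firing, latency_factor):
    # run the ACT-R simulation for each experimental condition here;
    # placeholder output: one predicted RT per condition
    return np.array([0.55, 0.61, 0.68])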

#with fan_model:
    # Compute posteriors
    #step = pm.SMC(parallel=True)
    #trace = pm.sample(draws=5000, step=step, njobs=1, cores=50)

#dump('../../data/fan_5000_draws', trace)

with fan_model:
    trace = load('../../data/fan_5000_draws')

pm.diagnostics.gelman_rubin(trace)
traceplot(trace)
plt.savefig('../../figures/fan_5000_trace.eps')
plt.savefig('../../figures/fan_5000_trace.png')
plt.savefig('../../figures/fan_5000_trace.pdf')
#plt.show()

mu_rt = pd.DataFrame(trace['mu_rt'])
yerr_rt = [(mu_rt.mean() - mu_rt.quantile(0.025)),
           (mu_rt.quantile(0.975) - mu_rt.mean())]

def generate_fan_model_figure():
    fig, ax1 = plt.subplots(ncols=1, nrows=1)
    fig.set_size_inches(5.5, 3.5)