Example #1
# imports assumed by this snippet
import scikits.bootstrap as bootstrap
from numpy import array, nanmean, percentile


def boot_test(data, thr=0, n_samples=1000000):
    # two-sided bootstrap test of the mean against threshold `thr`
    data = array(data)
    t_data = nanmean(data) - thr
    # list() tolerates generator versions of bootstrap_indexes
    boot_data = data[array(list(
        bootstrap.bootstrap_indexes(data, n_samples=n_samples)))]
    # bootstrap distribution of the mean, centered on the observed mean
    t_boot = nanmean(boot_data, 1) - nanmean(data)
    p = nanmean(abs(t_data) <= abs(t_boot))
    return p, percentile(nanmean(boot_data, 1), [2.5, 97.5])
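# A minimal usage sketch (toy data, not from the original project):
p, ci = boot_test([0.3, 1.2, 0.8, -0.1, 0.9], n_samples=10000)
print(p, ci)  # a small p and a CI excluding 0 suggest the mean differs from thr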
Example #2
# A unittest.TestCase method: with a fixed seed, bootstrap_indexes
# yields reproducible resampling indices.
def test_bootstrap_indexes(self):
    np.random.seed(1234567890)
    indexes = np.array([
        x for x in boot.bootstrap_indexes(np.array([1, 2, 3, 4, 5]),
                                          n_samples=3)
    ])
    np.testing.assert_array_equal(
        indexes,
        np.array([[2, 4, 3, 1, 3], [1, 4, 1, 4, 4], [0, 2, 1, 4, 4]]))
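# For reference, a minimal pure-NumPy equivalent of bootstrap_indexes
# (an illustrative sketch, not the library's exact code): each of the
# n_samples rows holds len(data) indices drawn with replacement.
import numpy as np

def bootstrap_indexes_sketch(data, n_samples=10000):
    n = len(data)
    for _ in range(n_samples):
        yield np.random.randint(n, size=n)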
Example #3
def boot_test1(data, thr=0, n_samples=1000000):
    # Like boot_test above, but returns both one-sided tail
    # probabilities instead of a single two-sided p-value.
    data = array(data)
    t_data = nanmean(data) - thr
    # list() tolerates generator versions of bootstrap_indexes
    boot_data = data[array(list(
        bootstrap.bootstrap_indexes(data, n_samples=n_samples)))]
    t_boot = nanmean(boot_data, 1) - nanmean(data)
    low = nanmean(t_data <= t_boot)   # fraction of resamples above the shift
    high = nanmean(t_data >= t_boot)  # fraction of resamples below the shift
    return low, high, percentile(nanmean(boot_data, 1), [2.5, 97.5])
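# A common convention for a two-sided p-value from these one-sided tails
# (an assumption, not taken from the original project):
low, high, ci = boot_test1([0.3, 1.2, 0.8, -0.1, 0.9], n_samples=10000)
p_two_sided = 2 * min(low, high)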
Example #4
text(0, -0.14, "all (n=%i)" % len(pos_idx), color="gray", fontsize=18)

xticks([0, 1], ['activity-silent', 'reactivation'])
ylabel("CCSI (sps/s)$^2$", color="k")
tick_params(axis="y", direction='in')
plot([-0.25, 1.25], [0, 0], "k--")
xlim([-0.25, 1.25])
ylim(-0.2, 0.1)

# X-CORRELATION DIFFERENCE PREF VS ANTI-PREF
subplot(2, 2, 2)

h_p = array(on_pref)[pos_idx] - array(out_pref)[pos_idx]
h_p = h_p.T

idx_p = bootstrap.bootstrap_indexes(h_p[0])
res_s_p = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, h_p) for i in idx_p)

# 68% percentile band (roughly +/- 1 SEM) across the smoothed resamples
ci_h_p = array(res_s_p)
high = amap(lambda x: percentile(x, 100 - 16), ci_h_p.T)
low = amap(lambda x: percentile(x, 16), ci_h_p.T)

h = array(on_pref)[neg_idx] - array(out_pref)[neg_idx]
h = h.T
idx_p = bootstrap.bootstrap_indexes(h[0])
res_s_n = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, h) for i in idx_p)

ci_h = array(res_s_n)
high_i = amap(lambda x: percentile(x, 100 - 16), ci_h.T)
low_i = amap(lambda x: percentile(x, 16), ci_h.T)
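# `amap` appears to be the array-map helper from the old matplotlib.pylab
# namespace, removed in later matplotlib releases. If unavailable, an
# equivalent one-liner (an assumption, not part of the original script):
amap = lambda f, xs: array(list(map(f, xs)))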
Example #5
    # map cue codes 1..8 onto angles in radians (0..2*pi)
    curr_cue = (close_trials[:, 0] - 1) / 8. * (2 * pi)

    cues_by_session[session] = mean_report_session[session]

    curr_mean_reports = mean_report_session[session][
        array(close_trials[:, 0], dtype='int') - 1]
    prev_curr += list(circdist(prev_report, curr_mean_reports))
    total_reports += list(close_report)
    total_cues += list(curr_mean_reports)

prev_curr = array(prev_curr)
total_reports = array(total_reports)
total_cues = array(total_cues)

num_cores = multiprocessing.cpu_count()
boot_idx = bootstrap.bootstrap_indexes(total_reports, n_samples=n_perms)


def one_boot(i):
    # recompute the serial-bias curve on one bootstrap resample of trials
    err, d, m_err, std_err, count, points_idx = compute_serial(
        total_reports[i], total_cues[i], prev_curr[i], xxx2)
    return m_err


M = Parallel(n_jobs=num_cores)(delayed(one_boot)(i) for i in boot_idx)

err, d, m_err, std_err, count, points_idx = compute_serial(
    total_reports, total_cues, prev_curr, xxx2)
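# A bootstrap CI of the serial-bias curve can then be read off the
# resampled curves in M, e.g. (assumed, following the pattern above):
ci_low, ci_high = percentile(M, [2.5, 97.5], axis=0)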


def one_perm(prev_curr):
Example #6
# del(all_subjs[2][1])

all_vars = np.concatenate(all_subjs)
ps_rose = array([ttest_1samp(v, 0)[1] for v in array(all_vars).T])
m_var = mean(all_vars, 0)
stderr = 2 * std(all_vars, 0) / sqrt(len(all_vars))  # +/- 2 SEM band
low = m_var - stderr
high = m_var + stderr

m_fr = loadtxt("simulations_for_plot/wolff_sims_fr.txt")
time_stk = loadtxt("simulations_for_plot/wolff_sims_time.txt")
time_stk -= 0.3 / 2
m_fr = m_fr[mean(m_fr, 1) > 0]

# baseline coefficient of variation (std/mean) over the first 5 time bins
baseline = mean(std(m_fr[:, :5], 0) / mean(m_fr[:, :5], 0))
idx = bootstrap.bootstrap_indexes(m_fr)

diff_ff_nostp = loadtxt("simulations_for_plot/diff_ff_sim_nostp.txt")
time_nostp = loadtxt("simulations_for_plot/time_sim_nostp.txt")

# CV deviation from baseline for each bootstrap resample of units
boot = [(std(m_fr[i], 0) / mean(m_fr[i], 0) - baseline) for i in idx]
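# A percentile band across the resamples could follow, e.g. (assumed,
# mirroring the other examples in this listing):
low_cv, high_cv = percentile(boot, [2.5, 97.5], axis=0)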

#### var split

# wolff 2015 trial by trial analyses
root_dir = "Data/Wolff2015/"

time2015 = io.loadmat("Data/Wolff2015/time.mat")['t'][0]

eegs_imp = []
Example #7
##############################################################################
#                                 RUN SPLITS                                 #
##############################################################################

results = Parallel(n_jobs=numcores)(
    delayed(get_split)(f) for f in list(zip(files, info)))

sb_time = np.array([r[0] for r in results])
splits = np.array([r[1] for r in results])

tmax = np.where(dtime > -.84)[0][0]
tmin = np.where(dtime > .00)[0][0]


##############################################################################
#                              SMOOTH DIST CURVE                             #
##############################################################################

idx = boot.bootstrap_indexes(splits, n_samples=10000)
ci_h = np.array(Parallel(n_jobs=numcores)(
    delayed(hf.smooth_i)(splits, i, 16) for i in idx))
split = np.mean(ci_h, 0)
high = np.array(list(map(lambda x: np.percentile(x, 97.5), ci_h.T)))
low = np.array(list(map(lambda x: np.percentile(x, 2.5), ci_h.T)))


##############################################################################
#                              SAVE FOR PLOTTING                             #
##############################################################################

# with open('../preprocessed_data/split.pkl', 'wb') as f:
#     pickle.dump([sb_time, dtime, split, high, low, tmax, tmin], f, protocol=2)
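# hf.smooth_i is not shown in this snippet. A minimal sketch consistent with
# how it is called here (resample rows, average, Gaussian-smooth with a
# sigma of 16 samples) -- an assumption, not the project's actual helper:
from scipy.ndimage import gaussian_filter1d

def smooth_i(data, i, sigma):
    # mean over the resampled rows, then smooth along the time axis
    return gaussian_filter1d(np.mean(data[i], axis=0), sigma)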
Example #8
broad_late = loadmat(
    "decoders/exp2_dec_mem_late_broadband.mat")["dec_mem_late"]

alpha_early = loadmat("decoders/exp2_dec_mem_early_alpha.mat")["dec_mem_early"]
beta_early = loadmat("decoders/exp2_dec_mem_early_beta.mat")["dec_mem_early"]
theta_early = loadmat("decoders/exp2_dec_mem_early_theta.mat")["dec_mem_early"]
broad_early = loadmat(
    "decoders/exp2_dec_mem_early_broadband.mat")["dec_mem_early"]

erp_late = loadmat("decoders/exp2_dec_mem_late_erp.mat")["dec_mem_late"]
erp_early = loadmat("decoders/exp2_dec_mem_early_erp.mat")["dec_mem_early"]
time = loadmat('decoders/exp2_dec_mem_late_time.mat')["time"][0]

### smoothing and CI for EXP 2
# (alpha_late, beta_late and theta_late are presumably loaded above this excerpt)
data = (erp_early, erp_late, alpha_early, alpha_late, beta_early, beta_late,
        theta_early, theta_late, broad_early, broad_late)
idx = bootstrap.bootstrap_indexes(erp_late, n_samples=1000)
res = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, data) for i in idx)
mres = mean(res, 2)
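# Assuming smooth_i mirrors the four-signal version shown in a later example,
# res stacks to (n_boot, n_signals, n_subjects, n_time), so mean(res, 2)
# leaves one smoothed group-mean curve per signal and resample; a band per
# signal could then be taken across resamples, e.g.:
# percentile(mres[:, 1], [5, 95], axis=0)   # erp_late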

# BF_data = []

# for i,d in enumerate(data):
# 	print(i)
# 	BF_data.append(get_BF(d.T))

# eearly,elate,aearly,alate,bearly,blate,tearly,tlate,bbearly,bblate = transpose(mres,[1,0,2])

# w = 250
# bf_elate2 = bf_elate.copy()
# for i in range(len(time)):
# 	bf_elate2[i] = np.mean(bf_elate[i:i+w])
Example #9
p_diff_pos = exact_mc_perm_test(array(on_pref)[pos_idx],
                                array(out_pref)[pos_idx], 1000)

## 95% CI
pos_pref = (array(on_pref)[pos_idx]).T
ci_pos_pref = array([bootstrap.ci(d) for d in pos_pref])

## SEM (note: this overwrites the bootstrap CI above with mean +/- SEM)
sem_pos_pref = nanstd(pos_pref, 1) / sqrt(sum(pos_idx))
ci_pos_pref = array([mean(pos_pref, 1) + sem_pos_pref,
                     mean(pos_pref, 1) - sem_pos_pref]).T



# smooth each bootstrap sample, instead of smoothing the bootstrapped
# mean - which would not make sense
h_p = array(on_pref)[pos_idx] - array(out_pref)[pos_idx]
h_p = h_p.T
idx_p = bootstrap.bootstrap_indexes(h_p[0])
res_s_p = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, h_p) for i in idx_p)

ci_h_p = array(res_s_p)
high = amap(lambda x: percentile(x, 100 - 16), ci_h_p.T)
low = amap(lambda x: percentile(x, 16), ci_h_p.T)

# 90% band; the original had these two percentiles swapped (high at 5,
# low at 95), which contradicts the variable names
high_95 = amap(lambda x: percentile(x, 95), ci_h_p.T)
low_95 = amap(lambda x: percentile(x, 5), ci_h_p.T)


h = array(on_pref)[neg_idx] - array(out_pref)[neg_idx]
h = h.T
idx_p = bootstrap.bootstrap_indexes(h[0])
res_s_n = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, h) for i in idx_p)
Example #10
        beg = axis[s]
        end = beg + w
        plt.fill_between([beg, end], [y[0], y[0]], [y[1], y[1]], color=color)

sig = 10


def smooth_i(i, data):
    # smooth each subject's resampled trace before averaging
    erp_early, erp_late, alpha_early, alpha_late = data
    eearly = [gaussian_filter(erp, sigma=sig) for erp in erp_early[i]]
    elate = [gaussian_filter(erp, sigma=sig) for erp in erp_late[i]]
    aearly = [gaussian_filter(alpha, sigma=sig) for alpha in alpha_early[i]]
    alate = [gaussian_filter(alpha, sigma=sig) for alpha in alpha_late[i]]
    return [eearly, elate, aearly, alate]


### smoothing and CI for EXP 1
idx = bootstrap.bootstrap_indexes(alpha_cued, n_samples=1000)
data = erp_cued, erp_uncued, alpha_cued, alpha_uncued
res = Parallel(n_jobs=num_cores)(delayed(smooth_i)(i, data) for i in idx)
mres = mean(res, 2)
ecued, euncued, acued, auncued = mres[:, 0], mres[:, 1], mres[:, 2], mres[:, 3]

ci_alpha_cued = array([percentile(alpha, [5, 95]) for alpha in acued.T])
ci_alpha_uncued = array([percentile(alpha, [5, 95]) for alpha in auncued.T])

ci_erp_cued = array([percentile(erp, [5, 95]) for erp in ecued.T])
ci_erp_uncued = array([percentile(erp, [5, 95]) for erp in euncued.T])

# 68% percentile band (roughly +/- 1 SEM)
sem_alpha_cued = array([percentile(alpha, [32 / 2, 100 - 32 / 2])
                        for alpha in acued.T])
sem_alpha_uncued = array([percentile(alpha, [32 / 2, 100 - 32 / 2])
                          for alpha in auncued.T])
Example #11
w2 = degrees(w2)

serial = degrees(serial)
difs = degrees(difs)

# compute p values of permutation test
a = difs[:, good_pairs, 0]
b = difs[:, good_pairs, 1]
ps = Parallel(n_jobs=num_cores)(delayed(perm_test)(b[t], a[t])
                                for t in range(len(time)))


# smooth split through time
def smooth_i(h, i):
    # mean over the resampled pairs, then convolve with the Gaussian kernel b
    m = nanmean(h[:, i], 1)
    p = filters.convolve1d(m, b / b.sum())
    return p


# negative diff -> attraction
h = -1 * (difs[:, good_pairs, 0] - difs[:, good_pairs, 1])

b = gaussian(5, 5)  # length-5 Gaussian window with std 5
idx = bootstrap.bootstrap_indexes(h[0], n_samples=10000)
res = Parallel(n_jobs=num_cores)(delayed(smooth_i)(h, i) for i in idx)
ci_h = array(res)

# pickle needs a binary-mode file; the original opened it with "w"
f = open("../preprocessed_data/0.05_1.0_beh_vs_dec_others.pickle", "wb")
dump([time, ci_h, res, ps, serial, good_pairs, xxx, w2], f)
f.close()
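# A percentile band over the smoothed resamples could follow, e.g. (assumed,
# mirroring the other examples in this listing):
low, high = percentile(ci_h, [2.5, 97.5], axis=0)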