Example 1
def test_bh():

    Q = 1.0
    p_vals = p.T
    useless_bh = bh_procedure(p_vals, Q)

    # Since the FDR is 100%, the bh_procedure should return the exact same thing as the original data.
    #assert_almost_equal(data[...,7], useless_bh[...,7])
    #assert_almost_equal(np.ravel(pval), useless_bh)

    Q_real = .25
    real_bh = bh_procedure(p_vals, Q_real)

    # At least one of the original p-values should come through unchanged.
    assert np.any(np.ravel(p_vals) == real_bh)
Example 2
def test_bh():


    Q = 1.0
    p_vals = p.T
    useless_bh = bh_procedure(p_vals, Q)

    # Since the FDR is 100%, the bh_procedure should return the exact same thing as the original data.
    #assert_almost_equal(data[...,7], useless_bh[...,7])
    #assert_almost_equal(np.ravel(pval), useless_bh)

    Q_real = .25
    real_bh = bh_procedure(p_vals, Q_real)

    # At least one of the original p-values should come through unchanged.
    assert np.any(np.ravel(p_vals) == real_bh)
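bh_procedure is a project-specific function and its exact return convention is not shown in these snippets, but the comments describe standard Benjamini-Hochberg behavior: with Q = 1.0 every p-value is declared significant (the largest p-value always satisfies p_(m) <= (m/m)*Q = 1, so all m hypotheses are rejected), while with a very small Q nothing passes and the original p-values come back unchanged. As context, here is a minimal, self-contained NumPy sketch of that selection rule; the name bh_sketch is hypothetical and this is not the project's implementation.

import numpy as np

def bh_sketch(p_values, Q):
    """Benjamini-Hochberg selection rule (illustrative sketch only)."""
    p = np.ravel(p_values)
    m = p.size
    order = np.argsort(p)
    # Critical values (k / m) * Q for the k-th smallest p-value, k = 1..m.
    crit = (np.arange(1, m + 1) / m) * Q
    passed = p[order] <= crit
    if not passed.any():
        # No significant tests: hand the input back unchanged, which is the
        # behavior the small-Q tests below rely on.
        return p_values
    k_max = np.nonzero(passed)[0].max()       # largest k with p_(k) <= (k/m)*Q
    significant = np.zeros(m, dtype=bool)
    significant[order[:k_max + 1]] = True     # reject hypotheses 1..k_max
    return p[significant]                     # p-values declared significant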
Example 3
def small_q_bh():
    Q = .005
    p_vals = p.T
    small_q_bh = bh_procedure(p_vals, Q)

    # Since the Q value is so small, there should be no significant tests found,
    # so the original p_vals (a column of shape (len, 1)) should be returned
    # rather than an np.array object of shape (len,)
    assert_equals(small_q_bh.shape, p_vals.shape)
    # It should return p_vals unchanged if it finds no significant tests
    np.testing.assert_array_equal(small_q_bh, p_vals)
Example 4
def small_q_bh():
    Q = .005
    p_vals = p.T
    small_q_bh = bh_procedure(p_vals, Q)

    # Since the Q value is so small, there should be no significant tests found,
    # so the original p_vals (a column of shape (len, 1)) should be returned
    # rather than an np.array object of shape (len,)
    assert_equals(small_q_bh.shape, p_vals.shape)
    # It should return p_vals unchanged if it finds no significant tests
    np.testing.assert_array_equal(small_q_bh, p_vals)
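A quick usage check of the bh_sketch function defined after Example 2 (it reuses that sketch and its numpy import), mirroring what these tests assert; the values in p_demo are made up for illustration and are not project data.

p_demo = np.array([[0.01], [0.20], [0.04], [0.80]])  # a (4, 1) column, like p.T

everything = bh_sketch(p_demo, 1.0)     # Q = 1.0: all four p-values are significant
nothing = bh_sketch(p_demo, 0.005)      # tiny Q: no rejections, input returned as-is
assert nothing.shape == p_demo.shape    # the (len, 1) shape is preserved
np.testing.assert_array_equal(nothing, p_demo)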
Example 5
def test_bh():
    img = nib.load(pathtoclassdata + "ds114_sub009_t2r1.nii")
    data = img.get_data()[..., 4:]
    # Read in the convolutions. 
    convolved = np.loadtxt(pathtoclassdata + "ds114_sub009_t2r1_conv.txt")[4:]
    # Create design matrix. 

    beta,t,df,p = t_stat(data, convolved,[1,1])
    beta2, t2,df2,p2 = t_stat(data, convolved,[0,1])

    Q = 1.0
    pval = p.T
    useless_bh = bh_procedure(pval, Q)

    # Since the FDR is 100%, the bh_procedure should return the exact same thing as the original data.
    #assert_almost_equal(data[...,7], useless_bh[...,7])
    #assert_almost_equal(np.ravel(pval), useless_bh)

    Q_real = .25
    real_bh = bh_procedure(pval, Q_real)
    #assert_not_equals(data[...,7], real_bh[...,7])
    # At least one of the original p-values should come through unchanged.
    assert np.any(np.ravel(pval) == real_bh)
Example 6
def test_bh():
    img = nib.load(pathtoclassdata + "ds114_sub009_t2r1.nii")
    data = img.get_data()[..., 4:]
    # Read in the convolutions.
    convolved = np.loadtxt(pathtoclassdata + "ds114_sub009_t2r1_conv.txt")[4:]
    # Create design matrix.

    beta, t, df, p = t_stat(data, convolved, [1, 1])
    beta2, t2, df2, p2 = t_stat(data, convolved, [0, 1])

    Q = 1.0
    pval = p.T
    useless_bh = bh_procedure(pval, Q)

    # Since the FDR is 100%, the bh_procedure should return the exact same thing as the original data.
    # assert_almost_equal(data[...,7], useless_bh[...,7])
    # assert_almost_equal(np.ravel(pval), useless_bh)

    Q_real = 0.25
    real_bh = bh_procedure(pval, Q_real)
    # assert_not_equals(data[...,7], real_bh[...,7])
    # At least one of the original p-values should come through unchanged.
    assert np.any(np.ravel(pval) == real_bh)
Example 7
    mask_data = mask.get_data()
    rachels_ones = np.ones((64, 64, 34))
    fitted_mask = make_mask(rachels_ones, mask_data, fit=True)
    fitted_mask[fitted_mask > 0] = 1

    #####################################
    # Run bh_procedure for each subject #
    #####################################
    p_3d = np.load("../data/p-values/" + name + "_pvalue.npy")
    p_1d = np.ravel(p_3d)

    mask = fitted_mask
    mask_1d = np.ravel(mask)
    p_bh = p_1d[mask_1d == 1]

    bh_first = bh_procedure(p_bh, q)
    bh_3d = masking_reshape_end(bh_first, mask, off_value=.5)
    bh_3d[bh_3d < .5] = 0
    bh_3d_1_good = 1 - bh_3d

    bh_final = neighbor_smoothing_binary(bh_3d_1_good, neighbors)

    bh_mean[..., i] = bh_3d_1_good

    #####################################
    # Run t_grouping for each subject   #
    #####################################
    t_3d = np.load("../data/t_stat/" + name + "_tstat.npy")

    #mask = fitted_mask
    t_group = t_grouping_neighbor(t_3d,
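make_mask, masking_reshape_end, neighbor_smoothing_binary and t_grouping_neighbor are project helpers whose implementations are not shown here. For the reshape-back step only, the following is a minimal NumPy illustration, assuming (as this snippet suggests) that masking_reshape_end scatters the 1-D bh_procedure output into the voxels where the mask equals 1 and fills every other voxel with off_value; the name reshape_back_sketch and the toy arrays are hypothetical, not the project's code.

import numpy as np

def reshape_back_sketch(values_1d, mask_3d, off_value=0.5):
    # Place the 1-D values back into the 3-D grid at the voxels where
    # mask_3d == 1 (in C/ravel order) and fill the other voxels with off_value.
    out = np.full(mask_3d.shape, off_value, dtype=float)
    out[mask_3d == 1] = values_1d
    return out

# Toy usage on a 2 x 2 x 2 grid with a 3-voxel mask.
toy_mask = np.zeros((2, 2, 2))
toy_mask[0, 0, 0] = toy_mask[0, 1, 0] = toy_mask[1, 1, 1] = 1
toy_vals = np.array([0.2, 0.9, 0.4])               # stand-in for bh_procedure output
toy_vol = reshape_back_sketch(toy_vals, toy_mask)  # shape (2, 2, 2)
toy_vol[toy_vol < 0.5] = 0                         # same thresholding as bh_3d above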
Example 8
def no_q_test():
    Q = 0
    p_vals = p.T
    no_bh = bh_procedure(p_vals, Q)
    np.testing.assert_array_equal(no_bh, p_vals)
Example 9
B, t, df, p = t_stat(data, my_hrf, np.array([0, 1]))

#########################
# Benjamini-Hochberg #
#########################

print(
    "# ==== BEGIN Visualization of Masked data over original brain data ==== #"
)

p_vals = p.T  # shape of p_vals is (139264, 1)

print("# ==== No Mask, bh_procedure ==== #")
# a fairly large false discovery rate
Q = .4
significant_pvals = bh_procedure(p_vals, Q)

# Reshape significant_pvals to shape of data
reshaped_sig_p = np.reshape(significant_pvals, data.shape[:-1])
slice_reshaped_sig_p = reshaped_sig_p[..., 7]
original_slice = data[..., 7]

plt.imshow(slice_reshaped_sig_p)
plt.colorbar()
plt.title('Significant p-values (No mask)')
plt.savefig(location_of_images + "NOMASK_significant_p_slice.png")
plt.close()
print("# ==== END No Mask, bh_procedure ==== #")
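
# Sanity check on the reshape above: there is one p-value per voxel, and the
# 64 x 64 x 34 grid used elsewhere in this project (see rachels_ones in
# Example 7) gives 64 * 64 * 34 = 139264 voxels, matching the (139264, 1)
# shape noted for p_vals, so np.reshape(significant_pvals, data.shape[:-1])
# recovers a single 3-D volume. (Illustrative check; it assumes
# data.shape[:-1] is (64, 64, 34).)
assert 64 * 64 * 34 == 139264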

print("# ==== BEGIN varying the Q value = .005 (FDR) ==== #")
Q = .005
Example 10
plt.savefig(location_of_images+"rachels_ones.png")
plt.close()
#data_1d = np.ravel(data)

# subset into p-values array
#smaller_p = p_vals[ == 1]
"""

print("# ==== BEGIN Visualization of Masked data over original brain data ==== #")

p_vals = p.T # shape of p_vals is (139264, 1)

print("# ==== No Mask, bh_procedure ==== #")
# a fairly large false discovery rate
Q = .4
significant_pvals = bh_procedure(p_vals, Q)
#print(significant_pvals)
# Reshape significant_pvals to shape of data
reshaped_sig_p = np.reshape(significant_pvals, data.shape[:-1])
slice_reshaped_sig_p = reshaped_sig_p[...,7]
original_slice = data[...,7]

plt.imshow(slice_reshaped_sig_p)
plt.colorbar()
plt.title('Significant p-values (No mask)')
plt.savefig(location_of_images+"NOMASK_significant_p_slice.png")
plt.close()
print("# ==== END No Mask, bh_procedure ==== #")


#significant_pvals_old = bh_procedure(p_vals, fdr)
###############################
# Benjamini Hochberg Analysis #
###############################

toolbar_width = len(q1)
sys.stdout.write("Benjamini Hochberg: ")
sys.stdout.write("[%s]" % (" " * toolbar_width))
sys.stdout.flush()
sys.stdout.write("\b" * (toolbar_width + 1))  # return to start of line, after '['

bh = []  # values a*6 + b - 1
count_a = 0
for a, b in itertools.product(range(len(q1)), range(5)):
    bh_first = bh_procedure(p_bh, q1[a])
    bh_3d = masking_reshape_end(bh_first, mask, off_value=.5)
    bh_3d[bh_3d < .5] = 0

    bh_3d_1_good = 1 - bh_3d
    first = neighbor_smoothing_binary(bh_3d_1_good, neighbors1[b])

    bh.append(first)

    # Advance the progress bar by one dash after the last b for each q1 value.
    if count_a == a and b == 4:
        sys.stdout.write("-")
        sys.stdout.flush()
        count_a += 1
Example 12
def no_q_test():
    Q = 0
    p_vals = p.T
    no_bh = bh_procedure(p_vals, Q)
    np.testing.assert_array_equal(no_bh, p_vals)
Example 13
plt.savefig(location_of_images+"rachels_ones.png")
plt.close()
#data_1d = np.ravel(data)

# subset into p-values array
#smaller_p = p_vals[ == 1]
"""

print("# ==== BEGIN Visualization of Masked data over original brain data ==== #")

p_vals = p.T

print("# ==== No Mask, bh_procedure ==== #")
# a fairly large false discovery rate
Q = .4
significant_pvals = bh_procedure(p_vals, Q)
#print(significant_pvals)
# Reshape significant_pvals to shape of data
reshaped_sig_p = np.reshape(significant_pvals, data.shape[:-1])
slice_reshaped_sig_p = reshaped_sig_p[...,7]
original_slice = data[...,7]

plt.imshow(slice_reshaped_sig_p)
plt.colorbar()
plt.title('Significant p-values (No mask)')
plt.savefig(location_of_images+"NOMASK_significant_p_slice.png")
plt.close()
print("# ==== END No Mask, bh_procedure ==== #")


#significant_pvals_old = bh_procedure(p_vals, fdr)
Example 14
#data_1d = np.ravel(data)

# subset into p-values array
#smaller_p = p_vals[ == 1]
"""

print(
    "# ==== BEGIN Visualization of Masked data over original brain data ==== #"
)

p_vals = p.T

print("# ==== No Mask, bh_procedure ==== #")
# a fairly large false discovery rate
Q = .4
significant_pvals = bh_procedure(p_vals, Q)
#print(significant_pvals)
# Reshape significant_pvals to shape of data
reshaped_sig_p = np.reshape(significant_pvals, data.shape[:-1])
slice_reshaped_sig_p = reshaped_sig_p[..., 7]
original_slice = data[..., 7]

plt.imshow(slice_reshaped_sig_p)
plt.colorbar()
plt.title('Significant p-values (No mask)')
plt.savefig(location_of_images + "NOMASK_significant_p_slice.png")
plt.close()
print("# ==== END No Mask, bh_procedure ==== #")

#significant_pvals_old = bh_procedure(p_vals, fdr)