def test_map_t_real_data2():
    import os
    pth = os.path.join(os.path.dirname(__file__), 's_beamtf1_avg.mat')
    if not os.path.exists(pth):
        assert False, 'did not find data file'
        return
    mdict, mbeam, dof = su.split_combo_tfstats_matfile(pth)
    p_corr_pos = mdict['p val pos (corr)']
    p_corr_neg = mdict['p val neg (corr)']
    tt = mdict['T test']
    n = 2048

    # create the adapted results
    s_res = adapt_mlab_tf_snpm_stats(pth)

    alpha = 0.05
    while not (p_corr_neg.s <= alpha).any():
        alpha += 0.05
    print 'testing negative tail at significance level', alpha

    tc, alpha = s_res.threshold(alpha, 'neg')
    m = tt.s <= tc

    yield nt.assert_false, (p_corr_neg.s[m] > alpha).any()

    alpha = 0.05
    while not (p_corr_pos.s <= alpha).any():
        alpha += 0.05
    print 'testing positive tail at significance level', alpha        

    tc, alpha = s_res.threshold(alpha, 'pos')
    m = tt.s >= tc
    yield nt.assert_false, (p_corr_pos.s[m] > alpha).any()
# Ejemplo n.º 2
# 0
def adapt_mlab_tf_snpm_stats(combo_beam, avg_beam=None):
    """Build an AdaptedTimeFreqSnPMResults from a combined tf-stats file.

    Parameters
    ----------
    combo_beam : path to the combined tf-stats MATLAB file
    avg_beam : optional beam providing voxel_indices; when None, the beam
        recovered from the stats file itself is used.

    Returns
    -------
    AdaptedTimeFreqSnPMResults wrapping the T statistics, ranks
    (uncorrected negative p-values), and corrected p-value maps.
    """
    stats_dict, file_beam, dof = su.split_combo_tfstats_matfile(combo_beam)
    if avg_beam is None:
        avg_beam = file_beam
    return AdaptedTimeFreqSnPMResults(
        stats_dict['T test'].s,
        avg_beam.voxel_indices,
        stats_dict['p val neg (uncorr)'].s,
        stats_dict['p val pos (corr)'].s,
        stats_dict['p val neg (corr)'].s,
        )
# Ejemplo n.º 3
# 0
def test_map_t_real_data():
    import os
    pth = os.path.join(os.path.dirname(__file__), 's_beamtf1_avg.mat')
    if not os.path.exists(pth):
        assert False, 'did not find data file'
        return
    mdict, mbeam, dof = su.split_combo_tfstats_matfile(pth)
    p_corr_pos = mdict['p val pos (corr)']
    p_corr_neg = mdict['p val neg (corr)']
    tt = mdict['T test']
    n = 2048
    nt, nf = tt.s.shape[1:]
    max_t_maps = np.empty((n, nt, nf))
    min_t_maps = np.empty((n, nt, nf))
    for t in xrange(nt):
        for f in xrange(nf):
            edges, _ = su.map_t(tt.s[:,t,f], p_corr_neg.s[:,t,f], 1.0/n)
            min_t_maps[:,t,f] = edges
            edges, _ = su.map_t(-tt.s[:,t,f], p_corr_pos.s[:,t,f], 1.0/n)
            max_t_maps[:,t,f] = -edges


    min_t_maps = np.sort(min_t_maps, axis=0)
    alpha = 0.05
    while not (p_corr_neg.s <= alpha).any():
        alpha += 0.05
    print 'testing negative tail at significance level', alpha
    # highest k index satisfying t <= tc
    k_mn = int(alpha * n)
    tc = min_t_maps[k_mn]
    m = tt.s <= tc
    yield assert_false, (p_corr_neg.s[m] > alpha).any()

    max_t_maps = np.sort(max_t_maps, axis=0)
    alpha = 0.05
    while not (p_corr_pos.s <= alpha).any():
        alpha += 0.05
    print 'testing positive tail at significance level', alpha        
    # lowest k index in max_t_maps satisfying significant t >= tc
    k_mx = int((1-alpha) * n + 0.5)
    tc = max_t_maps[k_mx]
    m = tt.s >= tc
    print m.sum()
    yield assert_false, (p_corr_pos.s[m] > alpha).any()