Example #1
def permute_isc_within(a, b, x, outfile, mask='', isc_only=False, hdf5=None, thresh=6000, n_pass=.7, n_reps=1000, t=False):
    import os
    import numpy as np
    from pycorr.funcs_correlate import crosscor, intersubcorr
    from pycorr.statistics import perm, isc_corrmat_within_diff
    from pycorr.subject import Run, Exp
    from pycorr.pietools import mkdir_p, parse_section, arr_slice
    # TASK ID so script knows where to slice, converts SGE_TASK_ID to 0-indexed
    ID = parse_section(x) if x is not None else int(os.environ['SGE_TASK_ID']) - 1

    # OUTPUTS
    out = {}

    # Load and Slice data ---------------------------------------------------------
    mask = np.load(mask) if mask else slice(None)
    if not hdf5:
        # LOAD FILES
        if t:                     #TESTING FLAG 
            from pycorr.gen_corrmat import fourD
            A_files = fourD + 7000
            B_files = fourD + 7000
        elif a and b:
            A_files = [os.path.join(a[0], fname) for fname in os.listdir(a[0])]  # TODO: change back, hack until rondo jobs are fixed
            B_files = [os.path.join(b[0], fname) for fname in os.listdir(b[0])]
        else:
            raise ValueError('need either the test flag or input directories a and b')

        A = [arr_slice(fname, ID)[...,mask].astype('float') for fname in A_files]
        B = [arr_slice(fname, ID)[...,mask].astype('float') for fname in B_files]
        # Thresholding
        # Hack: threshold() is a Run method, so build a throwaway instance  TODO: move threshold out of the class
        import h5py
        dummy_run = Run(h5py.File('dummy.h5'))

        # mask out timecourses whose mean falls below the threshold
        for dat in A + B:
            dat[dummy_run.threshold(thresh, dat)] = np.nan
        thresh_pass = [~np.isnan(dat.sum(axis=-1)) for dat in A + B]
        out['thresh_fail'] = Exp.cond_thresh(thresh_pass, mustpassprop=n_pass)
    else:
        E = Exp(hdf5)
        A = [run.load(use_subset=mask, standardized=True, threshold=True,  _slice=ID) for run in E.iter_runs(a[0])]
        if b:  # TODO: restructure this script so that b is a proper optional argument
            B = [run.load(standardized=True, threshold=True, _slice=ID) for run in E.iter_runs(b[0])]
        else: B = []
        E.get_cond(a[0])
        out['thresh_fail'] = E.get_cond(a[0])['threshold'][...]

    # Combine group indices for correlation matrix (we will shuffle these) --------
    indx_A = list(range(len(A)))
    indx_B = list(range(len(A), len(A) + len(B)))
    print(indx_A)
    print(indx_B)

    # Cross-Correlation matrix (we will permute rows and columns) -----------------
    out['isc_corrmat'] = crosscor(A+B, standardized=False)
    out['isc_A'] = intersubcorr(out['isc_corrmat'][..., indx_A, :][..., :, indx_A])

    # Permutation Test ------------------------------------------------------------
    if not isc_only:
        out_shape = (n_reps, ) + out['isc_corrmat'].shape[:-2]      # n_reps x spatial_dims
        swap_dims = list(range(1, len(out_shape))) + [0]             # axis order that rotates the reps dim to the end
        out['null'] = perm(indx_A, indx_B, isc_corrmat_within_diff, C = out['isc_corrmat'],
                        nreps=n_reps, out=np.zeros(out_shape))
        out['null'] = out['null'].transpose(swap_dims)                            #put corrs on last dim
        out['r'] = isc_corrmat_within_diff(indx_A, indx_B, out['isc_corrmat'])[..., np.newaxis] #since 1 corr, add axis for broadcasting
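        # two-sided permutation p-value: fraction of null differences at least as extreme as the observed difference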
        out['p'] = np.mean(np.abs(out['r']) <= np.abs(out['null']), axis=-1)

    # Output ----------------------------------------------------------------------
    outtmp = os.path.join(outfile, "{fold}/{ID}.npy")
    for k, v in out.items():
        outpath = outtmp.format(fold=k, ID=x or ID)
        mkdir_p(os.path.dirname(outpath))
        np.save(outpath, v)
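For reference, a minimal invocation sketch (not part of the original source): the group directory layout, the output root, and the reliance on SGE_TASK_ID below are illustrative assumptions.

# Hypothetical call: each group directory is assumed to hold one .npy timecourse file per subject.
# With x=None the slice index falls back to the SGE_TASK_ID environment variable (array-job usage).
permute_isc_within(a=['data/groupA'],
                   b=['data/groupB'],
                   x=None,
                   outfile='results/isc_within',
                   n_reps=1000)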
Example #2
def test_isc_corrmat_within_null():
    A, B = range(3), range(3, 6)
    r_null = perm(B, A, isc_corrmat_within_diff, nreps=1000, C=C_null_rho1)
    assert np.all(np.array(r_null) == 0) 
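This test uses C_null_rho1, presumably a cross-correlation matrix in which every pairwise correlation is identical (the name suggests rho = 1 throughout), so the within-group difference is zero under every relabeling. A rough numpy sketch of that reasoning, assuming isc_corrmat_within_diff amounts to the mean off-diagonal correlation within one group minus that of the other (the actual pycorr implementation may differ):

import numpy as np

def within_diff_sketch(idx_a, idx_b, C):
    """Mean within-group off-diagonal correlation of group A minus that of group B (illustrative only)."""
    def within_mean(idx):
        sub = C[np.ix_(list(idx), list(idx))]
        return sub[~np.eye(len(sub), dtype=bool)].mean()
    return within_mean(idx_a) - within_mean(idx_b)

C_const = np.ones((6, 6))                                  # all pairwise correlations equal
print(within_diff_sketch(range(3), range(3, 6), C_const))  # 0.0 for any grouping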
Example #3
def test_perm_isc_corrmat_within():
    A, B = range(3), range(3, 6)
    r_null = perm(B, A, isc_corrmat_within_diff, nreps=10000, C=C)
    r = isc_corrmat_within_diff(B, A, C)
    assert np.mean(np.array(r_null) <= r) == 1  # the original grouping yields the largest within-group correlation difference
Example #4
def test_perm_test_is_permuting():
    np.random.seed(1)
    fun = lambda A, B: sum(A) - sum(B)
    out = perm([0,0,0,0], [1,1,1,1], fun, nreps=1000)
    assert .010 < np.mean(np.array(out) == 4) < .018
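The asserted bounds bracket the exact probability of reproducing the extreme difference: assuming perm pools the eight values and redraws groups of the original sizes uniformly, only 1 of the C(8,4) = 70 equally likely assignments puts all four ones into the first group, so P(out == 4) = 1/70 ≈ 0.0143.

from math import comb
print(1 / comb(8, 4))   # 0.0142857..., inside the asserted (.010, .018) interval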