Example #1
def permute_isc_within(a, b, x, outfile, mask='', isc_only=False, hdf5=None, thresh=6000, n_pass=.7, n_reps=1000, t=False):
    """Within-group ISC plus a permutation test over group labels; each output array is saved to <outfile>/<name>/<slice>.npy."""
    import os
    import numpy as np
    from pycorr.funcs_correlate import crosscor, intersubcorr
    from pycorr.statistics import perm, isc_corrmat_within_diff
    from pycorr.subject import Run, Exp
    from pycorr.pietools import mkdir_p, parse_section, arr_slice
    # TASK ID tells the script where to slice; converts SGE_TASK_ID to 0-indexed
    ID = parse_section(x) if x is not None else int(os.environ['SGE_TASK_ID']) - 1

    # OUTPUTS
    out = {}

    # Load and Slice data ---------------------------------------------------------
    mask = np.load(mask) if mask else slice(None)
    if not hdf5:
        # LOAD FILES
        if t:                     #TESTING FLAG 
            from pycorr.gen_corrmat import fourD
            A_files = fourD + 7000
            B_files = fourD + 7000
        elif a and b:    
            A_files = [os.path.join(a[0], fname) for fname in os.listdir(a[0])]  #TODO change back, hack until rondo jobs are fixed
            B_files = [os.path.join(b[0], fname) for fname in os.listdir(b[0])]
        else:
            raise ValueError('must either set the testing flag (t) or give input directories a and b')

        A = [arr_slice(fname, ID)[...,mask].astype('float') for fname in A_files]
        B = [arr_slice(fname, ID)[...,mask].astype('float') for fname in B_files]
        # Thresholding
        # Hack: threshold is a Run method, so make a throwaway Run instance  TODO: move threshold to a free function
        import h5py
        dummy_run = Run(h5py.File('dummy.h5', 'a'))

        # NaN-out timecourses with a low mean
        for dat in A+B:
            dat[dummy_run.threshold(thresh, dat)] = np.nan
        thresh_pass = [~np.isnan(dat.sum(axis=-1)) for dat in A+B]
        out['thresh_fail'] = Exp.cond_thresh(thresh_pass, mustpassprop=n_pass)
    else:
        E = Exp(hdf5)
        A = [run.load(use_subset=mask, standardized=True, threshold=True,  _slice=ID) for run in E.iter_runs(a[0])]
        if b:  # TODO: restructure so that b is genuinely optional
            B = [run.load(standardized=True, threshold=True, _slice=ID) for run in E.iter_runs(b[0])]
        else: B = []
        out['thresh_fail'] = E.get_cond(a[0])['threshold'][...]

    # Combine group indices for correlation matrix (we will shuffle these) --------
    indx_A = list(range(len(A)))
    indx_B = list(range(len(A), len(A) + len(B)))
    print(indx_A)
    print(indx_B)

    # Cross-Correlation matrix (we will permute rows and columns) -----------------
    out['isc_corrmat'] = crosscor(A+B, standardized=False)
    out['isc_A'] = intersubcorr(out['isc_corrmat'][..., indx_A, :][..., :, indx_A])

    # Permutation Test ------------------------------------------------------------
    if not isc_only:
        out_shape = (n_reps, ) + out['isc_corrmat'].shape[:-2]      #n_reps x spatial_dims
        swap_dims = list(range(1, len(out_shape))) + [0]                  #move the reps dim to the end
        out['null'] = perm(indx_A, indx_B, isc_corrmat_within_diff, C = out['isc_corrmat'],
                        nreps=n_reps, out=np.zeros(out_shape))
        out['null'] = out['null'].transpose(swap_dims)                            #put corrs on last dim
        out['r'] = isc_corrmat_within_diff(indx_A, indx_B, out['isc_corrmat'])[..., np.newaxis] #since 1 corr, add axis for broadcasting
        out['p'] = np.mean(np.abs(out['r']) <= np.abs(out['null']), axis=-1)

    # Output ----------------------------------------------------------------------
    outtmp = os.path.join(outfile, "{fold}/{ID}.npy")
    for k, v in out.items():
        outpath = outtmp.format(fold=k, ID=x or ID)
        mkdir_p(os.path.dirname(outpath))
        np.save(outpath, v)
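
A minimal invocation sketch of the function above. The HDF5 file, condition names, output directory, and slice spec are hypothetical placeholders; only the keyword arguments themselves come from the signature.

# Hypothetical call: analyze one spatial slice against an Exp-style HDF5 file.
permute_isc_within(a=['cond_intact'],       # group A condition (hdf5 mode) or data directory
                   b=['cond_scrambled'],    # group B condition; the corrmat covers A+B together
                   x='0',                   # slice spec handed to parse_section; None falls back to SGE_TASK_ID
                   outfile='results/isc_within',
                   hdf5='subjects.h5',      # with hdf5 set, runs are loaded through Exp.iter_runs
                   n_reps=1000)
# Outputs land in results/isc_within/<key>/<slice>.npy for keys such as
# 'isc_corrmat', 'isc_A', 'null', 'r', and 'p'.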
Example #2
import numpy as np
from numpy.testing import assert_almost_equal
from pycorr.funcs_correlate import standardize, corsubs, crosscor, intersubcorr

np.random.seed(10)

dims = (2,2, 10)
nsubs = 3
subs = [np.random.random(dims) for ii in range(nsubs)]
for M in subs: M[0,0] = range(dims[-1])   # timecourse at voxel (0,0) is a linear ramp 0..N-1
for M in subs: standardize(M, inplace=True)
subs[0][1,1] = np.nan                     #1,1 sub 0 has a NaN timecourse

C_all = crosscor(subs, standardized=True)
C_all[1,1,0] = np.nan
isc1 = intersubcorr(C_all)

M_ttl = np.nansum(subs, axis=0)
isc2 = np.array([corsubs(M, M_ttl-M) for M in subs]).transpose([1,2,0])

isc3_list = []
for M in subs:
    r_all = corsubs(M, M_ttl)
    s_all = np.std(M_ttl, axis=-1, ddof=1)
    s_i = np.std(M, axis=-1, ddof=1)
    M_cors = (r_all*s_all - s_i) / \
            np.sqrt(s_i**2 + s_all**2 - 2*s_i*s_all*r_all) #wherry formula
    isc3_list.append(M_cors)
isc3 = np.array(isc3_list).transpose([1,2,0])

def test_intersubcorrXmeantc():
    # the correlation-matrix route (isc1) and the summed-timecourse routes
    # (isc2, isc3) should agree wherever all three are defined
    ok = ~(np.isnan(isc1) | np.isnan(isc2) | np.isnan(isc3))
    assert_almost_equal(isc1[ok], isc2[ok])
    assert_almost_equal(isc2[ok], isc3[ok])
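
For reference, the "wherry formula" used for isc3 is the correlation of one subject's timecourse x_i with the summed timecourse of everyone else, t - x_i, rewritten in terms of quantities that involve only the total t. With r_it = corr(x_i, t), s_i = std(x_i), and s_t = std(t),

    corr(x_i, t - x_i) = (r_it*s_t - s_i) / sqrt(s_i**2 + s_t**2 - 2*s_i*s_t*r_it)

which is exactly what the loop computes from r_all, s_all, and s_i. isc2 computes the same correlation directly against M_ttl - M, and isc1 derives it from the pairwise correlation matrix, which is what the test compares.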