Example 1
def runfeat(scanlist_file):
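    """Fill the trial.fsf template for each trial directory under `lss`
    and run FSL's `feat` on the generated design.fsf.
    """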
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    # dir config
    doc_dir = os.path.abspath(
        os.path.join(scan_info['pardir'], '../doc/feat_emo'))
    nii_dir = scan_info['sessdir']
    # template config
    template_fsf = os.path.join(doc_dir, 'trial.fsf')
    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(len(run_idx)):
            run_dir = os.path.join(subj_dir, '00' + run_idx[i])
            lss_dir = os.path.join(run_dir, 'lss')
            for j in range(80):
                t_fsf = os.path.join(lss_dir, 't%s' % (j + 1), 'design.fsf')
                with open(template_fsf, 'r') as fin:
                    with open(t_fsf, 'w') as fout:
                        for line in fin:
                            line = line.replace('AAA', sid[:3] + '1')
                            line = line.replace('SSS', sid)
                            line = line.replace('RRR', '00' + run_idx[i])
                            line = line.replace('TTT', 't%s' % (j + 1))
                            fout.write(line)
                os.system('feat %s' % t_fsf)
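These snippets are excerpted from a larger module and omit their imports. A plausible header for running them, assuming `pyunpack` (the scanlist reader used here, a project-local module rather than the PyPI archive package) and `niroi` (the project's ROI helpers) are on the path:

# assumed import block for these snippets (Python 2 style, matching the code)
import os
import shutil
import subprocess

import numpy as np
import nibabel as nib
import scipy.io as sio

import pyunpack   # project-local scanlist reader (readscanlist)
import niroi      # project-local ROI utilities (get_roi_coord, ...)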
Example 2
def clcfeat(scanlist_file):
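    """Remove the generated design.fsf and func.feat directory of every
    single-trial model (cleanup counterpart of runfeat).
    """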
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    # dir config
    doc_dir = os.path.abspath(
        os.path.join(scan_info['pardir'], '../doc/feat_emo'))
    nii_dir = scan_info['sessdir']
    # template config
    template_fsf = os.path.join(doc_dir, 'trial.fsf')
    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(len(run_idx)):
            run_dir = os.path.join(subj_dir, '00' + run_idx[i])
            lss_dir = os.path.join(run_dir, 'lss')
            for j in range(80):
                t_fsf = os.path.join(lss_dir, 't%s' % (j + 1), 'design.fsf')
                feat_dir = os.path.join(lss_dir, 't%s' % (j + 1), 'func.feat')
                if os.path.exists(t_fsf):
                    os.system('rm %s' % t_fsf)
                else:
                    print 'No fsf file found - %s' % (t_fsf)
                if os.path.exists(feat_dir):
                    os.system('rm -rf %s' % feat_dir)
                else:
                    print 'No feat dir found - %s' % (feat_dir)
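The cleanup above shells out to `rm`; a minimal pure-Python sketch of the same per-trial step, using os.remove and shutil.rmtree instead of os.system (the helper name `remove_trial_outputs` is hypothetical):

import os
import shutil

def remove_trial_outputs(t_fsf, feat_dir):
    """Delete one trial's design.fsf and func.feat directory if present."""
    if os.path.exists(t_fsf):
        os.remove(t_fsf)
    else:
        print 'No fsf file found - %s' % t_fsf
    if os.path.exists(feat_dir):
        shutil.rmtree(feat_dir)
    else:
        print 'No feat dir found - %s' % feat_dir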
Example 3
def copypar(scanlist_file):
    """Copy par file for each separate calculation."""
    # get data info from scanlist file
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    # dir config
    par_dir = os.path.join(scan_info['pardir'], 'emo')
    nii_dir = scan_info['sessdir']
    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(len(run_idx)):
            run_dir = os.path.join(subj_dir, '00' + run_idx[i])
            lss_dir = os.path.join(run_dir, 'lss')
            os.system('mkdir ' + lss_dir)
            pair_file = os.path.join(par_dir,
                                     'pair_run_' + par_idx[i] + '.txt')
            os.system('cp %s %s' % (pair_file, lss_dir))
            for j in range(80):
                t_dir = os.path.join(lss_dir, 't%s' % (j + 1))
                os.system('mkdir %s' % (t_dir))
                par_file = os.path.join(par_dir,
                                        'run_%s_%s.par' % (par_idx[i], j + 1))
                os.system('cp %s %s' %
                          (par_file, os.path.join(t_dir, 'trial.par')))
                par2ev(os.path.join(t_dir, 'trial.par'))
Example 4
def mergecope(scanlist_file):
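    """Merge the single-trial cope1 images of each run into train/test 4D
    files with `fslmerge`, following the trial_seq_*_train/test.txt lists.
    """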
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)

    # dir config
    par_dir = os.path.join(scan_info['pardir'], 'emo')
    nii_dir = scan_info['sessdir']

    seq_prefix = r'trial_seq'
    seq_type = ['train', 'test']

    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(len(run_idx)):
            run_dir = os.path.join(subj_dir, '00' + run_idx[i])
            lss_dir = os.path.join(run_dir, 'lss')
            for st in seq_type:
                seq_file = os.path.join(
                    par_dir, seq_prefix + '_' + str(i + 1) + '_' + st + '.txt')
                seq = open(seq_file).readlines()
                seq = [line.strip().split(',') for line in seq]
                merged_file = os.path.join(run_dir, st + '_merged_cope.nii.gz')
                strcmd = 'fslmerge -t %s' % (merged_file)
                for line in seq:
                    tmp_file = os.path.join(lss_dir, line[0], 'func.feat',
                                            'reg_standard', 'stats',
                                            'cope1.nii.gz')
                    strcmd = strcmd + ' ' + tmp_file
                subprocess.call(strcmd, shell=True)
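Because the fslmerge command is assembled programmatically, the same step can pass an argument list to subprocess.call and drop shell=True; a minimal sketch, assuming FSL's fslmerge is on the PATH (the helper name `merge_copes` is hypothetical):

# sketch: assemble the fslmerge call as an argument list (no shell=True)
import os
import subprocess

def merge_copes(lss_dir, seq, merged_file):
    """Merge the listed trials' cope1 images into one 4D file."""
    cmd = ['fslmerge', '-t', merged_file]
    for line in seq:
        cmd.append(os.path.join(lss_dir, line[0], 'func.feat',
                                'reg_standard', 'stats', 'cope1.nii.gz'))
    subprocess.call(cmd)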
Example 5
def get_run_idx(scanlist_file, sid, par_idx):
    """Get run index from one subject's info based on par index."""
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    for subj in subj_list:
        if (subj.sess_ID[:2]==sid) and ('emo' in subj.run_info):
            [run_list, par_list] = subj.getruninfo('emo')
            if str(par_idx) in par_list:
                return subj.sess_ID, '00'+run_list[par_list.index(str(par_idx))]
    return None, None
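A hypothetical call illustrating the two return values (the file name, the two-character subject prefix 'S1', and par index 3 are made up):

# hypothetical usage of get_run_idx
sess_id, run_folder = get_run_idx('scanlist.csv', 'S1', 3)
if sess_id is not None:
    print 'session %s, run folder %s' % (sess_id, run_folder)
else:
    print 'no emo run with that par index'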
Example 6
def get_mvp_group_roi(root_dir):
    """Get multivoxel activity pattern for each srimulus from each ROI."""
    # directory config
    nii_dir = os.path.join(root_dir, 'nii')
    ppi_dir = os.path.join(root_dir, 'ppi')
    # load rois
    #mask_data = nib.load(os.path.join(ppi_dir, 'cube_rois.nii.gz')).get_data()
    mask_data = nib.load(
        os.path.join(root_dir, 'group-level', 'rois', 'neurosynth',
                     'cube_rois.nii.gz')).get_data()
    roi_num = int(mask_data.max())
    # get scan info from scanlist
    scanlist_file = os.path.join(root_dir, 'doc', 'scanlist.csv')
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)

    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get run index
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        # var for MVP
        mvp_dict = {}
        for r in range(roi_num):
            mvp_dict['roi_%s' % (r + 1)] = []
        for i in range(10):
            if str(i + 1) in par_idx:
                print 'Run %s' % (i + 1)
                # load cope data
                ipar = par_idx.index(str(i + 1))
                run_dir = os.path.join(subj_dir, '00' + run_idx[ipar])
                print run_dir
                trn_file = os.path.join(run_dir, 'train_merged_cope.nii.gz')
                test_file = os.path.join(run_dir, 'test_merged_cope.nii.gz')
                trn_cope = nib.load(trn_file).get_data()
                test_cope = nib.load(test_file).get_data()
                run_cope = np.concatenate((trn_cope, test_cope), axis=3)
                # XXX: remove mean cope from each trial
                mean_cope = np.mean(run_cope, axis=3, keepdims=True)
                run_cope = run_cope - mean_cope
                # get MVP for each ROI
                for r in range(roi_num):
                    roi_mask = mask_data.copy()
                    roi_mask[roi_mask != (r + 1)] = 0
                    roi_mask[roi_mask == (r + 1)] = 1
                    roi_coord = niroi.get_roi_coord(roi_mask)
                    for j in range(run_cope.shape[3]):
                        vtr = niroi.get_voxel_value(roi_coord,
                                                    run_cope[..., j])
                        mvp_dict['roi_%s' % (r + 1)].append(vtr.tolist())
        for roi in mvp_dict:
            mvp_dict[roi] = np.array(mvp_dict[roi])
        outfile = r'%s_roi_mvp.mat' % (sid)
        sio.savemat(outfile, mvp_dict)
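Each output .mat file holds one trial-by-voxel matrix per ROI; a quick way to check its contents (the subject ID in the file name is hypothetical):

# inspect a per-subject output of get_mvp_group_roi
import scipy.io as sio

mvp = sio.loadmat('S0001_roi_mvp.mat')
for key in sorted(mvp):
    if key.startswith('roi_'):
        print key, mvp[key].shape   # (n_trials, n_voxels)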
Example 7
def get_emo_ts(root_dir, seq):
    """Get neural activity time course of each roi on each emotion condition."""
    nii_dir = os.path.join(root_dir, 'nii')
    ppi_dir = os.path.join(root_dir, 'ppi')
    # load roi
    rois = nib.load(
        os.path.join(root_dir, 'group-level', 'rois', 'neurosynth',
                     'cube_rois_r2.nii.gz')).get_data()
    roi_num = int(rois.max())
    # get run info from scanlist
    scanlist_file = os.path.join(root_dir, 'doc', 'scanlist.csv')
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    for subj in subj_list:
        sid = subj.sess_ID
        print sid
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(10):
            if str(i + 1) in par_idx:
                print 'Run %s' % (i + 1)
                # load cope data
                ipar = par_idx.index(str(i + 1))
                run_dir = os.path.join(subj_dir, '00' + run_idx[ipar])
                print run_dir
                train_cope_f = os.path.join(run_dir,
                                            'train_merged_cope.nii.gz')
                test_cope_f = os.path.join(run_dir, 'test_merged_cope.nii.gz')
                train_cope = nib.load(train_cope_f).get_data()
                test_cope = nib.load(test_cope_f).get_data()
                # get trial sequence for each emotion
                for j in range(4):
                    train_seq = [
                        line[0] for line in seq[i + 1]['train']
                        if line[1] == (j + 1)
                    ]
                    test_seq = [
                        line[0] for line in seq[i + 1]['test']
                        if line[1] == (j + 1)
                    ]
                    emo_data = np.zeros(
                        (91, 109, 91, len(train_seq) + len(test_seq)))
                    emo_data[..., :len(train_seq)] = train_cope[..., train_seq]
                    emo_data[..., len(train_seq):] = test_cope[..., test_seq]
                    # get time course for each roi
                    roi_ts = np.zeros((emo_data.shape[3], roi_num))
                    for k in range(roi_num):
                        roi_ts[:, k] = niroi.extract_mean_ts(
                            emo_data, rois == (k + 1))
                    outfile = '%s_roi_ts_run%s_emo%s.npy' % (sid[:2], i + 1,
                                                             j + 1)
                    outfile = os.path.join(ppi_dir, 'decovPPI', outfile)
                    np.save(outfile, roi_ts)
Example 8
def get_trial_data(root_dir, seq):
    """Get neural activity time course of each roi on each emotion condition."""
    nii_dir = os.path.join(root_dir, 'nii')
    ppi_dir = os.path.join(root_dir, 'ppi')
    # load roi
    rois = nib.load(
        os.path.join(root_dir, 'group-level', 'rois', 'neurosynth',
                     'cube_rois_r2.nii.gz')).get_data()
    roi_num = int(rois.max())
    # get run info from scanlist
    scanlist_file = os.path.join(root_dir, 'doc', 'scanlist.csv')
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    for subj in subj_list:
        sid = subj.sess_ID
        print sid
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(10):
            if str(i + 1) in par_idx:
                print 'Run %s' % (i + 1)
                # load cope data
                ipar = par_idx.index(str(i + 1))
                run_dir = os.path.join(subj_dir, '00' + run_idx[ipar])
                print run_dir
                train_cope_f = os.path.join(run_dir,
                                            'train_merged_cope.nii.gz')
                test_cope_f = os.path.join(run_dir, 'test_merged_cope.nii.gz')
                train_cope = nib.load(train_cope_f).get_data()
                test_cope = nib.load(test_cope_f).get_data()
                # get time course for each roi
                train_x = np.zeros((train_cope.shape[3], roi_num))
                test_x = np.zeros((test_cope.shape[3], roi_num))
                for k in range(roi_num):
                    train_x[:, k] = niroi.extract_mean_ts(train_cope,
                                                          rois == (k + 1))
                    test_x[:, k] = niroi.extract_mean_ts(test_cope,
                                                         rois == (k + 1))
                train_y = [line[1] for line in seq[i + 1]['train']]
                test_y = [line[1] for line in seq[i + 1]['test']]
                # save dataset
                outfile = '%s_run%s_roi_data' % (sid[:2], i + 1)
                outfile = os.path.join(ppi_dir, 'decovPPI', outfile)
                np.savez(outfile,
                         train_x=train_x,
                         train_y=train_y,
                         test_x=test_x,
                         test_y=test_y)
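The .npz written above bundles ROI features and emotion labels per trial; loading it back (the subject/run in the file name are hypothetical; np.savez appends the .npz suffix):

# load one run's dataset saved by get_trial_data
import numpy as np

data = np.load('S1_run1_roi_data.npz')
train_x, train_y = data['train_x'], data['train_y']
test_x, test_y = data['test_x'], data['test_y']
print train_x.shape, test_x.shape   # (n_trials, n_rois) each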
Example 9
def get_vxl_trial_rsp(root_dir):
    """Get multivoxel activity pattern for each srimulus
    from whole brain mask.
    """
    # directory config
    nii_dir = os.path.join(root_dir, 'prepro')
    rsp_dir = os.path.join(root_dir, 'workshop', 'trial_rsp', 'whole_brain')
    # load rois
    mask_data = nib.load(
        os.path.join(root_dir, 'group-level', 'rois', 'neurosynth',
                     'cube_rois_r2.nii.gz')).get_data()
    mask_data = mask_data > 0
    # get scan info from scanlist
    scanlist_file = os.path.join(root_dir, 'doc', 'scanlist.csv')
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)

    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        print sid
        subj_dir = os.path.join(nii_dir, sid)
        # get run index
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        # var for MVP
        for i in range(10):
            if str(i + 1) not in par_idx:
                continue
            print 'Run %s' % (i + 1)
            mvp_data = []
            # load cope data
            ipar = par_idx.index(str(i + 1))
            run_dir = os.path.join(subj_dir, '00' + run_idx[ipar])
            print run_dir
            rsp_file = os.path.join(run_dir, 'mni_sfunc_data_mcf_hp.nii.gz')
            rsp = nib.load(rsp_file).get_data()
            # derive trial-wise response
            trsp = np.zeros((91, 109, 91, 88))
            for t in range(88):
                trsp[..., t] = (rsp[..., 4 * t + 5] + rsp[..., 4 * t + 6]) / 2
            # get MVP of mask
            vxl_coord = niroi.get_roi_coord(mask_data)
            for j in range(trsp.shape[3]):
                vtr = niroi.get_voxel_value(vxl_coord, trsp[..., j])
                mvp_data.append(vtr.tolist())
            outfile = os.path.join(rsp_dir,
                                   '%s_r%s_mvp.npy' % (sid[:2], i + 1))
            np.save(outfile, np.array(mvp_data))
Example 10
def get_mvp_group_roi(scanlist_file, mask_file):
    """Get multivoxel activity pattern for each srimulus from each ROI."""
    # get scan info from scanlist
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    # directory config
    nii_dir = scan_info['sessdir']
    
    # load rois
    mask_data = nib.load(mask_file).get_data()
    roi_dict = {'rOFA': 1, 'lOFA': 2, 'rFFA': 3, 'lFFA': 4}
 
    #output_file = os.path.join(roi_dir, 'neo_group_roi_mvpa.csv')
    #f = open(output_file, 'wb')
    #f.write('SID,rOFA,lOFA,rFFA,lFFA,rpcSTS,lpcSTS\n')

    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        subj_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        # var for MVP
        mvp_dict = {}
        for roi in roi_dict:
            mvp_dict[roi] = []
        for i in range(len(run_idx)):
            run_dir = os.path.join(subj_dir, '00'+run_idx[i])
            trn_file = os.path.join(run_dir, 'train_merged_cope.nii.gz')
            test_file = os.path.join(run_dir, 'test_merged_cope.nii.gz')
            trn_cope = nib.load(trn_file).get_data()
            test_cope = nib.load(test_file).get_data()
            run_cope = np.concatenate((trn_cope, test_cope), axis=3)
            # XXX: remove mean cope from each trial
            #mean_cope = np.mean(run_cope, axis=3, keepdims=True)
            #run_cope = run_cope - mean_cope
            # get MVP for each ROI
            for roi in roi_dict:
                roi_mask = mask_data.copy()
                roi_mask[roi_mask != roi_dict[roi]] = 0
                roi_mask[roi_mask == roi_dict[roi]] = 1
                roi_coord = niroi.get_roi_coord(roi_mask)
                for j in range(run_cope.shape[3]):
                    trl_vtr = niroi.get_voxel_value(roi_coord,
                                                    run_cope[..., j])
                    mvp_dict[roi].append(trl_vtr.tolist())
        # save one trial-by-voxel matrix per ROI into a .mat file per subject
        for roi in mvp_dict:
            mvp_dict[roi] = np.array(mvp_dict[roi])
        sio.savemat('%s_roi_mvp.mat' % sid, mvp_dict)
Example 11
def standardizecope(scanlist_file, stage):
    """For stage 1: `updatefeatreg`;
    For stage 2: Apply nonlinear warp for cope files.
    """
    [scan_info, subj_list] = pyunpack.readscanlist(scanlist_file)
    # dir config
    nii_dir = scan_info['sessdir']
    # warp source for nonlinear registration
    srcfiles = ['highres2standard_2mm.mat', 'highres2standard_warp_2mm.nii.gz']
    fsl_dir = os.getenv('FSL_DIR')
    mnistd = os.path.join(fsl_dir, 'data', 'standard',
                          'MNI152_T1_2mm_brain.nii.gz')
    # parameter estimates needed to be standardized
    peimgs = ['cope', 'tstat', 'zstat', 'varcope']
    for subj in subj_list:
        # get run info for emo task
        sid = subj.sess_ID
        anat_dir = os.path.join(nii_dir, sid[:3] + '1', '3danat', 'reg_fsl')
        emo_dir = os.path.join(nii_dir, sid, 'emo')
        # get par index for each emo run
        if 'emo' not in subj.run_info:
            continue
        [run_idx, par_idx] = subj.getruninfo('emo')
        for i in range(len(run_idx)):
            run_dir = os.path.join(emo_dir, '00' + run_idx[i])
            lss_dir = os.path.join(run_dir, 'lss')
            for j in range(80):
                feat_dir = os.path.join(lss_dir, 't%s' % (j + 1), 'func.feat')
                print feat_dir
                funcreg = os.path.join(feat_dir, 'reg')
                if stage == 1:
                    shutil.copy(mnistd, os.path.join(funcreg,
                                                     'standard.nii.gz'))
                    for f in srcfiles:
                        shutil.copy(
                            os.path.join(anat_dir, f),
                            os.path.join(funcreg, f.replace('_2mm', '')))
                    subprocess.call(
                        ' '.join(['fsl_sub', '-q', 'veryshort.q',
                                  'updatefeatreg', feat_dir]),
                        shell=True)
                else:
                    refvol = os.path.join(funcreg, 'standard.nii.gz')
                    warpvol = os.path.join(funcreg,
                                           'highres2standard_warp.nii.gz')
                    premat = os.path.join(funcreg, 'example_func2highres.mat')
                    targdir = os.path.join(feat_dir, 'reg_standard', 'stats')
                    if not os.path.exists(targdir):
                        print targdir + ' does not exist, creating it automatically.'
                        os.makedirs(targdir)
                    statsdir = os.path.join(feat_dir, 'stats')
                    fl = os.listdir(statsdir)
                    file_num = len(
                        [item for item in fl if item[0:4] == 'cope'])
                    for idx in range(1, file_num + 1):
                        for img in peimgs:
                            infile = os.path.join(statsdir, img + str(idx))
                            outfile = os.path.join(targdir, img + str(idx))
                            subprocess.call(
                                ' '.join(['fsl_sub', '-q', 'veryshort.q',
                                          'applywarp', '--ref=' + refvol,
                                          '--in=' + infile,
                                          '--out=' + outfile,
                                          '--warp=' + warpvol,
                                          '--premat=' + premat,
                                          '--interp=trilinear']),
                                shell=True)
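For stage 2, each submitted job is an applywarp call of the form below, one per cope/tstat/zstat/varcope image; `<feat_dir>` is only a placeholder for an actual func.feat path:

# illustrative stage-2 command produced by the ' '.join(...) above
cmd = ('fsl_sub -q veryshort.q applywarp'
       ' --ref=<feat_dir>/reg/standard.nii.gz'
       ' --in=<feat_dir>/stats/cope1'
       ' --out=<feat_dir>/reg_standard/stats/cope1'
       ' --warp=<feat_dir>/reg/highres2standard_warp.nii.gz'
       ' --premat=<feat_dir>/reg/example_func2highres.mat'
       ' --interp=trilinear')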