    args = ap.parse_args()
    wmDepth = args.wmDepth
    matter = args.matter
        
    # === Determine the subject list and their group memberships ===
    check_dir(BASE_DIR)
    ds = glob.glob(os.path.join(BASE_DIR, "S??"))
    ds.sort()

    sIDs = []
    isPWS = []
    SSI4 = []
    for (i0, t_path) in enumerate(ds):
        (t_path_0, t_sID) = os.path.split(t_path)
        sIDs.append(t_sID)
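        # SSI: presumably the SSI-4 stuttering severity score (PWS = persons who stutter)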
        SSI4.append(get_qdec_info(t_sID, "SSI"))

        if get_qdec_info(t_sID, "diagnosis") == "PWS":
            isPWS.append(1)
        else:
            isPWS.append(0)
        
    isPWS = np.array(isPWS)

    assert len(sIDs) > 0
    assert len(sIDs) == len(isPWS)

    # === Get the list of cortical ROIs ===
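    # (bSpeech=True presumably restricts the list to speech-network ROIs of the aparc12 parcellation)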
    rois0 = get_aparc12_cort_rois(bSpeech=True)

    check_file(CTAB)
    assert fwhm > 0

    # === Find subject IDs and group labels === #
    check_dir(tractSegDir)

    ds = glob.glob(os.path.join(tractSegDir, "*"))
    ds.sort()

    sIDs = []
    bPWS = []

    for (i0, t_fn) in enumerate(ds):
        (t_path, t_sID) = os.path.split(t_fn)
        sIDs.append(t_sID)
        
        if get_qdec_info(t_sID, "diagnosis") == "PWS":
            bPWS.append(1)
        else:
            bPWS.append(0)

    sIDs = np.array(sIDs)
    bPWS = np.array(bPWS)


    # === Locate the d2a files of each subject  === #
    d2as = []
    FAimgs = []
    warps = []
    for (i0, t_sID) in enumerate(sIDs):
        t_d2a = os.path.join(TRACULA_DIR, t_sID, "dmri", "xfms", "d2a.mat")
        check_file(t_d2a)
    origDir = os.path.join(tbssDir, "origdata")
    check_dir(origDir)

    ds = glob.glob(os.path.join(origDir, "S??.nii.gz"))
    ds.sort()

    sIDs = []
    idxPWS = []
    idxPFS = []

    for (i0, d) in enumerate(ds):
        [tpath, tfn] = os.path.split(d)
        sID = tfn.replace(".nii.gz", "")
        sIDs.append(sID)
        
        if get_qdec_info(sID, "diagnosis") == "PWS":
            idxPWS.append(i0)
        elif get_qdec_info(sID, "diagnosis") == "PFS":
            idxPFS.append(i0)
        else:
            raise Exception("Unrecognized diagnosis for subject %s: %s" %
                            (sID, get_qdec_info(sID, "diagnosis")))
    
    # === Process the clusters === #
    for i0 in range(nClust):
        # == Determine label == #
        roi_fn = tempfile.mktemp() + ".nii.gz"
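        # fslroi <in> <out> <xmin> <xsize> <ymin> <ysize> <zmin> <zsize>;
        # a size of 1 along each axis extracts the single voxel at the cluster coordinates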
        roi_cmd = "fslroi %s %s %d 1 %d 1 %d 1" % \
                  (atlas_label_fn, roi_fn, clustX[i0], clustY[i0], clustZ[i0])
        saydo(roi_cmd)
        check_file(roi_fn)
    skipIDs = []
    if args.skipSubj is not None:
        skipIDs = args.skipSubj.split(",")

    sIDs = []
    grps = []
    for (i0, d) in enumerate(ds):
        t_sID = os.path.split(d)[1]

        if t_sID in skipIDs:
            info_log("Skipping subject: %s" % t_sID)
            continue

        sIDs.append(t_sID)
        grps.append(get_qdec_info(t_sID, "diagnosis"))

    ugrps = list(np.unique(np.array(grps)))
    ugrps.sort()

    info_log("Discovered %s subjects" % len(sIDs))
    info_log("The subjects belong to %d groups:" % (len(ugrps)))
    for (i0, grp) in enumerate(ugrps):
        info_log("\t%s" % grp)
                 
    matFile = __file__.replace(".py", ".mat")
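    # Cache results in a .mat file named after this script; recompute only if it is missing or args.bRedo is set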
    from scipy.io import savemat, loadmat
    
    if not os.path.isfile(matFile) or args.bRedo:
        #=== Extract ROI morphological info ===#
        morphInfo = []
    
    d0 = glob.glob(os.path.join(inDir, "S??"))

    sIDs = []

    for (i0, t_d) in enumerate(d0):
        sIDs.append(os.path.split(t_d)[1])
        

    sIDs.sort()

    grps = []
    ages = []
    genders = []
    for (i0, t_sID) in enumerate(sIDs):
        if get_qdec_info(t_sID, "diagnosis") == "PWS":
            grps.append(1)
        else:
            grps.append(0)

        if get_qdec_info(t_sID, "gender") == "Male":
            genders.append(1)
        else:
            genders.append(0)

        ages.append(float(get_qdec_info(t_sID, "Age")))

    grps = np.array(grps)
    ages = np.array(ages)
    genders = np.array(genders)
    d_orig = glob.glob(os.path.join(orig_dir, 'S*.nii.gz'))
    S_list = []
    for fn in d_orig:
        S_list.append(os.path.basename(fn).replace(".nii.gz", ""))
    S_list.sort()
    print('\nFound %d subjects in %s\n' % (len(S_list), orig_dir))
    # === Get subjects info ===
    isPWS_list = [0] * len(S_list)
    isFemale_list = [0] * len(S_list)
    age_list = [-1.0] * len(S_list)
#    done_list = [0] * len(S_list)
#    SSI4_list = [-1.0] * len(S_list)

    for (i0, sID) in enumerate(S_list):
        t_diag = get_qdec_info(sID, "diagnosis")
        if t_diag == "PWS":
            isPWS_list[i0] = 1
        else:
            isPWS_list[i0] = 0

        t_gen = get_qdec_info(sID, "gender")
        if t_gen.lower() == "female":
            isFemale_list[i0] = 1
        else:
            isFemale_list[i0] = 0
            
        age_list[i0] = float(get_qdec_info(sID, "Age"))

    isPWS_list = np.array(isPWS_list)
    isFemale_list = np.array(isFemale_list)
    args = ap.parse_args()
    subjsListFN = args.subjsListFN
    label = args.label

    check_file(subjsListFN)

    with open(subjsListFN, 'r') as f_subjsList:
        t_subjs = f_subjsList.read().split('\n')

    t_subjs = remove_empty_strings(t_subjs)

    sIDs = {"PFS": [], "PWS": []}
    for (i0, t_sID) in enumerate(t_subjs):
        t_grp = get_qdec_info(t_sID, "diagnosis")
        sIDs[t_grp].append(t_sID)
    
    grps = list(sIDs.keys())
    hemis = ["lh", "rh"]

    asymmIdx = {}
    for (i0, grp) in enumerate(grps):
        asymmIdx[grp] = [np.nan] * len(sIDs[grp])
        for (i1, sID) in enumerate(sIDs[grp]):
            sDir = os.path.join(LABEL_BASE_DIR, sID)
            check_dir(sDir)

            vols = [0] * len(hemis)
            for (j0, hemi) in enumerate(hemis):
                diff_mask_fn = os.path.join(sDir,

    meas = {'PFS': np.array([np.nan] * nPFS),
            'PWS': np.array([np.nan] * nPWS)}

    for grp in corr_z_tab.keys():
        print('Processing data from group %s...'%grp)
        for (i1, sID) in enumerate(sIDs[grp]):
            print('Processing data from subject %s...'%sID)

            res = np.load(os.path.join(bips_resting_dir, sID, resfn))
            t_zs = np.reshape(res['corr_tab'], nROIs * nROIs)
            
            # Fisher (z) transformation
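            # 0.5 * ln((1 + r) / (1 - r)), i.e., the arctanh of the correlation values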
            corr_z_tab[grp][i1] = np.log((1 + t_zs) / (1 - t_zs)) * 0.5
            # sys.exit(0)

            if grp == 'PWS':
                t_SSI = get_qdec_info(sID, 'SSI')
                SSI4[i1] = float(t_SSI)

            if len(corrMeas) > 0:
                t_meas = get_qdec_info(sID, corrMeas)
                try:
                    meas[grp][i1] = float(t_meas)
                except ValueError:
                    print('\tWARNING: %s measure for %s is not valid'%(corrMeas, sID))
                       
        print('')
        corr_z_tab[grp] = np.transpose(corr_z_tab[grp])

    ### Write to a clean mat file ###
    idx_keep = []
    for i0 in range(len(corr_z_tab['PFS'])):
    ds.sort()
    sIDs = []
    isPWS = []
    aparc12_fns = []
    aparc12_diff_fns = []

    merged = os.path.join(aparc12Dir, "merged.nii.gz")
    os.system("rm -f %s" % merged)
    merge_cmd = "fslmerge -t %s " % merged
    
    for (i0, d) in enumerate(ds):
        [tpath, tfn] = os.path.split(d)
        sID = tfn.replace(".nii.gz", "")
        sIDs.append(sID)
        
        isPWS.append(get_qdec_info(sID, "diagnosis") == "PWS")
        
        t_aparc12 = os.path.join(DATA_DIR, sID, "aparc12.nii.gz")
        check_file(t_aparc12)
        aparc12_fns.append(t_aparc12)

        # == Locate the d2a FSL xfm mat == #
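        # ("d2a" presumably maps diffusion space to anatomical space; its inverse, a2d, is created below)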
        d2a = os.path.join(TRACULA_DIR, sID, "dmri", "xfms", "d2a.mat")
        check_file(d2a)
        
        # == Use convert_xfm to create a2d FSL xfm mat == #
        a2d = os.path.join(TRACULA_DIR, sID, "dmri", "xfms", "a2d.mat")
        os.system("rm -f %s" % a2d)
        inv_cmd = "convert_xfm -omat %s -inverse %s" % (a2d, d2a)
        saydo(inv_cmd)
        check_file(a2d)