def get_first(mse):
    """Return the path of a FIRST subcortical segmentation for *mse*.

    Looks under first_all/ for a ``*first*seg*.nii.gz``; when none is found
    (or no "corrected" segmentation exists yet) the pbr first_all workflow
    is run first.  Returns "" when no segmentation can be located.
    """
    first = ""
    first_seg = glob("{}/{}/first_all/*first*seg*.nii.gz".format(
        _get_output(mse), mse))
    # BUG FIX: the original tested `"corrected" in first_seg`, which checks
    # list *membership* (an element exactly equal to "corrected"), not a
    # substring of any path; test each path instead.
    if len(first_seg) == 0 or not any("corrected" in f for f in first_seg):
        cmd = ["pbr", mse, "-w", "first_all", "-R"]
        Popen(cmd).wait()
        # Re-glob: pbr may have just produced the segmentation.  The original
        # indexed first_seg[0] here, which raised IndexError whenever the
        # initial glob came back empty.
        first_seg = glob("{}/{}/first_all/*first*seg*.nii.gz".format(
            _get_output(mse), mse))
        if first_seg:
            first = first_seg[0]
    else:
        print(mse, "EXISTS")
        first = first_seg[0]
    return first
def get_t1(mse):
    """Return the newest aligned T1 path for *mse*, with processing tags
    (N4corr/reorient/brain_mask) stripped from the name; "" when absent."""
    t1 = ""
    status_path = "{}/{}/alignment/status.json".format(_get_output(mse), mse)
    if os.path.exists(status_path):
        with open(status_path) as fh:
            status = json.load(fh)
        t1_files = status["t1_files"]
        if t1_files:
            t1 = t1_files[-1]
            for tag in ("N4corr", "reorient", "brain_mask"):
                t1 = t1.replace(tag, "")
    return t1
def get_series(mse):
    """Return [t1, t2, flair]: the newest aligned file of each series for
    *mse*, each entry "" when that series is missing."""
    picked = {"t1_files": "", "t2_files": "", "flair_files": ""}
    status_path = "{}/{}/alignment/status.json".format(_get_output(mse), mse)
    if os.path.exists(status_path):
        with open(status_path) as fh:
            status = json.load(fh)
        for key in picked:
            if len(status[key]) > 0:
                picked[key] = status[key][-1]
    return [picked["t1_files"], picked["t2_files"], picked["flair_files"]]
def check_nii(mse):
    """Check the nii/ directory of *mse* for the PSIR series.

    Returns True when a "C2_3_2Fl_seg_psir_TI_PSIR" series is present,
    "" when the directory exists but the series is absent, and the string
    "Nifti not run" when the nii/ directory is missing.  (The mixed return
    types are preserved for existing callers.)
    """
    nii_dir = "{}/{}/nii/".format(_get_output(mse), mse)
    if not os.path.exists(nii_dir):
        return "Nifti not run"
    nifti = ""
    for series in os.listdir(nii_dir):
        if "C2_3_2Fl_seg_psir_TI_PSIR" in series:
            # (dead `else: x = True` branch from the original removed)
            nifti = True
            break
    return nifti
def check_for_sq_names(mse, sq, pipeline, sequence_name):
    """Look up *sq* (T1/T2/FLAIR/T1_Gad) in a pipeline status.json and
    delegate the sequence-name check to check_in_nii_align()."""
    status_path = _get_output(mse) + "/" + mse + pipeline + "/status.json"
    if not os.path.exists(status_path):
        return
    with open(status_path) as fh:
        status = json.load(fh)
    # map the sequence label onto its status.json key
    key_for_sq = {
        "T1": "t1_files",
        "T2": "t2_files",
        "FLAIR": "flair_files",
        "T1_Gad": "gad_files",
    }
    if sq in key_for_sq:
        check_in_nii_align(sq, key_for_sq[sq], mse, status, pipeline,
                           sequence_name)
def run_pbr_align(mseid):
    """Delete any existing alignment output for *mseid* and re-run pbr align."""
    alignment_folder = _get_output(mseid) + '/' + mseid + "/alignment/"
    if os.path.exists(alignment_folder):
        # Remove via shutil.rmtree instead of shelling out to `rm -r`
        # (shutil is already used elsewhere in this file); same effect,
        # no extra subprocess.
        print(['rm', '-r', alignment_folder])
        shutil.rmtree(alignment_folder)
    cmd = ['pbr', mseid, '-w', 'align', '-R']
    print(cmd)
    proc = Popen(cmd)
    proc.wait()
def get_sienax(mse):
    """Return (wm, gm, csf, pGM, brain, lesion) sienax mask paths for *mse*.

    Prefers sienax_optibet output, then falls back to sienaxorig_*; every
    other path is derived from the white-matter segmentation path, and all
    come back "" when no sienax output exists.
    """
    optibet = glob("{}/{}/sienax_optibet/ms*/I_stdmaskbrain_seg_2.nii.gz".format(
        _get_output(mse), mse))
    legacy = glob("{}/{}/sienaxorig_*/I_stdmaskbrain_seg_2.nii.gz".format(
        _get_output(mse), mse))
    if optibet:
        wm = optibet[0]
    elif legacy:
        wm = legacy[-1]
    else:
        wm = ""
        cmd = ["pbr", mse, "-w", "sienax_optibet", "-R"]
        #Popen(cmd).wait()
    # sibling masks share the WM path with one token swapped
    gm = wm.replace("seg_2", "seg_1")
    csf = wm.replace("seg_2", "seg_0")
    pGM = wm.replace("seg_2", "pve_1_segperiph")
    bm = wm.replace("I_stdmaskbrain_seg_2", "I_brain")
    lesion = wm.replace("I_stdmaskbrain_seg_2", "lesion_mask")
    if not os.path.exists(lesion):
        lesion = ""
    return wm, gm, csf, pGM, bm, lesion
def get_t1(mse):
    """Return the newest aligned T1 for *mse* ("_reorient" stripped);
    falls back to the oldest T1 when the newest is a DESPOT acquisition."""
    t1_file = ""
    if not mse.startswith("mse"):
        return t1_file
    status_path = "{}/{}/alignment/status.json".format(_get_output(mse), mse)
    if os.path.exists(status_path):
        with open(status_path) as fh:
            status = json.load(fh)
        t1_files = status["t1_files"]
        if t1_files:
            t1_file = t1_files[-1].replace("_reorient", "")
            if "DESPOT" in t1_file:
                cmd = ["pbr", mse, "-w", "align", "-R"]
                #Popen(cmd).wait()
                t1_file = t1_files[0].replace("_reorient", "")
    return t1_file
def reg_to_BL(mse_list, long):
    """Register every follow-up timepoint to the baseline T1.

    mse_list[0] is the baseline exam.  Each later mse is flirt- and
    fnirt-registered to the baseline T1 (affine/warp/jacobian written into
    the mse's own jacobian/ folder), and its FIRST segmentation is resampled
    into baseline space and binarized under <long>/first/.
    """
    bl_mse = mse_list[0]
    bl_t1 = get_t1(bl_mse)
    for mse in mse_list[1:]:
        t1_in = get_t1(mse)
        jacobian_path = "{}/{}/jacobian/".format(_get_output(mse), mse)
        if not os.path.exists(jacobian_path):
            os.mkdir(jacobian_path)
        affine = jacobian_path + bl_mse + "_affinereg_" + mse + ".mat"
        affine_reg = jacobian_path + bl_mse + "_affinereg_" + mse + ".nii.gz"
        fnirt_reg = jacobian_path + bl_mse + "_fnirt_" + mse + ".nii.gz"
        jacobian = jacobian_path + bl_mse + "_jacobian_" + mse + ".nii.gz"
        # affine registration of this T1 to baseline
        cmd = ['flirt', '-ref', bl_t1, '-in', t1_in,
               '-omat', affine, '-out', affine_reg]
        Popen(cmd).wait()
        # nonlinear warp, saving the jacobian determinant image
        cmd = ['fnirt', '--ref=' + bl_t1, '--in=' + t1_in, '--aff=' + affine,
               '--iout=' + fnirt_reg, '--jout=' + jacobian]
        Popen(cmd).wait()
        if not os.path.exists(long + "/first/"):
            os.mkdir(long + "/first/")
        out_first = long + "/first/" + mse + "first.nii.gz"
        # resample this timepoint's FIRST segmentation into baseline space
        cmd = ["flirt", "-init", affine, "-applyxfm", "-in", get_first(mse),
               "-ref", bl_t1, "-out", out_first]
        Popen(cmd).wait()
        cmd = ["fslmaths", out_first, "-bin", out_first.replace(".n", "-bin.n")]
        Popen(cmd).wait()
        cmd = ["fslmaths", get_first(bl_mse), "-bin",
               long + '/first/' + bl_mse + "first_seg-bin_BL"]
        Popen(cmd).wait()
        # BUG FIX: the original concatenated `long + '/first' + bl_mse`,
        # dropping the path separator, so the baseline copy landed next to
        # (not inside) the first/ directory.
        shutil.copy(get_first(bl_mse),
                    long + '/first/' + bl_mse + "-first_segBL.nii.gz")
def check_for_resampling_sienax(mse):
    """Return True when the sienax I_brain intensity maximum is integral.

    Runs ``fslstats -R`` on the newest sienaxorig_* I_brain.nii.gz and tests
    whether the reported maximum is a whole number (heuristic for whether
    the image was resampled).  Returns "" when anything is missing or fails.
    """
    check = ""
    try:
        t1 = glob("{}/{}/sienaxorig_*/I_brain.nii.gz".format(
            _get_output(mse), mse))[-1]
        proc = Popen(["fslstats", t1, "-R"], stdout=PIPE)
        # fslstats -R prints "<min> <max>" on one line; keep the maximum.
        # (renamed from `max`, which shadowed the builtin)
        max_val = str(float(proc.stdout.read().decode("utf-8").split()[-1]))
        check = max_val.endswith(".0")
    except Exception:
        # Preserve the original best-effort "" on failure, but stop masking
        # KeyboardInterrupt/SystemExit with a bare `except:`.
        pass
    return check
def get_sq_name(mse):
    """Return the zero-stripped series number of the newest T1 for *mse*.

    The series number is the third '-'-separated field of the T1 filename
    in nii/status.json.  Returns "" when there are no T1s and None when the
    status file is missing or malformed (original behavior preserved).
    """
    try:
        with open(_get_output(mse) + "/" + mse + "/nii/status.json") as data_file:
            data = json.load(data_file)
        if len(data["t1_files"]) == 0:
            print("")
            sq = ""
        else:
            t1_file = data["t1_files"][-1]
            sq = t1_file.split('-')[2:3][0].lstrip("0")
            print(sq)
        return sq
    except (OSError, ValueError, KeyError, IndexError):
        # Narrowed from a bare `except:`; still falls through to None.
        pass
def run_pbr_align(mse):
    """Remove stale alignment output and re-run pbr align with a password.

    NOTE(review): the original source line was mangled by credential
    scrubbing (`getpass("mspacman password: "******"-ps", password]`) and
    did not parse.  This reconstruction prompts for the password and passes
    it to pbr via -ps, mirroring the other run_pbr_align() in this file —
    confirm the exact pbr flags against the original.
    """
    from getpass import getpass
    alignment_folder = _get_output(mse) + "/{0}/alignment".format(mse)
    if os.path.exists(alignment_folder):
        cmd_rm = ['rm', '-r', alignment_folder]
        print(cmd_rm)
        proc = Popen(cmd_rm)
        proc.wait()
    print("")
    password = getpass("mspacman password: ")
    cmd = ['pbr', mse, '-w', 'align', '-R', "-ps", password]
    print(cmd)
    proc = Popen(cmd)
    proc.wait()
def run_lst_sienax(msid, config):
    """Run sienax_optibet with an LST lesion mask for *msid*.

    Scans <long_output_directory>/<msid>/lst_edit_sienax/ for timepoints,
    picking a lesion mask per timepoint (mindcontrol edits preferred, then
    lpa output, then get_lst()), and launches sienax_optibet on the chosen
    timepoint's T1.  Returns [lst_edit_sienax_dir, mse_lst].

    NOTE(review): block structure reconstructed from a whitespace-mangled
    source; the sienax launch is placed after the scan loop (acting on the
    last/chosen timepoint) — confirm against the original intent.
    """
    lst = mse_lst = ""
    lst_edit_sienax = config["long_output_directory"] + "/" + msid + "/lst_edit_sienax/"
    if os.path.exists(lst_edit_sienax):
        for mse_lst in os.listdir(lst_edit_sienax):
            print("**************", mse_lst)
            if mse_lst.startswith("mse"):
                # BUG FIX: the original called _get_output(mse) here, but no
                # `mse` exists in this scope (NameError at runtime); the
                # timepoint being inspected is mse_lst.
                lst_mask = glob(
                    "{0}/{1}/mindcontrol/ms*{1}*FLAIR*/lst/lst_edits/no_FP_filled_FN*.nii.gz"
                    .format(_get_output(mse_lst), mse_lst))
                lst_mask2 = glob(
                    "{}/{}/lst/lpa/ples_lpa_m*index*.nii.gz".format(
                        _get_output(mse_lst), mse_lst))
                if len(lst_mask) > 0:
                    lst = lst_mask[-1]
                elif len(lst_mask2) > 0:
                    lst = lst_mask2[-1]
                else:
                    L = get_lst(msid)
                    lst = L[0]
                    mse_lst = L[1]
    if os.path.exists(lst):
        if not os.path.exists(config["long_output_directory"] + "/" + msid):
            os.mkdir(config["long_output_directory"] + "/" + msid)
        if not os.path.exists(lst_edit_sienax):
            os.mkdir(lst_edit_sienax)
        if mse_lst.startswith("mse"):
            t1 = get_series(mse_lst)[0]
            cmd = ["sienax_optibet", t1, "-lm", lst, "-r", "-d", "-o",
                   lst_edit_sienax + mse_lst]
            print(cmd)
            Popen(cmd).wait()
    return [lst_edit_sienax, mse_lst]
def get_first_values(mse):
    """Return FIRST volumes for 15 subcortical structures as a list.

    Order: L thal, L caud, L put, L pall, L hipp, L amy, L acc,
           R thal, R caud, R put, R pall, R hipp, R amy, R acc, brainstem.
    Every entry is '' when no firstseg image is found under first_all/.
    """
    # FIRST/FreeSurfer label codes, in the order the result list is returned
    label_codes = (10, 11, 12, 13, 17, 18, 26,
                   49, 50, 51, 52, 53, 54, 58, 16)
    values = [''] * len(label_codes)
    first_dir = _get_output(mse) + "/" + mse + "/first_all/"
    if os.path.exists(first_dir):
        for fname in os.listdir(first_dir):
            if fname.endswith(("firstseg.nii.gz", "firstsegs.nii.gz")):
                print(fname)
                seg = first_dir + fname
                values = [calc_first(seg, code) for code in label_codes]
    return values
def get_mt(mse):
    """Return (mt_ON, mt_OFF, mt_t1) file paths from nii/status.json.

    Classification per path: contains NON_MT_TR11_FL15 -> MT T1,
    contains NON_MT -> MT off, otherwise -> MT on.
    """
    mt_ON, mt_OFF, mt_t1 = "", "", ""
    nii = "{}/{}/nii/status.json".format(_get_output(mse), mse)
    if os.path.exists(nii):
        with open(nii) as data_file:
            data = json.load(data_file)
        if len(data["mt_files"]) > 0:
            # BUG FIX: the original took data["mt_files"][0] (one path
            # string) and iterated over it — i.e. over its *characters* —
            # so the substring tests below could never match.  Iterate the
            # list of MT files instead.
            for path in data["mt_files"]:
                if "NON_MT_TR11_FL15" in path:
                    mt_t1 = path
                elif "NON_MT" in path:
                    mt_OFF = path
                else:
                    mt_ON = path
    return mt_ON, mt_OFF, mt_t1
def edit_json(mse):
    # Ad-hoc/debug helper: inspects the nii status.json of one hard-coded
    # exam (mse6780) and prints any "reorient" entries among its T1 files.
    # NOTE(review): both json.dump() calls below are broken as written —
    # json.dump(data) is missing its file-object argument (TypeError), and
    # json.dump(data_file, reorient) has the (obj, fp) arguments reversed
    # while targeting a handle that was opened read-only.  Left untouched
    # here; the intended edit-and-rewrite behavior needs a proper fix.
    # NOTE(review): this file reached review with indentation collapsed;
    # the nesting below is reconstructed.
    if mse == "mse6780":
        nii = _get_output(mse) + "/" + mse + "/nii/status.json"
        if os.path.exists(nii):
            with open(nii) as data_file:
                data = json.load(data_file)
                json.dump(data)
                if len(data["t1_files"]) > 0:
                    seq = data["t1_files"]
                    for reorient in seq:
                        if "reorient" in reorient:
                            print(reorient)
                            print(data)
                            print("******")
                            print(data_file, reorient)
                            json.dump(data_file, reorient)
def write_csv(c, out):
    # Read a CSV of (msid, mse) rows, fill in FIRST subcortical volumes and
    # sienax measures per exam, then write the augmented table to *out*.
    # NOTE(review): indentation was collapsed in the received source; the
    # nesting below is reconstructed.
    t1 = ""
    df = pd.read_csv("{}".format(c))
    for idx in range(len(df)):
        msid = df.loc[idx, 'msid']
        mse = df.loc[idx, "mse"]
        #t1 = get_t1(ID)
        first_path = "{}/{}/first_all/".format(_get_output(mse), mse)
        print(first_path)
        if os.path.exists(first_path):
            #first_path = first_path[0]
            print("FIRST PATH", first_path)
            # NOTE(review): get_first_values() elsewhere in this file takes
            # an mse id and builds the first_all path itself — passing the
            # *path* here looks wrong; confirm which helper is intended.
            F = get_first_values(first_path)
            df.loc[idx, 'Left-Thalamus-Proper new'] = F[0]
            df.loc[idx, 'Left-Caudate new'] = F[1]
            df.loc[idx, 'Left-Putamen new'] = F[2]
            df.loc[idx, 'Left-Pallidum new'] = F[3]
            df.loc[idx, 'Left-Hippocampus new'] = F[4]
            df.loc[idx, 'Left-Amygdala new'] = F[5]
            df.loc[idx, 'Left-Accumbens new'] = F[6]
            df.loc[idx, 'Right-Thalamus-Proper new'] = F[7]
            df.loc[idx, 'Right-Caudate new'] = F[8]
            df.loc[idx, 'Right-Putamen new'] = F[9]
            df.loc[idx, 'Right-Pallidum new'] = F[10]
            df.loc[idx, 'Right-Hippocampus new'] = F[11]
            df.loc[idx, 'Right-Amygdala new'] = F[12]
            df.loc[idx, 'Right-Accumbens new'] = F[13]
            df.loc[idx, 'Brain Stem new'] = F[14]
        # NOTE(review): `base_dir` is not defined in this function and `id`
        # is the builtin — this glob can only work if base_dir is a module
        # global and the subject id was meant instead of `id`; TODO confirm.
        sienax_path = glob(base_dir + '/sienax_output/{}*'.format(id))
        if len(sienax_path) > 0:
            report = sienax_path[0] + '/report.sienax'
            # NOTE(review): get_sienax() in this file takes an mse, not a
            # report path — looks like this expects a different helper.
            SX = get_sienax(report)
            df.loc[idx, 'V Scale'] = SX[0]
            df.loc[idx, 'pGM'] = SX[1]
            df.loc[idx, 'CSF'] = SX[2]
            df.loc[idx, 'GM'] = SX[3]
            df.loc[idx, 'WM'] = SX[4]
            df.loc[idx, "BV"] = SX[5]
    df.to_csv('{}'.format(out))
def get_first_values(mse):
    """Parse FIRST structure volumes out of first_all/status.json.

    Returns 15 values in the order: L thal, L caud, L put, L pall, L hipp,
    L amy, L acc, R thal, R caud, R put, R pall, R hipp, R amy, R acc,
    brainstem — each '' when its line is absent.
    """
    # (substring to match, whitespace-split field holding the value);
    # order matters: first match wins, exactly like the original elif chain
    rules = [
        ("Brain-Stem", 3),
        ("Left-Accumbens", 1), ("Left-Amy", 1), ("Left-Caud", 1),
        ("Left-Hipp", 1), ("Left-Palli", 1), ("Left-Puta", 1),
        ("Left-Thalamus", 1),
        ("Right-Accumbens", 1), ("Right-Amy", 1), ("Right-Caud", 1),
        ("Right-Hipp", 1), ("Right-Palli", 1), ("Right-Puta", 1),
        ("Right-Thalamus", 1),
    ]
    vals = {key: '' for key, _ in rules}
    status = "{}/{}/first_all/status.json".format(_get_output(mse), mse)
    if os.path.exists(status):
        with open(status, "r") as f:
            stripped = [line.strip() for line in f.readlines()]
        for line in stripped:
            for key, field in rules:
                if key in line:
                    if key == "Brain-Stem":
                        print(line)
                    vals[key] = line.split()[field].replace(",", "")
                    if key == "Brain-Stem":
                        print(vals[key], "^^^^^^^^^^^^^^^^^^")
                    break
    ordered = [vals[k] for k in (
        "Left-Thalamus", "Left-Caud", "Left-Puta", "Left-Palli",
        "Left-Hipp", "Left-Amy", "Left-Accumbens",
        "Right-Thalamus", "Right-Caud", "Right-Puta", "Right-Palli",
        "Right-Hipp", "Right-Amy", "Right-Accumbens", "Brain-Stem")]
    print(*ordered)
    return ordered
def check_for_resampling_align(mse):
    """Return True when the aligned T1's intensity maximum is integral.

    Mirrors check_for_resampling_sienax() but reads the newest T1 from
    alignment/status.json.  Returns "" when the status file or T1 is
    missing, or when fslstats fails.
    """
    check = ""
    align = _get_output(mse) + "/" + mse + "/alignment/status.json"
    if os.path.exists(align):
        with open(align) as data_file:
            data = json.load(data_file)
        if len(data["t1_files"]) > 0:
            t1_file = data["t1_files"][-1]
            try:
                proc = Popen(["fslstats", t1_file, "-R"], stdout=PIPE)
                # fslstats -R prints "<min> <max>"; keep the maximum only.
                # (renamed from `max`, which shadowed the builtin)
                max_val = str(float(
                    proc.stdout.read().decode("utf-8").split()[-1]))
                check = max_val.endswith(".0")
            except Exception:
                # Narrowed from a bare `except:`; still best-effort "".
                pass
    return check
def register_non_chop(mse, in_file, config):
    # Build an un-chopped ("no_chop") registration of *in_file* into the
    # baseline-MNI space: reorient the raw nifti, flirt it to its aligned
    # counterpart, concatenate that with the MNI affine and the config
    # "chop" transform, then apply the full transform with flirt_reg().
    # Brain-mask inputs are skipped; work is skipped when the _nochop
    # output already exists.
    # NOTE(review): indentation was collapsed in the received source; the
    # nesting below is reconstructed.  Assumes config["working_directory"]
    # ends with a path separator (paths are built by bare concatenation) —
    # TODO confirm.
    if in_file.endswith(".nii.gz"):
        # the raw (pre-alignment) nifti corresponding to this aligned file
        nifti = in_file.replace("alignment", "nii").replace("_T1mni", "")
        mni_affine = os.path.split(in_file)[0] + "/mni_affine.mat"
        reorient = config["working_directory"] + mse + "_reorient.nii.gz"
        if not os.path.exists(config["working_directory"] + mse):
            os.mkdir(config["working_directory"] + mse)
        bl_nochop = "{}/{}/alignment/baseline_mni/no_chop/".format(
            _get_output(mse), mse)
        fullstd = bl_nochop + in_file.split("/")[-1].replace(
            ".nii.gz", "_fullstd.mat")
        if not "brain_mask" in in_file:
            if not os.path.exists(
                    fullstd.replace("fullstd.mat", "_nochop.nii.gz")):
                # standard-orientation copy of the raw nifti
                cmd = ["fslreorient2std", nifti, reorient]
                Popen(cmd).wait()
                # rigid (6-dof) registration to the aligned image -> _chop mat
                cmd = [
                    "flirt", "-in", reorient, "-ref",
                    in_file.replace("_T1mni", ""), "-dof", "6", "-omat",
                    fullstd.replace("fullstd", "_chop")
                ]
                Popen(cmd).wait()
                # chain: _chop then mni_affine -> fullstd
                cmd = [
                    "convert_xfm", "-omat", fullstd, "-concat", mni_affine,
                    fullstd.replace("fullstd", "_chop")
                ]
                print(cmd)
                Popen(cmd).wait()
                # chain: fullstd then the configured "chop" transform
                # (convert_xfm -concat applies right-to-left)
                cmd = [
                    "convert_xfm", "-omat", fullstd, "-concat",
                    config["chop"], fullstd
                ]
                print(cmd)
                Popen(cmd).wait()
                # apply the full transform into the padded MNI reference
                flirt_reg(reorient, config["mni_paddy"], fullstd,
                          fullstd.replace("fullstd.mat", "_nochop.nii.gz"))
def check_json(mse):
    """Print a grid-submit command for exams whose T1 list still contains
    "reorient" entries (i.e. nifti/align should be re-run)."""
    nii = _get_output(mse) + "/" + mse + "/nii/status.json"
    if not os.path.exists(nii):
        return
    with open(nii) as data_file:
        data = json.load(data_file)
    file = ["t1_files", "t2_files", "affine", "flair_files"]
    #for sq in file:
    #print(sq)
    if len(data["t1_files"]) > 0:
        # (loop variable renamed — the original reused `data`, shadowing
        # the parsed status dict)
        for t1_entry in data["t1_files"]:
            if "reorient" in t1_entry:
                #print(t1_entry)
                #print(nii)
                print(
                    "python", "/data/henry6/gina/scripts/grid_submit.py",
                    '"{0} {1} {2} {3} {4} {5}"'.format(
                        "pbr", mse, "-w", "nifti", "align", "-R"))
def find_hardi(c):
    """Print nifti lists containing HARDI series for each (msid, mse) row
    of the CSV file *c*."""
    df = pd.read_csv("{}".format(c))
    for _, row in df.iterrows():
        # extracting the msid and mse from the csv file given as an argument
        msid = row["msid"]
        mse = row['mse']
        # look inside status.json for the files of interest
        if os.path.exists(_get_output(mse) + '/' + mse + '/nii/status.json'):
            # NOTE(review): the exists() test above uses the literal path but
            # the open() uses get_nifti(mse) — presumably the same file;
            # confirm get_nifti's return value.
            with open(get_nifti(mse)) as data_file:
                data = json.load(data_file)
            if len(data["t1_files"]) > 0:
                t1_file = data["t1_files"][-1].split('/')[-1]
                print(t1_file)
            if len(data["nifti_files"]) > 0:
                niftis = data["nifti_files"]
                # BUG FIX: `"hardi" in niftis` tested list membership for an
                # element exactly equal to "hardi"; test each path for the
                # substring instead.
                if any("hardi" in n for n in niftis):
                    print(niftis)
def get_sienax(mse):
    """Return sienax measures for *mse* as a list of strings.

    [VSCALING, peripheral grey, ventricular CSF, grey, white, brain,
     lesion volume, lesion count] — each "" when unavailable.
    """
    les_vol = num_lesions = ""
    VS = PG = VCSF = GM = WM = BV = ""
    reports = glob("/{0}/{1}/sienax_optibet/ms*/report.sienax".format(
        _get_output(mse), mse))
    if len(reports) >= 1:
        sienax = reports[-1]
        with open(sienax, "r") as f:
            lines = [line.strip() for line in f.readlines()]
        for line in lines:
            try:
                if line.startswith("VSCALING"):
                    VS = line.split()[1]
                elif line.startswith("pgrey"):
                    PG = line.split()[2]
                elif line.startswith("vcsf"):
                    VCSF = line.split()[2]
                elif line.startswith("GREY"):
                    GM = line.split()[2]
                elif line.startswith("WHITE"):
                    WM = line.split()[2]
                elif line.startswith("BRAIN"):
                    BV = line.split()[2]
            except IndexError:
                # malformed report line: skip it (was a bare except)
                pass
        lm = sienax.replace("report.sienax", "/lesion_mask.nii.gz")
        if os.path.exists(lm):
            try:
                img = nib.load(lm)
                data = img.get_data()
                les_vol = np.sum(data)
                num_lesions = count_les(lm)
                #num_lesions=""
            except Exception:
                # best-effort: leave lesion fields "" on any nibabel failure
                pass
    return [VS, PG, VCSF, GM, WM, BV, str(les_vol), str(num_lesions)]
def align_to_baseline(msid, mse_bl, mse):
    # Register one timepoint (*mse*) of subject *msid* to the baseline exam
    # (*mse_bl*): pull the baseline T1 into MNI space, affine-register this
    # timepoint's series to it, run the LST/sienax lesion pipeline, and
    # finally derive a lesion mask from FLAIR (preferred) or T2.
    # NOTE(review): relies on a module-level `config` dict (not a parameter).
    # NOTE(review): indentation was collapsed in the received source; the
    # nesting below is reconstructed.
    bl_t1 = get_series(mse_bl)[0]
    print(bl_t1)
    bl_t1_mni = get_mni_angulated(bl_t1, mse_bl)
    copy_mni_bl(bl_t1_mni, mse_bl)
    if mse.startswith("mse") and len(mse) > 3:
        t1 = get_series(mse)[0].replace("reorient", "").replace(
            "N4corr", "").replace("_brain_mask", "")
        t2 = get_series(mse)[1]
        flair = get_series(mse)[2]
        # NOTE(review): the get_series() visible in this file returns only
        # [t1, t2, flair] — index 3 would raise IndexError.  Presumably a
        # 4-element (gad-aware) variant is the one in scope; TODO confirm.
        gad = get_series(mse)[3]
        lst_edit = run_lst_sienax(msid, config)
        lst_edit_sienax = lst_edit[0]
        mse_lst = lst_edit[1]
        print(lst_edit)
        print(lst_edit_sienax)
        print(mse_lst)
        if mse_lst.startswith("mse") and len(mse_lst) > 3:
            t1_lst = get_series(mse_lst)[0]
            print(t1_lst)
            create_affine(t1_lst, bl_t1_mni, mse_lst)
        #register everything to baseline T1 MNI space - creating /alignment/baseline_mni directories
        print("create affine")
        create_affine(t1, bl_t1_mni, mse)
        files = [t1, t2, flair, gad]
        for file in files:
            register_non_chop(mse, file, config)
            if not mse == mse_bl:
                print("apply tp2 flirt")
                apply_tp2_flirt(file, bl_t1_mni, mse)
        #lst_edit_sienax = run_lst_sienax(msid, lst, config)
        #lst_mse = lst.split('/')[5]
        if os.path.exists(lst_edit_sienax) and mse_lst.startswith(
                "mse") and mse.startswith("mse"):
            # lesion/WM/GM masks registered into MNI space
            masks = register_masks_MNI(lst_edit_sienax, bl_t1_mni, config, msid)
            lesion = masks[0]
            wm_mni = masks[1]
            gm_mni = masks[2]
            lesion_bin = masks[3]
            # rewrite series paths to their baseline_mni (_T1mni) versions
            flair = "{}/{}/alignment/baseline_mni/{}".format(
                _get_output(mse), mse,
                flair.split('/')[-1].replace(".nii", "_T1mni.nii"))
            t1_mni = "{}/{}/alignment/baseline_mni/{}".format(
                _get_output(mse), mse,
                t1.split('/')[-1].replace(".nii", "_T1mni.nii"))
            t2 = "{}/{}/alignment/baseline_mni/{}".format(
                _get_output(mse), mse,
                t2.split('/')[-1].replace(".nii", "_T1mni.nii"))
            # prefer FLAIR-based lesion masking, fall back to T2
            if not flair.endswith("none") and len(t1) > 1:
                print("FLAIR ^^^^^^^^^^^^^^^^^^^^^^^^^^^", msid, mse, t1,
                      flair, wm_mni, gm_mni, lesion, lesion_bin)
                create_flair_lesions(msid, mse, t1, flair, wm_mni, gm_mni,
                                     lesion, lesion_bin, config)
            elif len(t2) > 5 and len(t1) > 1:
                print("T2 ^^^^^^^^^^^^^^^^^^^^^^", msid, mse, t1, t2,
                      wm_mni, gm_mni, lesion, lesion_bin)
                create_t2_lesions(msid, mse, t1, t2, wm_mni, gm_mni, lesion,
                                  lesion_bin, config)
            else:
                print(mse, "CAN NOT MAKE LESION MASK")
def create_t2_lesions(msid, mse, t1, t2, wm_mni, gm_mni, lesion, lesion_bin,
                      config):
    # Derive a T2-based lesion mask when no FLAIR exists: bias-correct the
    # T2, build eroded NAWM and lesion intensity statistics, turn them into
    # a probability map, threshold it into a final lesion mask, register
    # that back to original space, and re-submit sienax_optibet.
    # NOTE(review): indentation was collapsed in the received source; the
    # nesting below (especially everything under the vol_lesion guard) is
    # reconstructed — confirm against the original.
    base_dir = config["working_directory"] + "/" + mse + "/lesion_mni_t2/"
    mni_long = config["long_output_directory"] + "/" + msid + "/MNI/"
    #lesion = str(glob(mni_long + "lesion_mse*.nii.gz")[0])
    # intermediate/output files, all under base_dir
    wm_eroded = base_dir + "/wm_eroded"
    wm_t2 = base_dir + "/wm_t2.nii.gz"
    wm_with_les = base_dir + "/wm_withles.nii.gz"
    lesion_dil = base_dir + "/lesion_dil.nii.gz"
    lesion_mul_t2 = base_dir + "/lesion.nii.gz"
    prob_map = base_dir + "/prob_map_new.nii.gz"
    final_lesion = base_dir + "/lesion_final_new.nii.gz"
    wm_no_bs = base_dir + "/wm_no_bs.nii.gz"
    if os.path.exists(t2):
        t2_lesion = "{0}/{1}/lesion_origspace_t2/lesion.nii.gz*".format(
            _get_output(mse), mse)
        #if not os.path.exists(t2_lesion):
        if not os.path.exists(config["working_directory"] + "/" + mse):
            os.mkdir(config["working_directory"] + "/" + mse)
        if not os.path.exists(base_dir):
            os.mkdir(base_dir)
        if not os.path.exists(
                _get_output(mse) + "/" + mse + "/lesion_mni_t2/"):
            os.mkdir(_get_output(mse) + "/" + mse + "/lesion_mni_t2/")
        # binarize the incoming (MNI-space) lesion mask
        cmd = ["fslmaths", lesion, "-bin", lesion_bin]
        Popen(cmd).wait()
        print("RUNNING BIAS CORR")
        t2_file = run_bias_corr(t2, mse)
        # NAWM statistics: erode WM, take its median on the T2
        get_nawm(wm_mni, t2_file, wm_eroded, base_dir)
        median_nawm = cal_median(wm_eroded)
        # epsilon below the median so thresholding keeps the median voxel
        new_median_nawm = median_nawm - .000001
        create_ero_wm(wm_mni, t2_file, wm_eroded, new_median_nawm, base_dir)
        dil_lesion_minus_gm(lesion_bin, gm_mni, lesion_dil)
        create_wm_with_les(lesion_dil, wm_mni, wm_with_les, t2_file, wm_t2)
        std_nawm = get_std(base_dir + "/ero_WM_Lhalf.nii.gz")
        vol_nawm = get_vol(wm_eroded)
        cal_hist(std_nawm, vol_nawm, wm_t2, median_nawm,
                 base_dir + "/wm_hist.nii.gz")
        # lesion statistics on the bias-corrected T2
        median_lesion = cal_median(lesion_mul_t2)
        new_median_lesion = median_lesion - .000001
        les_mul_file(lesion_bin, t2_file, lesion_mul_t2, new_median_lesion,
                     base_dir, median_nawm)
        std_lesion = get_std(base_dir + "/lesion_Uhalf.nii.gz")
        vol_lesion = get_vol(lesion_mul_t2)
        if not vol_lesion == 0.0:
            cal_hist(std_lesion, vol_lesion, wm_t2, median_lesion,
                     base_dir + "/lesion_hist.nii.gz")
            # NOTE(review): `no_wm` is not defined anywhere in this function
            # — NameError at runtime unless it is a module-level global;
            # TODO confirm.
            make_prob_map(gm_mni, base_dir, wm_mni, no_wm, lesion_dil,
                          wm_no_bs, wm_with_les, prob_map)
            create_t2_les(wm_with_les, lesion_dil, prob_map, wm_eroded,
                          final_lesion, lesion, base_dir)
            reg_les_origspace(mse, final_lesion, t1, "/lesion_origspace_t2/")
            new_les = "{}/{}/new_lesion/lesion.nii.gz".format(
                _get_output(mse), mse)
            if os.path.exists(new_les):
                shutil.move(
                    new_les,
                    "/data/henry6/gina/s_test/" + mse + "_new_les.nii.gz")
                #shutil.rmtree(new_les)
            cmd = ["pbr", mse, "-w", "sienax_optibet", "-R"]
            #Popen(cmd).wait()
            submit("pbr " + mse + " -w sienax_optibet -R")
def get_siena_data(msid, mse, mse2):
    """Collect the siena PBVC for the (mse, mse2) pair of subject *msid*.

    Looks in both the henry12 and henry10 long-output trees; when both hold
    a PBVC the smaller (more negative) value is kept and the henry10 copy
    of the larger run is deleted.  Returns [final_pbvc, mse2_t1, siena_label].
    """
    #siena_long = glob("/data/henry*/PBR_long/subjects/" + msid + '/siena_optibet/')
    henry12 = "/data/henry12/PBR_long/subjects/" + msid + '/siena_optibet/'
    henry10 = "/data/henry10/PBR_long/subjects/" + msid + '/siena_optibet/'
    pbvc_henry12, mse2_t1, siena_label, pbvc_henry10, final_pbvc = "", "", "", "", ""
    # best-effort: series name of the follow-up T1 (for labeling only)
    try:
        align = "{}/{}/alignment/status.json".format(_get_output(mse2), mse2)
        if os.path.exists(align):
            with open(align) as data_file:
                data = json.load(data_file)
            if len(data["t1_files"]) > 0:
                mse2_t1 = data["t1_files"][-1].split('/')[-1].split('-')[3].replace(".nii.gz", "")
                if "DESPOT" in mse2_t1:
                    mse2_t1 = data["t1_files"][0]
                print("MSE2", mse2_t1)
    except Exception:
        pass
    if os.path.exists(henry12):
        for mse_siena12 in os.listdir(henry12):
            if mse_siena12.startswith(mse) and str(mse_siena12).endswith(str(mse2)):
                siena_report = os.path.join(henry12, mse_siena12, "report.siena")
                if os.path.exists(siena_report):
                    print(siena_report)
                    siena_label = "True"
                    with open(siena_report, "r") as f:
                        lines = [line.strip() for line in f.readlines()]
                    for line in lines:
                        if line.startswith("finalPBVC"):
                            pbvc_henry12 = line.split()[1]
    if os.path.exists(henry10):
        for mse_siena in os.listdir(henry10):
            if mse_siena.startswith(mse) and str(mse_siena).endswith(str(mse2)):
                siena_report = os.path.join(henry10, mse_siena, "report.siena")
                if os.path.exists(siena_report):
                    print(siena_report)
                    siena_label = "True"
                    with open(siena_report, "r") as f:
                        lines = [line.strip() for line in f.readlines()]
                    for line in lines:
                        if line.startswith("finalPBVC"):
                            pbvc_henry10 = line.split()[1]
                    siena_path = henry10 + mse_siena
    if len(pbvc_henry10) > 4 and len(pbvc_henry12) > 4:
        # BUG FIX: the original compared the two PBVC values as *strings*;
        # lexicographic order is wrong for negative decimals (e.g. "-0.9"
        # sorts below "-1.2"), and this comparison gates an rmtree.
        # Compare numerically instead.
        if float(pbvc_henry10) > float(pbvc_henry12):
            final_pbvc = pbvc_henry12
            print(mse, mse2)
            print(pbvc_henry10, "this is larger than...", pbvc_henry12)
            print("removing.....", siena_path)
            shutil.rmtree(siena_path)
        else:
            final_pbvc = pbvc_henry10
    elif len(pbvc_henry12) > 4:
        final_pbvc = pbvc_henry12
    elif len(pbvc_henry10) > 4:
        final_pbvc = pbvc_henry10
    else:
        final_pbvc = ""
    return [final_pbvc, mse2_t1, siena_label]
elif len(t1) > 1 and len(flair) > 1: cmd = [ MONSTR, '--t1', t1, '--fl', flair, '--atlasdir', atlas, '--robust', 'ncpu', '4', 'NumAtlas', '6', '--o', output ] print(cmd) check_call(cmd) elif len(t1) > 1: cmd = [ MONSTR, '--t1', t1, '--atlasdir', atlas, '--robust', 'ncpu', '4', 'NumAtlas', '6', '--o', output ] print(cmd) check_call(cmd) else: print("{0} NO T1 image to perform brain extraction".format(mse)) rotate_BM(mse, output, t1) if __name__ == '__main__': parser = argparse.ArgumentParser( 'This code runs MONSTR brain extraction using EPIC brain atlases as a reference' ) parser.add_argument('-mse', help='Input the mseID') parser.add_argument args = parser.parse_args() mse = args.mse output = _get_output(mse) + '/' + mse + '/MONSTR/' if not os.path.exists(output): os.mkdir(output) run_monstr(mse)
def write_spreadsheet(df):
    # Write /home/sf522915/Documents/EPIC_siena.csv: one row per (msid, mse)
    # with sienax tissue volumes, T1/T2/FLAIR filenames and the siena PBVC.
    # NOTE(review): indentation was collapsed in the received source; the
    # nesting below (in particular where the final writerow sits) is
    # reconstructed — confirm against the original.
    writer = open("/home/sf522915/Documents/EPIC_siena.csv", "w")
    spreadsheet = csv.DictWriter(writer, fieldnames=[
        "msID", "mseID", "examdate", "siena", "PBVC", "sienax_flair",
        "sienax_t2", "sienax", "T1", "T2", "FLAIR",
        "vscale", "brain vol (u, mm3)", "WM vol (u, mm3)",
        "GM vol (u, mm3)", "vCSF vol (u, mm3)", "cortical vol (u, mm3)",
        "lesion vol (u, mm3)"])
    spreadsheet.writeheader()
    for _, row in df.iterrows():
        # normalize the subject id to "ms<digits>" with leading zeros removed
        msid = "ms" + str(row['msid']).replace("ms", "").lstrip("0")
        #msid = "ms" + msid.replace("ms", "").lstrip("0")
        siena_long = "/data/henry11/PBR_long/subjects/" + msid
        #date = row["examdate"]
        date = ""
        mse = str(row["mse"])
        print(msid, mse)
        # per-row output fields, all default to ""
        sienax_flair = ""
        sienax_t2 = ""
        sienax = ""
        t1_file = ""
        t2_file = ""
        flair_file = ""
        vscale = ""
        brain = ""
        wm = ""
        gm = ""
        csf = ""
        cortical = ""
        lesion = ""
        pbvc = ""
        FINALmse_siena = ""
        if mse.startswith("mse"):
            # --- sienax volumes: prefer FLAIR-masked, then T2-masked, then
            # the plain (no lesion mask) sienax run ---
            if os.path.exists(_get_output(mse) + "/" + mse + "/sienax_flair"):
                sienax_flair = "sienax_flair"
                # NOTE(review): `list` shadows the builtin here (and below)
                list = os.listdir(_get_output(mse) + '/' + mse + "/sienax_flair/")  # dir is your directory path
                number_files = len(list)
                # >30 files is used as a "run completed" heuristic
                if number_files > 30:
                    report = os.path.join(_get_output(mse), mse,
                                          "sienax_flair/report.sienax")
                    with open(report, "r") as f:
                        lines = [line.strip() for line in f.readlines()]
                    for line in lines:
                        if line.startswith("VSCALING"):
                            vscale = line.split()[1]
                        elif line.startswith("pgrey"):
                            cortical = line.split()[2]
                        elif line.startswith("vcsf"):
                            csf = line.split()[2]
                        elif line.startswith("GREY"):
                            gm = line.split()[2]
                        elif line.startswith("WHITE"):
                            wm = line.split()[2]
                        elif line.startswith("BRAIN"):
                            brain = line.split()[2]
                    # NOTE(review): nib.load without an existence check —
                    # raises if lesion_mask.nii.gz is missing
                    lm = os.path.join(_get_output(mse), mse,
                                      "sienax_flair/lesion_mask.nii.gz")
                    img = nib.load(lm)
                    data = img.get_data()
                    lesion = np.sum(data)
            elif os.path.exists(_get_output(mse) + "/" + mse + "/sienax_t2"):
                sienax_t2 = "sienax_t2"
                list = os.listdir(_get_output(mse) + '/' + mse + "/sienax_t2/")  # dir is your directory path
                number_files = len(list)
                if number_files > 30:
                    report = os.path.join(_get_output(mse), mse,
                                          "sienax_t2/report.sienax")
                    with open(report, "r") as f:
                        lines = [line.strip() for line in f.readlines()]
                    for line in lines:
                        if line.startswith("VSCALING"):
                            vscale = line.split()[1]
                        elif line.startswith("pgrey"):
                            cortical = line.split()[2]
                        elif line.startswith("vcsf"):
                            csf = line.split()[2]
                        elif line.startswith("GREY"):
                            gm = line.split()[2]
                        elif line.startswith("WHITE"):
                            wm = line.split()[2]
                        elif line.startswith("BRAIN"):
                            brain = line.split()[2]
                    lm = os.path.join(_get_output(mse), mse,
                                      "sienax_t2/lesion_mask.nii.gz")
                    img = nib.load(lm)
                    data = img.get_data()
                    lesion = np.sum(data)
            elif os.path.exists(_get_output(mse) + "/" + mse + "/sienax"):
                sienax = "sienax no lesion mask"
                list = os.listdir(_get_output(mse) + '/' + mse + "/sienax/")  # dir is your directory path
                # NOTE(review): [0] raises IndexError when no report matches
                report = glob(os.path.join(_get_output(mse), mse,
                                           "sienax/*/report.sienax"))[0]
                print(report)
                print("# #######################################################################")
                if os.path.exists(report):
                    print(report, "THIS IS THE SIENAX WITHOUT LESION MASKS")
                    with open(report, "r") as f:
                        lines = [line.strip() for line in f.readlines()]
                    for line in lines:
                        if line.startswith("VSCALING"):
                            vscale = line.split()[1]
                        elif line.startswith("pgrey"):
                            cortical = line.split()[2]
                        elif line.startswith("vcsf"):
                            csf = line.split()[2]
                        elif line.startswith("GREY"):
                            gm = line.split()[2]
                        elif line.startswith("WHITE"):
                            wm = line.split()[2]
                        elif line.startswith("BRAIN"):
                            brain = line.split()[2]
            # --- series filenames + scanner sniffing from the dicom dump ---
            if os.path.exists(_get_output(mse) + "/" + mse + "/alignment/status.json"):
                with open(_get_output(mse) + "/" + mse + "/alignment/status.json") as data_file:
                    data = json.load(data_file)
                if len(data["t1_files"]) == 0:
                    # no T1: this subject row is skipped entirely
                    continue
                else:
                    t1_file = data["t1_files"][-1]
                    t1_file = (t1_file.split('/')[-1])
                    #mseID = t1_file.split("-")[1]
                    series = t1_file.split("-")[2].lstrip("0")
                    if not len(series) > 0:
                        series = "1"
                    # NOTE(review): glob() does not raise on no-match, so
                    # this try/except is a no-op; `dcm` would only be
                    # unbound if glob itself raised.
                    try:
                        dcm = glob("/working/henry_temp/PBR/dicoms/{0}/E*/{1}/*.DCM".format(mse, series))
                    except:
                        pass
                    if len(dcm) > 0:
                        dcm = dcm[0]
                        cmd = ["dcmdump", dcm]
                        proc = Popen(cmd, stdout=PIPE)
                        lines = str([l.decode("utf-8").split() for l in proc.stdout.readlines()[:]])
                        row["scanner"] = lines
                        if "qb3-3t" in lines:
                            scanner = "qb3"
                        elif "SIEMENS" in lines:
                            scanner = "Skyra"
                        # NOTE(review): `"CB3TMR" or "CB-3TMR" in lines` is
                        # always truthy (the non-empty string short-circuits)
                        # — the SIEMENS/unknown branches below this one are
                        # unreachable; `scanner` is also never used afterward.
                        elif "CB3TMR" or "CB-3TMR" in lines:
                            scanner = "CB"
                        else:
                            scanner = "unknown"
                    # NOTE(review): these `row = {...}` rebindings discard
                    # the pandas row and are themselves overwritten by the
                    # full dict built below — dead stores.
                    if len(data["t2_files"]) == 0:
                        row = {"T2": "NONE"}
                    else:
                        t2_file = data["t2_files"][-1]
                        t2_file = (t2_file.split('/')[-1])
                        row = {"T2": t2_file}
                    if len(data["flair_files"]) == 0:
                        row = {"FLAIR": "NONE"}
                    else:
                        flair_file = data["flair_files"][-1]
                        flair_file = (flair_file.split('/')[-1])
                        row = {"FLAIR": flair_file}
            # --- longitudinal siena PBVC ---
            if os.path.exists(siena_long):
                for mse_siena in os.listdir(siena_long):
                    if mse_siena.startswith(mse):
                        FINALmse_siena = mse_siena
                        print(siena_long + '/' + FINALmse_siena, "THIS IS THE SIENA LONG DIRECTORY")
                        siena_report = os.path.join(siena_long, FINALmse_siena, "report.siena")
                        print(siena_report, "THIS IS THE SIENA REPORT")
                        if not os.path.exists(siena_report):
                            continue
                        with open(siena_report, "r") as f:
                            lines = [line.strip() for line in f.readlines()]
                        for line in lines:
                            if line.startswith("finalPBVC"):
                                pbvc = line.split()[1]
                                row = {"PBVC": pbvc}
                                print(pbvc, "THIS IS THE PBVC")
                        row = {"siena": FINALmse_siena}
                        print(mse_siena, "THIS IS THE MSE SIENA #####")
        # assemble the actual output row (supersedes the dead stores above)
        row = {"msID": msid, "mseID": mse, "examdate": date,
               "sienax_flair": sienax_flair, "sienax_t2": sienax_t2,
               "sienax": sienax, "T1": t1_file, "T2": t2_file,
               "FLAIR": flair_file,
               "vscale": vscale, "brain vol (u, mm3)": brain,
               "WM vol (u, mm3)": wm, "GM vol (u, mm3)": gm,
               "vCSF vol (u, mm3)": csf, "cortical vol (u, mm3)": cortical,
               "lesion vol (u, mm3)": lesion, "PBVC": pbvc,
               "siena": FINALmse_siena}
        print(row)
        spreadsheet.writerow(row)
    writer.close()
def reg_to_BL(mse_list, msid, jacobian_dir, reg_to_bl, combined_masks):
    """Register each follow-up timepoint and its masks to the baseline T1.

    mse_list[0] is the baseline exam.  For every later mse:
      * flirt its T1 to the baseline T1 (affine matrix + resampled image),
      * print the fnirt/grid-submit command for the nonlinear warp and
        jacobian (the fnirt itself is submitted to the grid, not run here),
      * symlink warp + jacobian into the subject's own jacobian/ folder,
      * resample every segmentation mask (FIRST, sienax wm/gm/csf/pGM/brain,
        lesion) into baseline space under *combined_masks*, binarize it,
        and place a binarized copy of the baseline mask alongside.
    """
    # defining baseline mse and non-baseline mse's
    bl_mse = mse_list[0]
    bl_t1 = get_t1(bl_mse)
    other_mse = mse_list[1:]
    for mse in other_mse:
        t1_in = get_t1(mse)
        if not os.path.exists(jacobian_dir):
            os.mkdir(jacobian_dir)
        if not os.path.exists(reg_to_bl):
            os.mkdir(reg_to_bl)
        # defining output files - all in folder /data/henry10/PBR_long/subjects/<msid>/jacobian/reg_to_baseline
        affine = reg_to_bl + mse + "_affinereg_" + bl_mse + ".mat"
        affine_reg = reg_to_bl + mse + "_affinereg_" + bl_mse + ".nii.gz"
        fnirt_reg = reg_to_bl + mse + "_fnirt_" + bl_mse + ".nii.gz"
        jacobian = reg_to_bl + mse + "_jacobian_" + bl_mse + ".nii.gz"
        if not os.path.exists(jacobian):
            # flirt registration, t1's to baseline t1
            if not os.path.exists(affine_reg):
                cmd = ['flirt', '-ref', bl_t1, '-in', t1_in,
                       '-omat', affine, '-out', affine_reg]
                Popen(cmd).wait()
            # fnirt registration, t1 to baseline t1, outputting the jacobian
            if not os.path.exists(fnirt_reg):
                cmd = ['fnirt', '--ref=' + bl_t1, '--in=' + t1_in,
                       '--aff=' + affine, '--iout=' + fnirt_reg,
                       '--jout=' + jacobian]
                print("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^")
                print("python /data/henry6/gina/scripts/grid_submit.py ",
                      "'{} {} {} {} {} {}'".format(
                          'fnirt', '--ref=' + bl_t1, '--in=' + t1_in,
                          '--aff=' + affine, '--iout=' + fnirt_reg,
                          '--jout=' + jacobian))
                cmd = ["python", "/data/henry6/gina/scripts/grid_submit.py",
                       "'", "{}".format('fnirt'),
                       "{}".format('--ref=' + bl_t1),
                       "{}".format('--in=' + t1_in),
                       "{}".format('--aff=' + affine),
                       "{}".format('--iout=' + fnirt_reg),
                       "{}".format('--jout=' + jacobian)]
                print(cmd)
                #Popen(cmd).wait()
        # sym link jacobian to subject specific folder
        mse_jacobian = "{}/{}/jacobian/".format(_get_output(mse), mse)
        if not os.path.exists(mse_jacobian):
            os.mkdir(mse_jacobian)
        try:
            os.symlink(fnirt_reg,
                       mse_jacobian + mse + "_fnirt_" + bl_mse + ".nii.gz")
            os.symlink(jacobian,
                       mse_jacobian + mse + "_jacobian_" + bl_mse + ".nii.gz")
        except OSError:
            # links already exist from a previous run (was a bare except)
            pass
        # retrieving various masks (get_sienax called once, not six times)
        first = get_first(mse)
        wm, gm, csf, pGM, bm, lesion = get_sienax(mse)
        brain_masks = [first, wm, gm, csf, pGM, bm, lesion]
        for mask in brain_masks:
            if not os.path.exists(mask):
                continue
            # pick the matching baseline mask and its short tag
            if mask == first:
                bl = get_first(bl_mse)
                seg = "first"
            elif mask == wm:
                bl = get_sienax(bl_mse)[0]
                seg = "wm"
            elif mask == gm:
                bl = get_sienax(bl_mse)[1]
                seg = "gm"
            elif mask == csf:
                bl = get_sienax(bl_mse)[2]
                seg = "csf"
            elif mask == pGM:
                bl = get_sienax(bl_mse)[3]
                seg = "pGM"
            elif mask == bm:
                bl = get_sienax(bl_mse)[4]
                seg = "bm"
            elif mask == lesion:
                bl = get_sienax(bl_mse)[5]
                seg = "les"
            else:
                bl = ""
            if not os.path.exists(combined_masks):
                os.mkdir(combined_masks)
            mask_out = '{}/{}_'.format(combined_masks, seg)
            print(mask_out)
            out_mse = mask_out + mse + "_affine_" + bl_mse + ".nii.gz"
            if not os.path.exists(out_mse):
                # apply affine matrix to masks to get masks in baseline space
                cmd = ["flirt", "-init", affine, "-applyxfm", "-in", mask,
                       "-ref", bl_t1, "-out", out_mse]
                Popen(cmd).wait()
            if not os.path.exists(out_mse.replace(".n", "-bin.n")):
                # BUG FIX: the original binarized the *native-space* `mask`
                # into the baseline-space "-bin" filename; binarize the
                # registered output instead (matching the FIRST handling in
                # this file's other reg_to_BL()).
                cmd = ["fslmaths", out_mse, "-bin",
                       out_mse.replace(".n", "-bin.n")]
                Popen(cmd).wait()
            if not os.path.exists(mask_out + bl_mse + "_seg-bin_BL.nii.gz"):
                # binarize baseline mask and copy over to combined masks folder
                cmd = ["fslmaths", bl, "-bin",
                       mask_out + bl_mse + "_seg-bin_BL.nii.gz"]
                print("fslmaths", bl, "-bin",
                      mask_out + bl_mse + "_seg-bin_BL.nii.gz")
                Popen(cmd).wait()
                print("**************")
                # (dropped a no-op .format(seg) on a placeholder-free string)
                shutil.copy(bl, mask_out + bl_mse + "-_segBL.nii.gz")
for _, row in df.iterrows(): msid = row["msid"] msid = "/data/henry6/mindcontrol_ucsf_env/watchlists/long/VEO/gina/" + msid + ".txt" ind = 0 with open(msid) as f: content = f.read().splitlines() size = len(content) - 1 index = 0 while index < size: index += 1 print(row["msid"], content[index - 1], content[index]) mse1 = content[index - 1] mse2 = content[index] if not os.path.exists(_get_output(mse1)+"/"+mse1+"/alignment/status.json") or \ not os.path.exists(_get_output(mse2)+"/"+mse2+"/alignment/status.json"): continue with open( _get_output(mse1) + "/" + mse1 + "/alignment/status.json") as data_file: data = json.load(data_file) if len(data["t1_files"]) == 0: t1_file1 = "none" else: t1_file1 = data["t1_files"][-1] t1_file1 = t1_file1.split("alignment")[0] + "alignment/baseline_mni/" + \ t1_file1.split('/')[-1].split('.')[0] + "_T1mni.nii.gz"