Example #1
# NOTE: the start of this example is truncated; the statement below is reconstructed with
# a hypothetical variable name so the fragment parses.
grouping_xlsx_path = os.path.join(
    connectomes_dir, subj + str_identifier + "_grouping.xlsx")

#anat_path = os.path.join(diff_dir + subj + "/" + subj + "_nii4D_masked_isotropic.nii.gz"
#anat_path = path.join(diff_dir, subj, subj + "_nii4D_masked_isotropic.nii.gz")

#outpath = "/Volumes/Data/Badea/Lab/mouse/C57_JS/Whiston_figures_files/" + subj + "*/"
##import glob
#dir = glob.glob(outpath)
#outpath = dir[0]
save_trk = True

# superseded by the gettrkpath call just below
tract_path = os.path.join(tract_dir, subj + str_identifier + "_pruned.trk")

tract_path, trkexists = gettrkpath(tract_dir,
                                   subj,
                                   str_identifier,
                                   pruned=True,
                                   verbose=True)
#labelspath = path.join(labels_input_dir, subj, subj + "_IITmean_RPI_labels.nii.gz")
labelmask, affine_labels, labelspath = getlabelmask(labels_input_dir, subj,
                                                    verbose)
labels_convert_path = os.path.join(labels_output_dir,
                                   subj + "_IITmean_RPI_labels_convert.nii.gz")

### TODO: build this as a list of appended rows and convert it to an array afterwards,
### instead of preallocating a fixed-size matrix (see the sketch below this block)
if textfilepath is not None:
    roi_selection_matrix = np.zeros((100, 2))
    i = 0
    with open(textfilepath, newline='', encoding='utf-8-sig') as csvfile:
        matrixread = csv.reader(csvfile, delimiter=' ', quotechar='|')
        for row in matrixread:
            # the loop body is truncated in the original example
            pass
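# The TODO above asks for a list-append version that is converted to an array at the end.
# A minimal sketch of that approach (illustration only: a two-column integer ROI file is
# assumed, and csv/numpy are assumed to be imported as in the rest of this example):
if textfilepath is not None:
    roi_rows = []
    with open(textfilepath, newline='', encoding='utf-8-sig') as csvfile:
        for row in csv.reader(csvfile, delimiter=' ', quotechar='|'):
            if row:  # skip blank lines
                roi_rows.append([int(row[0]), int(row[1])])
    roi_selection_matrix = np.array(roi_rows)  # one row per ROI pair, no preallocation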
save_temp_files = True
recenter = 1
contrast = 'dwi'
native_ref = ''

orient_string = os.path.join(path_DWI, 'relative_orientation.txt')
with open(orient_string, mode='r') as orient_file:
    orient_relative = orient_file.read()
# strip stray whitespace/newlines around the orientation codes
orientation_out = orient_relative.split(',')[0].split(':')[1].strip()
orientation_in = orient_relative.split(',')[1].split(':')[1].strip()
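# (Assumption) relative_orientation.txt is expected to hold two comma-separated
# 'key: value' fields, e.g. something like "orientation out: RAS, orientation in: ALS",
# so the first field yields the output orientation code and the second the input one.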

nii_test_files = 1

for subj in subjects:
	subj_trk, _ = gettrkpath(path_TRK, subj, str_identifier, pruned=True, verbose=verbose)
	#if not os.path.exists(subj_trk):
	#	print(f'could not find {subj_trk}, skipping')
	#continue
	trkname = os.path.basename(subj_trk)
	trk_MDT_space = os.path.join(path_TRK_output, trkname)

	trans = os.path.join(path_transforms, f"{subj}_0DerivedInitialMovingTranslation.mat")
	rigid = os.path.join(path_transforms, f"{subj}_rigid.mat")
	affine = os.path.join(path_transforms, f"{subj}_affine.txt")
	affine_orig = os.path.join(path_transforms, f"{subj}_affine.mat")
	runno_to_MDT = os.path.join(path_transforms, f'{subj}_to_MDT_warp.nii.gz')
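	# Note: the four files above appear to form the subject-to-MDT transform chain written
	# by the registration step: an initial translation (.mat), a rigid transform (.mat),
	# an affine (saved as .txt or .mat depending on the run), and the nonlinear warp to
	# the MDT template (.nii.gz). (Descriptive note inferred from the file names.)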

	print(f'Beginning the process to transfer trk file {subj_trk} to {trk_MDT_space}')

	if nii_test_files:
		pass  # body truncated here; the original example resumes below in a different, per-group fragment

     # (the head of this statement is reconstructed by analogy with the 'points' path below)
     grouping_files[ref, 'lines'] = (os.path.join(
         centroid_folder, group_str + '_MDT' + ratio_str + '_' +
         index_to_struct[target_tuple[0]] + '_to_' +
         index_to_struct[target_tuple[1]] + '_' + ref + '_lines.py'))
     grouping_files[ref, 'points'] = (os.path.join(
         centroid_folder, group_str + '_MDT' + ratio_str + '_' +
         index_to_struct[target_tuple[0]] + '_to_' +
         index_to_struct[target_tuple[1]] + '_' + ref + '_points.py'))
     list_files, exists = check_files(grouping_files)
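 # Recompute this group's outputs when the centroid file, any of the grouping files, or
 # the optional streamline/stats outputs are missing, or when overwrite is requested.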
 if not os.path.exists(centroid_file_path) or not np.all(exists) or (
         not os.path.exists(streamline_file_path)
         and write_streamlines) or (not os.path.exists(stats_path)
                                    and write_stats) or overwrite:
     subjects = groups_subjects[group]
     subj = 1
     for subject in subjects:
         trkpath, exists = gettrkpath(TRK_folder,
                                      subject,
                                      str_identifier,
                                      pruned=False,
                                      verbose=True)
         if not exists:
             txt = f'Could not find subject {subject} at {TRK_folder} with {str_identifier}'
             warnings.warn(txt)
             continue
         #streamlines, header, _ = unload_trk(trkpath)
         if np.shape(groupLines[group, ref])[0] != np.shape(
                 groupstreamlines[group])[0]:
             raise Exception(
                 'mismatch between groupLines and groupstreamlines lengths for this group')
         trkdata = load_trk(trkpath, 'same')
         header = trkdata.space_attributes
         picklepath_connectome = os.path.join(
             pickle_folder, subject + str_identifier + '_connectomes.p')
         # the original example is cut off mid-statement here; the filename below is a
         # plausible reconstruction by analogy with the connectome pickle path above
         picklepath_grouping = os.path.join(
             pickle_folder, subject + str_identifier + '_grouping.p')
Example #4
        #subj_torecenter_transform_affine = get_affine_transform_test(subj_affine, subj_affine_new)
        #added_trans = subj_affine[:3, 3] + np.multiply(preprocess_affine[:3, 3], [1,1,-1]) + [-1,-1,0]
        #added_trans = subj_affine[:3, 3] + np.multiply(subjtorecenter_affine[:3, 3], [-1,-1,-1])
        #subj_torecenter_transform_affine[:3, 3] = reorient_trans + added_trans

        SAMBA_input_real_file = os.path.join(path_DWI, f'{subj}_dwi{ext}')

        #new_affine, translation, translate_affine = recenter_nii_affine(SAMBA_input_real_file, return_translation=True)

        #subj_trk, _ = gettrkpath(path_TRK, subj, str_identifier, pruned=True, verbose=verbose)

        check_dif_ratio(path_TRK, subj, str_identifier, ratio)
        subj_trk, trkexists = gettrkpath(path_TRK,
                                         subj,
                                         str_identifier,
                                         pruned=prune,
                                         verbose=False)

        _, exists = check_files([trans, rigid, runno_to_MDT])
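        # (Assumption) check_files is taken to return the checked paths together with a
        # parallel array of 0/1 existence flags, so the tests below fire when any of the
        # listed transform files is missing.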
        if not np.all(exists):
            raise Exception('missing transform file (translation, rigid, or warp to MDT)')
        if not os.path.exists(affine) and not os.path.exists(affine_orig):
            raise Exception('missing affine transform file')
        streamlines_prepro, header = unload_trk(subj_trk)

        #streamlines_prepro, header_prepro = unload_trk(trk_preprocess)
        mat_struct = loadmat(trans)  # scipy.io.loadmat on the ANTs translation .mat file
        # skip the '__header__'/'__version__'/'__globals__' entries that scipy adds
        var_name = [key for key in mat_struct.keys() if not key.startswith('__')][0]
        later_trans_mat = mat_struct[var_name]
        new_transmat = np.eye(4)
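        # The example is truncated here. A plausible continuation (sketch only, not the
        # original code) would embed the loaded translation into the homogeneous matrix,
        # e.g. new_transmat[:3, 3] = np.squeeze(later_trans_mat)[:3], before composing it
        # with the remaining transforms.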