def run_probtrackx2(i, seeds_text, dwi_dir, probtrackx_output_dir_path, procmem,
                    num_total_samples, vent_CSF_diff_mask_path=None, way_mask=None):
    import os
    import random
    import nipype.interfaces.fsl as fsl

    samples_i = int(round(float(num_total_samples) / float(procmem[0]), 0))
    nodif_brain_mask_path = "%s%s" % (dwi_dir, '/nodif_brain_mask.nii.gz')
    merged_th_samples_path = "%s%s" % (dwi_dir, '/merged_th1samples.nii.gz')
    merged_f_samples_path = "%s%s" % (dwi_dir, '/merged_f1samples.nii.gz')
    merged_ph_samples_path = "%s%s" % (dwi_dir, '/merged_ph1samples.nii.gz')

    tmp_dir = "%s%s%s" % (probtrackx_output_dir_path, '/tmp_samples_', str(i))
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    probtrackx2 = fsl.ProbTrackX2()
    probtrackx2.inputs.network = True
    probtrackx2.inputs.seed = seeds_text
    probtrackx2.inputs.onewaycondition = True
    probtrackx2.inputs.c_thresh = 0.2
    probtrackx2.inputs.n_steps = 2000
    probtrackx2.inputs.step_length = 0.5
    probtrackx2.inputs.n_samples = samples_i
    probtrackx2.inputs.dist_thresh = 0.0
    probtrackx2.inputs.opd = True
    probtrackx2.inputs.loop_check = True
    probtrackx2.inputs.omatrix1 = False
    probtrackx2.overwrite = True
    probtrackx2.inputs.verbose = False
    probtrackx2.inputs.mask = nodif_brain_mask_path
    probtrackx2.inputs.out_dir = tmp_dir
    probtrackx2.inputs.thsamples = merged_th_samples_path
    probtrackx2.inputs.fsamples = merged_f_samples_path
    probtrackx2.inputs.phsamples = merged_ph_samples_path
    probtrackx2.inputs.use_anisotropy = False

    if vent_CSF_diff_mask_path:
        probtrackx2.inputs.avoid_mp = vent_CSF_diff_mask_path
    else:
        print('No ventricular CSF mask used. This is not recommended.')

    if way_mask:
        probtrackx2.inputs.waypoints = way_mask
        probtrackx2.inputs.waycond = 'OR'
    else:
        print('No waypoint mask used. This will instantiate a computationally '
              'expensive probtrackx run and is generally not recommended.')

    rseed_arg = ' --rseed=' + str(random.randint(1, 1000))
    os.chdir(dwi_dir)
    os.system(probtrackx2.cmdline + rseed_arg)
    del probtrackx2
    return

def run_probtrackx2(i, seeds_text, bedpostx_dir, probtrackx_output_dir_path,
                    vent_CSF_diff_mask_path, WM_diff_mask_path, procmem):
    import os
    import random
    import nipype.interfaces.fsl as fsl

    num_total_samples = 5000
    samples_i = int(round(float(num_total_samples) / float(procmem[0]), 0))
    nodif_brain_mask_path = bedpostx_dir + '/nodif_brain_mask.nii.gz'
    merged_th_samples_path = bedpostx_dir + '/merged_th1samples.nii.gz'
    merged_f_samples_path = bedpostx_dir + '/merged_f1samples.nii.gz'
    merged_ph_samples_path = bedpostx_dir + '/merged_ph1samples.nii.gz'
    max_i = max(range(int(procmem[0])))

    tmp_dir = probtrackx_output_dir_path + '/tmp_samples_' + str(i)
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    probtrackx2 = fsl.ProbTrackX2()
    probtrackx2.inputs.network = True
    probtrackx2.inputs.seed = seeds_text
    probtrackx2.inputs.onewaycondition = True
    probtrackx2.inputs.c_thresh = 0.2
    probtrackx2.inputs.n_steps = 2000
    probtrackx2.inputs.step_length = 0.5
    probtrackx2.inputs.n_samples = samples_i
    probtrackx2.inputs.dist_thresh = 0.0
    probtrackx2.inputs.opd = True
    probtrackx2.inputs.loop_check = True
    probtrackx2.inputs.omatrix1 = True
    probtrackx2.overwrite = True
    probtrackx2.inputs.verbose = True
    probtrackx2.inputs.mask = nodif_brain_mask_path
    probtrackx2.inputs.out_dir = tmp_dir
    probtrackx2.inputs.thsamples = merged_th_samples_path
    probtrackx2.inputs.fsamples = merged_f_samples_path
    probtrackx2.inputs.phsamples = merged_ph_samples_path
    probtrackx2.inputs.use_anisotropy = True

    # Optional exclusion and waypoint masks; if either assignment fails
    # (e.g. the path is undefined), the mask is simply skipped.
    try:
        probtrackx2.inputs.avoid_mp = vent_CSF_diff_mask_path
    except Exception:
        pass
    try:
        probtrackx2.inputs.waypoints = WM_diff_mask_path
        probtrackx2.inputs.waycond = 'OR'
    except Exception:
        pass

    rseed_arg = ' --rseed=' + str(random.randint(1, 1000))
    os.chdir(bedpostx_dir)
    os.system(probtrackx2.cmdline + rseed_arg)
    del probtrackx2

    # Touch a sentinel file so the caller can tell when this chunk has finished.
    filename = probtrackx_output_dir_path + '/' + str(i) + '_complete.txt'
    open(filename, 'w').close()
    return max_i

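# A minimal sketch (not from the source) of how the second run_probtrackx2 variant
# above might be fanned out across procmem[0] parallel chunks; every path and the
# procmem value below are hypothetical placeholders.
if __name__ == '__main__':
    from multiprocessing import Pool

    bedpostx_dir = '/data/subj01/Diffusion.bedpostX'                  # hypothetical
    probtrackx_output_dir_path = bedpostx_dir + '/probtrackx'         # hypothetical
    seeds_text = probtrackx_output_dir_path + '/masks.txt'            # hypothetical
    vent_CSF_diff_mask_path = bedpostx_dir + '/vent_CSF_diff.nii.gz'  # hypothetical
    WM_diff_mask_path = bedpostx_dir + '/WM_diff.nii.gz'              # hypothetical
    procmem = [4, 8]  # assumed [n_processes, memory_in_GB]

    arg_sets = [(i, seeds_text, bedpostx_dir, probtrackx_output_dir_path,
                 vent_CSF_diff_mask_path, WM_diff_mask_path, procmem)
                for i in range(int(procmem[0]))]
    with Pool(int(procmem[0])) as pool:
        pool.starmap(run_probtrackx2, arg_sets)
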
def create_workflow(name='tracking'):
    from nipype.workflows.dmri.fsl import create_bedpostx_pipeline
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wf = pe.Workflow(name=name)
    bed_wf = create_bedpostx_pipeline()
    inputspec = pe.Node(niu.IdentityInterface(fields=[
        'dwi', 'mask', 'reg', 'mean', 'bvecs', 'bvals', 'subject_id', 'surf_dir'
    ]), name='inputspec')
    outputspec = pe.Node(niu.IdentityInterface(
        fields=['fdt_paths', 'log', 'particle_files', 'targets', 'way_total']),
        name='outputspec')

    wf.connect(inputspec, 'dwi', bed_wf, 'inputnode.dwi')
    wf.connect(inputspec, 'mask', bed_wf, 'inputnode.mask')
    wf.connect(inputspec, 'bvecs', bed_wf, 'inputnode.bvecs')
    wf.connect(inputspec, 'bvals', bed_wf, 'inputnode.bvals')

    # Prefer probtrackx2 when the installed nipype/FSL provides it; otherwise
    # fall back to the original probtrackx interface.
    try:
        prob2 = fsl.ProbTrackX2(verbose=2)
    except AttributeError:
        prob2 = fsl.ProbTrackX(verbose=2)
    #prob2._cmd = 'probtrackx2'
    #prob2.inputs.mode = Undefined

    track = pe.MapNode(prob2, name='probtrackx', iterfield=['seed'])
    wf.connect(bed_wf, 'outputnode.thsamples', track, 'thsamples')
    wf.connect(bed_wf, 'outputnode.phsamples', track, 'phsamples')
    wf.connect(bed_wf, 'outputnode.fsamples', track, 'fsamples')
    wf.connect(inputspec, 'mask', track, 'mask')

    regions = get_regions()
    wf.connect(inputspec, 'subject_id', regions, 'inputspec.subject_id')
    wf.connect(inputspec, 'surf_dir', regions, 'inputspec.surf_dir')
    wf.connect(inputspec, 'reg', regions, 'inputspec.reg_file')
    wf.connect(inputspec, 'mean', regions, 'inputspec.mean')
    wf.connect(regions, 'outputspec.ROIs', track, 'seed')
    wf.connect(regions, 'outputspec.ROIs', track, 'target_masks')

    wf.connect(track, 'fdt_paths', outputspec, 'fdt_paths')
    wf.connect(track, 'log', outputspec, 'log')
    wf.connect(track, 'particle_files', outputspec, 'particle_files')
    wf.connect(track, 'targets', outputspec, 'targets')
    wf.connect(track, 'way_total', outputspec, 'way_total')
    return wf

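# A minimal usage sketch (not from the source) for create_workflow above; all file
# paths and the subject ID are hypothetical placeholders, and get_regions() plus a
# FreeSurfer surf_dir are assumed to be available.
if __name__ == '__main__':
    wf = create_workflow(name='tracking')
    wf.base_dir = '/scratch/tracking_work'                             # hypothetical
    wf.inputs.inputspec.dwi = '/data/subj01/dwi.nii.gz'                # hypothetical
    wf.inputs.inputspec.mask = '/data/subj01/nodif_brain_mask.nii.gz'  # hypothetical
    wf.inputs.inputspec.bvecs = '/data/subj01/bvecs'                   # hypothetical
    wf.inputs.inputspec.bvals = '/data/subj01/bvals'                   # hypothetical
    wf.inputs.inputspec.reg = '/data/subj01/register.dat'              # hypothetical
    wf.inputs.inputspec.mean = '/data/subj01/mean.nii.gz'              # hypothetical
    wf.inputs.inputspec.subject_id = 'subj01'                          # hypothetical
    wf.inputs.inputspec.surf_dir = '/data/freesurfer'                  # hypothetical
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
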
grad_dev = os.path.join(datadir, 'Diffusion_7T', 'grad_dev.nii.gz')
brain_mask = os.path.join(datadir, 'Diffusion_7T', 'nodif_brain_mask.nii.gz')
seed = os.path.join(datadir, 'Diffusion_7T', 'IC_L_sphere_bin.nii.gz')

from nipype.interfaces import fsl

bedp = pe.Node(fsl.BEDPOSTX5(), name='bedpost')
bedp.inputs.bvecs = bvecs
bedp.inputs.bvals = bvals
bedp.inputs.dwi = dwi
bedp.inputs.mask = brain_mask
bedp.inputs.grad_dev = grad_dev
bedp.inputs.n_fibres = 1
bedp.inputs.use_gpu = True
bedp.inputs.out_dir = os.path.join(datadir, 'Diffusion_7T.bedpostX')

pbx2 = pe.Node(fsl.ProbTrackX2(), name='probtrackx')
pbx2.inputs.seed = seed
pbx2.inputs.mask = brain_mask
pbx2.inputs.out_dir = os.path.join(datadir, 'Diffusion_7T.probtrackx2')

wf = pe.Workflow(name='fdt')
wf.connect([(bedp, pbx2, [('merged_fsamples', 'fsamples'),
                          ('merged_phsamples', 'phsamples'),
                          ('merged_thsamples', 'thsamples')])])

# plugin_args must be a dict; the scheduler arguments go under 'sbatch_args'.
wf.run(plugin='SLURM',
       plugin_args={'sbatch_args':
                    '--gres=gpu:1 --time=18:00:00 --qos=gablab --mem=40G -c 4'})

cvtwarp_dti_to_mni = pe.Node(interface=fsl.ConvertWarp(),
                             name='cvtwarp_dti_to_mni')
stout_tractography.connect(dti_datasource, 'nodif_brain_mask',
                           cvtwarp_dti_to_mni, 'reference')
stout_tractography.connect(dti_datasource, 'MNI_to_struct',
                           cvtwarp_dti_to_mni, 'warp1')
stout_tractography.connect(dti_datasource, 'struct_to_nodif_aff',
                           cvtwarp_dti_to_mni, 'postmat')
# $statement .= " --ref=$WORKINGDATAPATH/" . $subj[$i] . "/DTI/data/nodif_brain_mask.nii.gz ";
# $statement .= " --warp1=$WORKINGDATAPATH/" . $subj[$i] . "/xfms/MNI_warp_struc_warpfield.nii.gz ";
# $statement .= " --postmat=$WORKINGDATAPATH/" . $subj[$i] . "/xfms/struc_12dof_nodif.mat ";
# $statement .= " --out=$WORKINGDATAPATH/" . $subj[$i] . "/xfms/MNI_warp_struc_12dof_nodif_warpfield.nii.gz ";

# PERFORM PROBABILISTIC TRACTOGRAPHY
pbx2 = pe.Node(interface=fsl.ProbTrackX2(), name='pbx2')
pbx2.inputs.c_thresh = 0.2
pbx2.inputs.n_steps = 2000
pbx2.inputs.step_length = 0.5
pbx2.inputs.n_samples = 5000
pbx2.inputs.opd = True
pbx2.inputs.loop_check = True
pbx2.inputs.omatrix2 = True  # this requires that target2 also be set
pbx2.inputs.correct_path_distribution = True  # corresponds to the --pd flag
pbx2.inputs.onewaycondition = True
pbx2.inputs.target2 = os.path.join(
    RAWDATAPATH, "MNI152_T1_1mm_brain_mask_downsample_2.nii.gz")
pbx2.inputs.rand_fib = 0  # this will eventually become an iterable
pbx2.inputs.seed = os.path.join(RAWDATAPATH, "ROIs/Human_IFG-vPrCG_L.nii.gz")

def run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path, NETWORK, coords_MNI,
                       node_size, atlas_select, atlas_name, label_names,
                       plot_switch):
    edge_threshold = 0.90
    connectome_fdt_thresh = 1000

    ####Auto-set INPUTS####
    nodif_brain_mask_path = bedpostx_dir + '/nodif_brain_mask.nii.gz'
    merged_th_samples_path = bedpostx_dir + '/merged_th1samples.nii.gz'
    merged_f_samples_path = bedpostx_dir + '/merged_f1samples.nii.gz'
    merged_ph_samples_path = bedpostx_dir + '/merged_ph1samples.nii.gz'
    input_MNI = FSLDIR + '/data/standard/MNI152_T1_2mm_brain.nii.gz'
    probtrackx_output_dir_path = bedpostx_dir + '/probtrackx_' + NETWORK
    ####Auto-set INPUTS####

    ##Delete any existing roi spheres
    del_files_spheres = glob.glob(bedpostx_dir + '/roi_sphere*diff.nii.gz')
    try:
        for i in del_files_spheres:
            os.remove(i)
    except Exception:
        pass

    ##Create transform matrix between diff and MNI using FLIRT
    flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                    name='coregister')
    flirt.inputs.reference = merged_f_samples_path
    flirt.inputs.in_file = input_MNI
    flirt.inputs.out_matrix_file = bedpostx_dir + '/xfms/MNI2diff.mat'
    flirt.run()

    ##Apply transform between diff and MNI using FLIRT
    flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                    name='coregister')
    flirt.inputs.reference = merged_f_samples_path
    flirt.inputs.in_file = input_MNI
    flirt.inputs.apply_xfm = True
    flirt.inputs.in_matrix_file = bedpostx_dir + '/xfms/MNI2diff.mat'
    flirt.inputs.out_file = bedpostx_dir + '/xfms/MNI2diff_affine.nii.gz'
    flirt.run()

    x_vox = np.diagonal(
        masking._load_mask_img(nodif_brain_mask_path)[1][:3, 0:3])[0]
    y_vox = np.diagonal(
        masking._load_mask_img(nodif_brain_mask_path)[1][:3, 0:3])[1]
    z_vox = np.diagonal(
        masking._load_mask_img(nodif_brain_mask_path)[1][:3, 0:3])[2]

    def mmToVox(mmcoords):
        voxcoords = ['', '', '']
        voxcoords[0] = int((round(int(mmcoords[0]) / x_vox)) + 45)
        voxcoords[1] = int((round(int(mmcoords[1]) / y_vox)) + 63)
        voxcoords[2] = int((round(int(mmcoords[2]) / z_vox)) + 36)
        return voxcoords

    ##Convert coords back to voxels
    coords_vox = []
    for coord in coords_MNI:
        coords_vox.append(mmToVox(coord))
    coords = list(tuple(x) for x in coords_vox)

    j = 0
    for i in coords:
        ##Grow spheres at ROI
        X = coords[j][0]
        Y = coords[j][1]
        Z = coords[j][2]
        out_file1 = bedpostx_dir + '/roi_point_' + str(j) + '.nii.gz'
        args = '-mul 0 -add 1 -roi ' + str(X) + ' 1 ' + str(Y) + ' 1 ' + \
            str(Z) + ' 1 0 1'
        maths = fsl.ImageMaths(in_file=input_MNI, op_string=args,
                               out_file=out_file1)
        os.system(maths.cmdline + ' -odt float')

        out_file2 = bedpostx_dir + '/roi_sphere_' + str(j) + '.nii.gz'
        args = '-kernel sphere ' + str(node_size) + ' -fmean -bin'
        maths = fsl.ImageMaths(in_file=out_file1, op_string=args,
                               out_file=out_file2)
        os.system(maths.cmdline + ' -odt float')

        ##Map ROIs from Standard Space to diffusion Space:
        ##Applying xfm and input matrix to transform ROIs between diff and MNI using FLIRT
        flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                        name='coregister')
        flirt.inputs.reference = nodif_brain_mask_path
        flirt.inputs.in_file = out_file2
        out_file_diff = out_file2.split('.nii')[0] + '_diff.nii.gz'
        flirt.inputs.out_file = out_file_diff
        flirt.inputs.apply_xfm = True
        flirt.inputs.in_matrix_file = bedpostx_dir + '/xfms/MNI2diff.mat'
        flirt.run()
        j = j + 1

    if not os.path.exists(probtrackx_output_dir_path):
        os.makedirs(probtrackx_output_dir_path)

    seed_files = glob.glob(bedpostx_dir + '/*diff.nii.gz')
    seeds_text = probtrackx_output_dir_path + '/masks.txt'
    try:
        os.remove(seeds_text)
    except OSError:
        pass
    seeds_file_list = []
    for seed_file in seed_files:
        seeds_file_list.append(seed_file)
    f = open(seeds_text, 'w')
    l1 = map(lambda x: x + '\n', seeds_file_list)
    f.writelines(l1)
    f.close()

    del_files_points = glob.glob(bedpostx_dir + '/roi_point*.nii.gz')
    for i in del_files_points:
        os.remove(i)
    del_files_spheres = glob.glob(bedpostx_dir + '/roi_sphere*[!diff].nii.gz')
    for i in del_files_spheres:
        os.remove(i)

    mx_path = dir_path + '/' + str(ID) + '_' + NETWORK + '_structural_mx.txt'

    probtrackx2 = pe.Node(interface=fsl.ProbTrackX2(), name='probtrackx2')
    probtrackx2.inputs.network = True
    probtrackx2.inputs.seed = seeds_text
    probtrackx2.inputs.onewaycondition = True
    probtrackx2.inputs.c_thresh = 0.2
    probtrackx2.inputs.n_steps = 2000
    probtrackx2.inputs.step_length = 0.5
    probtrackx2.inputs.n_samples = 5000
    probtrackx2.inputs.dist_thresh = 0.0
    probtrackx2.inputs.opd = True
    probtrackx2.inputs.loop_check = True
    probtrackx2.inputs.omatrix1 = True
    probtrackx2.overwrite = True
    probtrackx2.inputs.verbose = True
    probtrackx2.inputs.mask = nodif_brain_mask_path
    probtrackx2.inputs.out_dir = probtrackx_output_dir_path
    probtrackx2.inputs.thsamples = merged_th_samples_path
    probtrackx2.inputs.fsamples = merged_f_samples_path
    probtrackx2.inputs.phsamples = merged_ph_samples_path
    probtrackx2.iterables = ("seed", seed_files)
    try:
        # vetricular_CSF_mask_path is not defined in this scope, so the
        # exclusion mask is silently skipped when the assignment fails.
        probtrackx2.inputs.avoid_mp = vetricular_CSF_mask_path
    except Exception:
        pass
    probtrackx2.run()
    del probtrackx2

    if os.path.exists(probtrackx_output_dir_path + '/fdt_network_matrix'):
        mx = np.genfromtxt(probtrackx_output_dir_path + '/fdt_network_matrix')
        waytotal = np.genfromtxt(probtrackx_output_dir_path + '/waytotal')
        np.seterr(divide='ignore', invalid='ignore')
        conn_matrix = np.divide(mx, waytotal)
        conn_matrix[np.isnan(conn_matrix)] = 0
        conn_matrix = np.nan_to_num(conn_matrix)
        conn_matrix = normalize(conn_matrix)

        ##Save matrix
        out_path_mx = dir_path + '/' + str(ID) + '_' + NETWORK + \
            '_structural_mx.txt'
        np.savetxt(out_path_mx, conn_matrix, delimiter='\t')

        if plot_switch == True:
            rois_num = conn_matrix.shape[0]
            print("Creating plot of dimensions:\n" + str(rois_num) + ' x ' +
                  str(rois_num))
            plt.figure(figsize=(10, 10))
            plt.imshow(conn_matrix, interpolation="nearest", vmax=1, vmin=-1,
                       cmap=plt.cm.RdBu_r)

            ##And display the labels
            plt.colorbar()
            plt.title(atlas_select.upper() + ' ' + NETWORK +
                      ' Structural Connectivity')

            out_path_fig = dir_path + '/' + str(ID) + '_' + NETWORK + \
                '_structural_adj_mat.png'
            plt.savefig(out_path_fig)
            plt.close()

        conn_matrix_symm = np.maximum(conn_matrix, conn_matrix.transpose())

    fdt_paths_loc = probtrackx_output_dir_path + '/fdt_paths.nii.gz'

    ##Plotting with glass brain
    ##Create transform matrix between diff and MNI using FLIRT
    flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                    name='coregister')
    flirt.inputs.reference = input_MNI
    flirt.inputs.in_file = nodif_brain_mask_path
    flirt.inputs.out_matrix_file = bedpostx_dir + '/xfms/diff2MNI.mat'
    flirt.run()

    ##Apply transform between diff and MNI using FLIRT
    flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                    name='coregister')
    flirt.inputs.reference = input_MNI
    flirt.inputs.in_file = nodif_brain_mask_path
    flirt.inputs.apply_xfm = True
    flirt.inputs.in_matrix_file = bedpostx_dir + '/xfms/diff2MNI.mat'
    flirt.inputs.out_file = bedpostx_dir + '/xfms/diff2MNI_affine.nii.gz'
    flirt.run()

    flirt = pe.Node(interface=fsl.FLIRT(cost_func='mutualinfo'),
                    name='coregister')
    flirt.inputs.reference = input_MNI
    flirt.inputs.in_file = fdt_paths_loc
    out_file_MNI = fdt_paths_loc.split('.nii')[0] + '_MNI.nii.gz'
    flirt.inputs.out_file = out_file_MNI
    flirt.inputs.apply_xfm = True
    flirt.inputs.in_matrix_file = bedpostx_dir + '/xfms/diff2MNI.mat'
    flirt.run()

    fdt_paths_MNI_loc = probtrackx_output_dir_path + '/fdt_paths_MNI.nii.gz'

    if plot_switch == True:
        norm = colors.Normalize(vmin=-1, vmax=1)
        clust_pal = sns.color_palette("Blues_r", 4)
        clust_colors = colors.to_rgba_array(clust_pal)

        connectome = plotting.plot_connectome(
            conn_matrix_symm, coords_MNI, edge_threshold=edge_threshold,
            node_color=clust_colors, edge_cmap=plotting.cm.black_blue_r)
        connectome.add_overlay(img=fdt_paths_MNI_loc,
                               threshold=connectome_fdt_thresh,
                               cmap=plotting.cm.cyan_copper_r)
        out_file_path = dir_path + '/structural_connectome_fig_' + NETWORK + \
            '_' + str(ID) + '.png'
        plt.savefig(out_file_path)
        plt.close()

        from pynets import plotting as pynplot
        NETWORK = NETWORK + '_structural'
        pynplot.plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path,
                                  ID, NETWORK, label_names)

    if NETWORK is not None:
        est_path = dir_path + '/' + ID + '_' + NETWORK + '_structural_est.txt'
    else:
        est_path = dir_path + '/' + ID + '_structural_est.txt'
    try:
        np.savetxt(est_path, conn_matrix_symm, delimiter='\t')
    except RuntimeError:
        print('Diffusion network connectome failed!')
    return est_path

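# A minimal call sketch (not from the source) for run_struct_mapping above; every
# argument value is a hypothetical placeholder chosen only to show the expected
# shapes (MNI coordinates in mm, node_size as a sphere radius, etc.).
if __name__ == '__main__':
    est_path = run_struct_mapping(
        FSLDIR='/usr/local/fsl',                      # hypothetical FSL install
        ID='997',                                     # hypothetical subject ID
        bedpostx_dir='/data/997/Diffusion.bedpostX',  # hypothetical bedpostX dir
        dir_path='/data/997/pynets_out',              # hypothetical output dir
        NETWORK='DMN',                                # hypothetical network label
        coords_MNI=[(0, -52, 26), (-46, -66, 30)],    # hypothetical MNI coords (mm)
        node_size=3,                                  # hypothetical sphere radius
        atlas_select='coords_power_2011',             # hypothetical atlas name
        atlas_name='Power 2011',                      # hypothetical atlas label
        label_names=['PCC', 'LatPar_L'],              # hypothetical node labels
        plot_switch=False)
    print(est_path)
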
datasource.inputs.field_template = dict(
    thsamples='%s/T1w/Diffusion.bedpostX/merged_%s.nii*',
    fsamples='%s/T1w/Diffusion.bedpostX/merged_%s.nii*',
    phsamples='%s/T1w/Diffusion.bedpostX/merged_%s.nii*',
    nodif_brain_mask='%s/T1w/Diffusion.bedpostX/%s.nii*',
    mniROIs='addlInfoV2/subject/%s/DTI_ROIs/Human_*_trans.nii.gz')
datasource.inputs.template_args = dict(
    thsamples=[['subject_id', 'th1samples']],
    phsamples=[['subject_id', 'ph1samples']],
    fsamples=[['subject_id', 'f1samples']],
    nodif_brain_mask=[['subject_id', 'nodif_brain_mask']],
    mniROIs=[['subject_id']])

pbx2 = pe.MapNode(interface=fsl.ProbTrackX2(), name='pbx2', iterfield=['seed'])
pbx2.inputs.c_thresh = 0.2     # -c 0.2 curvature threshold
pbx2.inputs.n_steps = 2000     # -S 2000
pbx2.inputs.step_length = 0.5  # --steplength=0.5
pbx2.inputs.n_samples = 25000  # -P 25000
pbx2.inputs.opd = True
pbx2.inputs.loop_check = True
pbx2.inputs.correct_path_distribution = True  # --pd, i.e. distance correction

#runpbx.connect(subj_infosource, 'subject_id', datasource, 'subject_id')
runpbx2 = pe.Workflow(name="runpbx2_gpu_dtispace_fixedwarps")
runpbx2.base_dir = "/data/NipypeScratch/"

samples_base_name_fxn = lambda x: x.replace('_th1samples.nii.gz', '')

def pbX_wf(subject_id, sink_directory, name='hcp_pbX'):
    hcp_pbX_wf = pe.Workflow(name='hcp_pbX_wf')

    # Dictionary of template arguments (keys) for the datagrabber below.
    info = dict(merged_thsamples=[['subject_id', 'merged_th']],
                merged_phsamples=[['subject_id', 'merged_ph']],
                merged_fsamples=[['subject_id', 'merged_f']],
                dmri_brain=[['subject_id', 'T1w_acpc_dc_restore_1.25']],
                fs_brain=[['subject_id', 'T1w_acpc_dc']],
                aparcaseg=[['subject_id', 'aparc+aseg']],
                mask=[['subject_id', 'nodif_brain_mask']])

    # Create a datasource node to grab the bedpostX samples, anatomicals,
    # parcellation, and brain mask, using the dictionary created above.
    datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                   outfields=list(info.keys())),
                         name='datasource')
    datasource.inputs.template = '%s/%s'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath('/home/data/hcp')
    datasource.inputs.field_template = dict(
        merged_thsamples='/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/thsamples/%s*.nii.gz',
        merged_phsamples='/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/phsamples/%s*.nii.gz',
        merged_fsamples='/home/data/madlab/data/mri/hcp/bedpostX/%s/hcpbpX/fsamples/%s*.nii.gz',
        dmri_brain='/home/data/hcp/%s/T1w/%s.nii.gz',
        fs_brain='/home/data/hcp/%s/T1w/%s.nii.gz',
        aparcaseg='/home/data/hcp/%s/T1w/%s.nii.gz',
        mask='/home/data/hcp/%s/T1w/Diffusion/%s.nii.gz')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True

    # Create a FLIRT node to calculate the dmri_brain to fs_brain xfm,
    # i.e. a transform from DWI space to FreeSurfer space.
    dmri2fs_xfm = pe.Node(fsl.FLIRT(), name='dmri2fs_xfm')
    dmri2fs_xfm.inputs.out_matrix_file = 'dmri_2_fs_xfm.mat'
    hcp_pbX_wf.connect(datasource, 'dmri_brain', dmri2fs_xfm, 'in_file')
    hcp_pbX_wf.connect(datasource, 'fs_brain', dmri2fs_xfm, 'reference')

    # Create a ConvertXFM node to invert the dmri2fs affine,
    # i.e. a transform from FreeSurfer space back to DWI space.
    invt_dmri2fs = pe.Node(fsl.ConvertXFM(), name='invt_dmri2fs')
    invt_dmri2fs.inputs.invert_xfm = True
    invt_dmri2fs.inputs.out_file = 'fs_2_dmri_xfm.mat'
    hcp_pbX_wf.connect(dmri2fs_xfm, 'out_matrix_file', invt_dmri2fs, 'in_file')

    # Extract thalamus seed masks from the aparc+aseg.nii.gz file.
    # Here 10 is the left thalamus and 49 is the right thalamus.
    thal_seed_mask = pe.MapNode(fs.Binarize(),
                                iterfield=['match', 'binary_file'],
                                name='thal_seed_mask')
    #thal_seed_mask.inputs.subject_dir = 'aparcaseg'
    thal_seed_mask.inputs.match = [[10], [49]]
    thal_seed_mask.inputs.binary_file = ['lft_thal.nii.gz', 'rt_thal.nii.gz']
    hcp_pbX_wf.connect(datasource, 'aparcaseg', thal_seed_mask, 'in_file')

    # Next we need to avoid the ventricles by creating an avoid mask.
    # There are no left and right 3rd and 4th ventricles, so we make one mask.
    avoid_mask = pe.Node(fs.Binarize(),  # out_type='nii.gz',
                         name='avoid_mask')
    #avoid_mask.inputs.subject_dir = 'aparcaseg'
    avoid_mask.inputs.match = [4, 14, 15, 43, 72]  # lft_lat_ven, 3rd_ven, 4th_ven, rgt_lat_ven, 5th_ven
    avoid_mask.inputs.binary_file = 'ventricles.nii.gz'
    hcp_pbX_wf.connect(datasource, 'aparcaseg', avoid_mask, 'in_file')

    # Extract cortical target masks from the aparc+aseg.nii.gz file.
    # ".match" holds the FreeSurfer labels and "binary_file" the output names.
    ctx_targ_mask = pe.MapNode(fs.Binarize(),
                               iterfield=['match', 'binary_file'],
                               name='ctx_targ_mask')
    #ctx_targ_mask.inputs.subject_dir = 'aparcaseg'
    ctx_targ_mask.inputs.match = [[1024], [1022],
                                  [1003, 1028, 1027, 1012, 1019, 1020, 1032],
                                  [1031, 1029, 1008],
                                  [1009, 1015, 1033, 1035, 1034, 1030], [1011],
                                  [1017], [1002], [1014], [1026], [1028],
                                  [1023, 1025, 1010], [1005, 1013, 1021],
                                  [1007], [1006], [1016], [17], [18], [26],
                                  [2024], [2022],
                                  [2003, 2028, 2027, 2012, 2019, 2020, 2032],
                                  [2031, 2029, 2008],
                                  [2009, 2015, 2033, 2035, 2034, 2030], [2011],
                                  [2017], [2002], [2014], [2026], [2028],
                                  [2023, 2025, 2010], [2005, 2013, 2021],
                                  [2007], [2006], [2016], [53], [54], [58]]
    ctx_targ_mask.inputs.binary_file = [
        'ctx_lh_precentral.nii.gz', 'ctx_lh_postcentral.nii.gz',
        'ctx_lh_latfront.nii.gz', 'ctx_lh_parietal.nii.gz',
        'ctx_lh_temporal.nii.gz', 'ctx_lh_occipital.nii.gz',
        'ctx_lh_paracentral.nii.gz', 'ctx_lh_caudantcing.nii.gz',
        'ctx_lh_medorbfront.nii.gz', 'ctx_lh_rostantcing.nii.gz',
        'ctx_lh_superfront.nii.gz', 'ctx_lh_medpost.nii.gz',
        'ctx_lh_medoccipital.nii.gz', 'ctx_lh_fusiform.nii.gz',
        'ctx_lh_entorhinal.nii.gz', 'ctx_lh_parahippocampal.nii.gz',
        'lh_hpc.nii.gz', 'lh_amy.nii.gz', 'lh_nacc.nii.gz',
        'ctx_rh_precentral.nii.gz', 'ctx_rh_postcentral.nii.gz',
        'ctx_rh_latfront.nii.gz', 'ctx_rh_parietal.nii.gz',
        'ctx_rh_temporal.nii.gz', 'ctx_rh_occipital.nii.gz',
        'ctx_rh_paracentral.nii.gz', 'ctx_rh_caudantcing.nii.gz',
        'ctx_rh_medorbfront.nii.gz', 'ctx_rh_rostantcing.nii.gz',
        'ctx_rh_superfront.nii.gz', 'ctx_rh_medpost.nii.gz',
        'ctx_rh_medoccipital.nii.gz', 'ctx_rh_fusiform.nii.gz',
        'ctx_rh_entorhinal.nii.gz', 'ctx_rh_parahippocampal.nii.gz',
        'rh_hpc.nii.gz', 'rh_amy.nii.gz', 'rh_nacc.nii.gz'
    ]
    hcp_pbX_wf.connect(datasource, 'aparcaseg', ctx_targ_mask, 'in_file')

    # Create a FLIRT node to apply the inverse transform to the seeds,
    # i.e. convert the seed masks from FreeSurfer space to DWI space.
    seedxfm_fs2dmri = pe.MapNode(fsl.FLIRT(), iterfield=['in_file'],
                                 name='seedxfm_fs2dmri')
    seedxfm_fs2dmri.inputs.apply_xfm = True
    seedxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(thal_seed_mask, 'binary_file', seedxfm_fs2dmri, 'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', seedxfm_fs2dmri, 'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', seedxfm_fs2dmri,
                       'in_matrix_file')

    # Create a FLIRT node to apply the inverse transform to the targets,
    # the same as the previous node but for the target masks.
    targxfm_fs2dmri = pe.MapNode(fsl.FLIRT(), iterfield=['in_file'],
                                 name='targxfm_fs2dmri')
    targxfm_fs2dmri.inputs.apply_xfm = True
    targxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(ctx_targ_mask, 'binary_file', targxfm_fs2dmri, 'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', targxfm_fs2dmri, 'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', targxfm_fs2dmri,
                       'in_matrix_file')

    # Apply the inverse transform to the avoid mask (FreeSurfer to DWI space).
    avoidmaskxfm_fs2dmri = pe.Node(fsl.FLIRT(), name='avoidmaskxfm_fs2dmri')
    avoidmaskxfm_fs2dmri.inputs.apply_xfm = True
    avoidmaskxfm_fs2dmri.inputs.interp = 'nearestneighbour'
    hcp_pbX_wf.connect(avoid_mask, 'binary_file', avoidmaskxfm_fs2dmri,
                       'in_file')
    hcp_pbX_wf.connect(datasource, 'dmri_brain', avoidmaskxfm_fs2dmri,
                       'reference')
    hcp_pbX_wf.connect(invt_dmri2fs, 'out_file', avoidmaskxfm_fs2dmri,
                       'in_matrix_file')

    #make_targ_lists = pe.Node(util.Function(input_names=['in_files'],
    #                                        output_names='out_list',
    #                                        function=create_two_lists),
    #                          name='make_targ_lists')
    #hcp_pbX_wf.connect(targxfm_fs2dmri, 'out_file', make_targ_lists, 'in_files')

    # PROBTRACKX NODE
    pbx2 = pe.MapNode(fsl.ProbTrackX2(),
                      iterfield=['seed', 'target_masks'],
                      # Should avoid_mp have been included in iterfield here?
                      name='pbx2')
    pbx2.inputs.c_thresh = 0.2
    pbx2.inputs.n_steps = 2000
    pbx2.inputs.step_length = 0.5
    pbx2.inputs.n_samples = 25000
    pbx2.inputs.opd = True
    pbx2.inputs.os2t = True
    pbx2.inputs.loop_check = True
    #pbx2.plugin_args = {'bsub_args': '-q PQ_madlab'}  # old way; new way below
    pbx2.plugin_args = {
        'sbatch_args': ('-p IB_40C_1.5T --qos pq_madlab --account iacc_madlab '
                        '-N 1 -n 6')
    }
    hcp_pbX_wf.connect(datasource, 'merged_thsamples', pbx2, 'thsamples')
    hcp_pbX_wf.connect(datasource, 'merged_phsamples', pbx2, 'phsamples')
    hcp_pbX_wf.connect(datasource, 'merged_fsamples', pbx2, 'fsamples')
    hcp_pbX_wf.connect(seedxfm_fs2dmri, 'out_file', pbx2, 'seed')
    hcp_pbX_wf.connect(targxfm_fs2dmri, ('out_file', hemispherize), pbx2,
                       'target_masks')
    #hcp_pbX_wf.connect(make_targ_lists, 'out_list', pbx2, 'target_masks')
    hcp_pbX_wf.connect(avoidmaskxfm_fs2dmri, 'out_file', pbx2, 'avoid_mp')
    hcp_pbX_wf.connect(datasource, 'mask', pbx2, 'mask')

    # Create a FindTheBiggest node to do a hard segmentation between seeds and
    # targets, i.e. segment the seed region based on the probtrackX outputs
    # when classification targets are used.
    findthebiggest = pe.MapNode(fsl.FindTheBiggest(), iterfield=['in_files'],
                                name='findthebiggest')
    hcp_pbX_wf.connect(pbx2, 'targets', findthebiggest, 'in_files')

    # Create a datasink node to save outputs.
    datasink = pe.Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = os.path.abspath(sink_directory)
    datasink.inputs.container = subject_id + '/' + 'thal_seed'
    hcp_pbX_wf.connect(pbx2, 'log', datasink, 'hcpprobX.log')
    hcp_pbX_wf.connect(pbx2, 'fdt_paths', datasink, 'hcpprobX.fdt')
    hcp_pbX_wf.connect(pbx2, 'way_total', datasink, 'hcpprobX.waytotal')
    hcp_pbX_wf.connect(pbx2, 'targets', datasink, 'hcpprobX.targets')
    hcp_pbX_wf.connect(findthebiggest, 'out_file', datasink,
                       'hcpprobX.fbiggest.@biggestsegmentation')
    #hcp_pbX_wf.connect(thal_seed_mask, 'binary_file', datasink, 'hcpprobX.thal_mask')
    hcp_pbX_wf.connect(seedxfm_fs2dmri, 'out_file', datasink,
                       'hcpprobX.seed_masks')
    #do we need this? -> emu_pbX_wf.connect(datasource, 'ref_b0', datasink, 'emuprobX.b0')
    #do we need this? -> emu_pbX_wf.connect(thal_seed_mask, 'binary_file', datasink, 'emuprobX.thal_mask')

    return hcp_pbX_wf

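# A minimal usage sketch (not from the source) for pbX_wf above; the subject ID,
# sink directory, working directory, and plugin settings are hypothetical
# placeholders.
if __name__ == '__main__':
    wf = pbX_wf(subject_id='100307',                     # hypothetical HCP subject
                sink_directory='/scratch/hcp_pbX_sink')  # hypothetical sink dir
    wf.base_dir = '/scratch/hcp_pbX_work'                # hypothetical working dir
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
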