/
Cluster_DTI_TBSS_Wax.py
193 lines (126 loc) · 6.68 KB
/
Cluster_DTI_TBSS_Wax.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
#-----------------------------------------------------------------------------------------------------
# In[2]:
#This is an alternative script to process diffusion data
#The key concept here from Diffusion_20_Octuber_Nipype is to register B0 to anatomical image
#Then use the transformation from resting state preprocessing to transfer the images to anatomical images
#The registration with FA_template was horrible
#This script was done after restripping the skull to remove extra parts of the skull
#The overlap with the VBM template is quite good, yet not so satisfactory
#So, here I am trying with the WAX FA template downsized to 2mm
# Keep every intermediate node output on disk so individual pipeline steps
# can be inspected and debugged after the workflow has run.
from nipype import config
cfg = dict(execution={'remove_unnecessary_outputs': False})
config.update_config(cfg)
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni
import nipype.interfaces.ants as ants
# import nipype.interfaces.spm as spm
import nipype.interfaces.utility as utility
from nipype.interfaces.utility import IdentityInterface, Function
from os.path import join as opj
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.pipeline.engine import Workflow, Node, MapNode
import numpy as np
import matplotlib.pyplot as plt
#-----------------------------------------------------------------------------------------------------
# In[2]:
# Root directory of the experiment; all inputs and outputs live under here.
experiment_dir = '/home/in/aeed/TBSS'
# Diffusion-derived parameter maps to analyse.  The workflow iterates over
# this list, running identical TBSS and VBA statistics for each map
# (CHARMED, DTI, kurtosis and NODDI metrics).
map_list= [ 'CHARMED_AD' ,'CHARMED_FA' ,'CHARMED_FR' , 'CHARMED_IAD', 'CHARMED_MD', 'CHARMED_RD',
'Diffusion_20_AD' , 'Diffusion_20_FA', 'Diffusion_20_MD' , 'Diffusion_20_RD',
'Kurtosis_AD' , 'Kurtosis_AWF' , 'Kurtosis_MD' , 'Kurtosis_RD' , 'Kurtosis_TORT',
'Kurtosis_AK' , 'Kurtosis_FA' , 'Kurtosis_MK' , 'Kurtosis_RK',
'NODDI_FICVF' , 'NODDI_ODI'
]
# map_list = ['229', '230', '365', '274']
# Datasink container name (relative to experiment_dir) and the scratch
# directory where nipype keeps per-node working files.
output_dir = 'DTI_TBSS_Wax'
working_dir = 'DTI_TBSS_workingdir_Wax_Template'
DTI_TBSS_Wax = Workflow (name = 'DTI_TBSS_Wax')
DTI_TBSS_Wax.base_dir = opj(experiment_dir, working_dir)
#-----------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------
# In[3]:
# Infosource - a function free node to iterate over the list of subject names
# (here the iterable is the parameter-map id, not a subject id: one workflow
# branch is spawned per entry of map_list).
infosource = Node(IdentityInterface(fields=['map_id']),
name="infosource")
infosource.iterables = [('map_id', map_list)]
#-----------------------------------------------------------------------------------------------------
# In[4]:
# Glob templates, resolved per map_id under experiment_dir:
#   all_skeleton  : 4D concatenation of all subjects' skeletonised maps (TBSS input)
#   skeleton_mask : binary mask of the mean-FA skeleton (TBSS analysis mask)
#   all_image     : 4D concatenation of the unskeletonised, WAX-registered maps (VBA input)
#   mean_FA       : group mean FA, used below to build the VBA white-matter mask
templates = {
'all_skeleton' : 'Waxholm_Template/*/{map_id}/All_*_skeletonised.nii.gz',
'skeleton_mask' : 'Waxholm_Template/*/{map_id}/mean_FA_skeleton_mask.nii.gz',
'all_image' : 'Waxholm_Template/*/{map_id}/All_{map_id}_WAX.nii.gz',
'mean_FA' : 'Waxholm_Template/*/{map_id}/mean_FA.nii.gz',
}
selectfiles = Node(SelectFiles(templates,
base_directory=experiment_dir),
name="selectfiles")
#-----------------------------------------------------------------------------------------------------
# In[5]:
# Collect final outputs under <experiment_dir>/<output_dir>.
datasink = Node(DataSink(), name = 'datasink')
datasink.inputs.container = output_dir
datasink.inputs.base_directory = experiment_dir
# Strip nipype's '_map_id_' prefix from sink folder names.
# NOTE(review): the replacement is a single space, so folders come out as
# e.g. ' CHARMED_AD' with a leading space — confirm this is intended;
# replacing with '' would drop the prefix cleanly.
substitutions = [('_map_id_', ' ')]
datasink.inputs.substitutions = substitutions
#-----------------------------------------------------------------------------------------------------
#Design with two contrasts only
# GLM design matrix and t-contrast files shared by both randomise runs.
design = '/home/in/aeed/TBSS/Design_TBSS.mat'
contrast = '/home/in/aeed/TBSS/Design_TBSS.con'
#-----------------------------------------------------------------------------------------------------
#randomise on the skeletonised data
# FSL randomise with 10000 permutations; tfce2D (--T2) is the recommended
# TFCE variant for the quasi-2D TBSS skeleton; vox_p_values also writes
# uncorrected voxelwise p-maps.
randomise_tbss = Node(fsl.Randomise(), name = 'randomise_tbss')
randomise_tbss.inputs.design_mat = design
randomise_tbss.inputs.tcon = contrast
randomise_tbss.inputs.num_perm = 10000
randomise_tbss.inputs.tfce2D = True
randomise_tbss.inputs.vox_p_values = True
randomise_tbss.inputs.base_name = 'TBSS_'
#-----------------------------------------------------------------------------------------------------
#smoothing the images
def nilearn_smoothing(image, kernel=(4.3, 4.3, 16)):
    """Smooth *image* with an anisotropic Gaussian kernel using nilearn.

    Parameters
    ----------
    image : str
        Path to the 4D NIfTI image (the merged ``All_*`` file) to smooth.
    kernel : sequence of 3 floats, optional
        Smoothing kernel in mm along x, y, z.  The default (4.3, 4.3, 16)
        reproduces the anisotropic kernel originally hard-coded here.

    Returns
    -------
    smoothed_output : str
        Absolute path of ``smoothed_all.nii.gz`` written to the current
        (node working) directory.
    """
    # Imports live inside the function because nipype's Function interface
    # serializes the source and re-executes it in a fresh namespace on the
    # cluster node.  (Unused nilearn/numpy imports removed.)
    from nilearn.image import smooth_img
    import os
    smoothed_img = smooth_img(image, list(kernel))
    smoothed_img.to_filename('smoothed_all.nii.gz')
    smoothed_output = os.path.abspath('smoothed_all.nii.gz')
    return smoothed_output
# Wrap the function as a workflow node.  Only 'image' is exposed as a node
# input, so 'kernel' always takes its default.  NB: this assignment shadows
# the function name with the Node object; the Function interface already
# captured the source above, so the workflow is unaffected.
nilearn_smoothing = Node(name = 'nilearn_smoothing',
                         interface = Function(input_names = ['image'],
                                              output_names = ['smoothed_output'],
                                              function = nilearn_smoothing))
#-----------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------
#mask only FA values > 0.2 to gurantee it is WM
# fsl.Threshold zeroes voxels of mean_FA below 0.2, keeping white matter only.
thresh_FA = Node(fsl.Threshold(), name = 'thresh_FA')
thresh_FA.inputs.thresh = 0.2
#-----------------------------------------------------------------------------------------------------
#binarize this mask
# fslmaths -bin, output as 8-bit ('char') to get a proper binary mask file.
binarize_FA = Node(fsl.UnaryMaths(), name = 'binarize_FA')
binarize_FA.inputs.operation = 'bin'
binarize_FA.inputs.output_datatype = 'char'
#-----------------------------------------------------------------------------------------------------
#randomise on the smoothed all images
# Voxel-based analysis counterpart of randomise_tbss: same design/contrasts
# and 10000 permutations, but full 3D TFCE (not tfce2D) since the input is
# the smoothed volumetric data rather than the skeleton.
randomise_VBA = Node(fsl.Randomise(), name = 'randomise_vba')
randomise_VBA.inputs.design_mat = design
randomise_VBA.inputs.tcon = contrast
randomise_VBA.inputs.num_perm = 10000
randomise_VBA.inputs.tfce = True
randomise_VBA.inputs.vox_p_values = True
randomise_VBA.inputs.base_name = 'VBA_'
#-----------------------------------------------------------------------------------------------------
# Wire the two parallel statistics streams, per map_id:
#   TBSS : skeletonised 4D stack + skeleton mask -> randomise_tbss
#   VBA  : unskeletonised 4D stack -> nilearn smoothing -> randomise_VBA,
#          masked by the thresholded (FA > 0.2) and binarized mean FA.
DTI_TBSS_Wax.connect ([
(infosource, selectfiles,[('map_id','map_id')]),
(selectfiles, randomise_tbss, [('all_skeleton','in_file')]),
(selectfiles, randomise_tbss, [('skeleton_mask','mask')]),
(selectfiles, nilearn_smoothing, [('all_image','image')]),
(nilearn_smoothing, randomise_VBA, [('smoothed_output','in_file')]),
(selectfiles, thresh_FA, [('mean_FA','in_file')]),
(thresh_FA, binarize_FA, [('out_file','in_file')]),
(binarize_FA, randomise_VBA, [('out_file','mask')])
])
# Write a flat dependency graph image, then execute on the SLURM cluster.
DTI_TBSS_Wax.write_graph(graph2use='flat')
DTI_TBSS_Wax.run(plugin='SLURM')
# DTI_workflow.run(plugin='SLURM')