def _get_series_len(bold_fname):
    """Return the number of usable volumes in a BOLD series.

    Loads *bold_fname* and subtracts the initial non-steady-state
    ("dummy") volumes, as estimated by niworkflows, from the length of
    the time dimension. A 3D image is treated as a single volume.

    NOTE(review): this function is redefined twice further down in the
    module; only the last definition takes effect at import time.
    """
    from niworkflows.interfaces.registration import _get_vols_to_discard

    image = nb.load(bold_fname)
    # No time axis -> a single volume.
    if len(image.shape) < 4:
        return 1
    return image.shape[3] - _get_vols_to_discard(image)
def _get_series_len(bold_fname):
    """Count the volumes of a BOLD series after dropping dummy scans.

    NOTE(review): byte-identical duplicate of the definition above, and
    shadowed in turn by the multi-echo variant below — only the last
    definition in the module is ever called.
    """
    from niworkflows.interfaces.registration import _get_vols_to_discard

    bold_img = nb.load(bold_fname)
    n_dims = len(bold_img.shape)
    if n_dims >= 4:
        n_dropped = _get_vols_to_discard(bold_img)
        return bold_img.shape[3] - n_dropped
    # 3D image: no time dimension, so report one volume.
    return 1
def _get_series_len(bold_fname):
    """Return the usable series length of a (possibly multi-echo) BOLD run.

    Parameters
    ----------
    bold_fname : str or list of str
        Path to a BOLD image, or a list of per-echo paths for
        multi-echo data (the first echo is inspected).

    Returns
    -------
    int
        Number of timepoints minus the initial non-steady-state
        volumes estimated by niworkflows; 1 for a 3D image.
    """
    from niworkflows.interfaces.registration import _get_vols_to_discard

    # Multi-echo inputs arrive as a list of per-echo files; the echoes
    # share timing, so the first file is representative.
    fname = bold_fname[0] if isinstance(bold_fname, list) else bold_fname
    img = nb.load(fname)
    if len(img.shape) < 4:
        return 1
    return img.shape[3] - _get_vols_to_discard(img)
def main():
    """CLI entry point: extract WM/CSF confounds from a BOLD run.

    Builds the confound-extraction workflow, wires its outputs to
    ``ExportFile`` nodes, and runs everything, writing
    ``<out_basename>_confounds.tsv``, the WM/CSF/aCompCor ROI masks,
    and ``<out_basename>_confounds.json`` metadata.
    """
    parser = argparse.ArgumentParser(
        description='Extract confounds from WM and CSF')
    parser.add_argument('t1', type=str, help='T1 image')
    parser.add_argument('t1_mask', type=str, help='Binary brain mask')
    parser.add_argument('wm_tpm', type=str,
                        help='White matter tissue probability map')
    parser.add_argument('csf_tpm', type=str,
                        help='CSF tissue probability map')
    parser.add_argument('bold', type=str, help='BOLD image')
    parser.add_argument('bold_mask', type=str, help='BOLD mask')
    # Fixed help-text typo: "metadat" -> "metadata".
    parser.add_argument('bold_json', type=str,
                        help='BOLD JSON metadata file')
    parser.add_argument('out_basename', type=str,
                        help='Output file path with basename')
    # required=True: the value was previously optional but used
    # unconditionally below, so omitting it crashed with a TypeError
    # inside os.path.join instead of a clear argparse error.
    parser.add_argument('--workdir', type=str, required=True,
                        help='Path to use for workdir'
                             ' this path must already exist')
    args = parser.parse_args()
    t1 = args.t1
    t1_mask = args.t1_mask
    wm_tpm = args.wm_tpm
    csf_tpm = args.csf_tpm
    bold = args.bold
    bold_mask = args.bold_mask
    bold_json = args.bold_json
    outbase = args.out_basename
    workdir = args.workdir

    # Get TR (RepetitionTime) from the BOLD sidecar JSON.
    with open(bold_json, 'r') as j:
        metadata = json.load(j)

    # Number of initial non-steady-state volumes to discard.
    # Imported locally to match the _get_series_len helpers; previously
    # this name had no visible import in this function.
    from niworkflows.interfaces.registration import _get_vols_to_discard
    ref_im = nib.load(bold)
    skipvol = _get_vols_to_discard(ref_im)

    # Set up confound workflow. exist_ok=True replaces a blanket
    # "except OSError: pass" that also hid real failures such as
    # permission errors.
    confound_dir = os.path.join(workdir, 'confound_wf')
    os.makedirs(confound_dir, exist_ok=True)
    confound_wf = init_confound_wf(t1, t1_mask, wm_tpm, csf_tpm,
                                   bold, bold_mask,
                                   metadata['RepetitionTime'], skipvol)
    confound_wf.base_dir = confound_dir

    # Nodes to export each workflow output to the destination directory.
    ef_confounds = pe.Node(nio.ExportFile(clobber=True),
                           name='export_confounds')
    ef_confounds.inputs.out_file = f'{outbase}_confounds.tsv'
    ef_wm = pe.Node(nio.ExportFile(clobber=True), name='export_wm')
    ef_wm.inputs.out_file = f'{outbase}_wm_roi.nii.gz'
    ef_csf = pe.Node(nio.ExportFile(clobber=True), name='export_csf')
    ef_csf.inputs.out_file = f'{outbase}_csf_roi.nii.gz'
    ef_acc = pe.Node(nio.ExportFile(clobber=True), name='export_acc')
    ef_acc.inputs.out_file = f'{outbase}_acc_roi.nii.gz'
    ef_acc_met = pe.Node(nio.ExportFile(clobber=True),
                         name='export_acc_meta')
    ef_acc_met.inputs.out_file = f'{outbase}_confounds.json'

    # Wrapper workflow that connects confound outputs to the exporters.
    main_dir = os.path.join(workdir, 'main_wf')
    os.makedirs(main_dir, exist_ok=True)
    wf = pe.Workflow(name='main_wf')
    wf.base_dir = main_dir
    wf.connect([
        (confound_wf, ef_confounds,
         [('outputnode.confounds_file', 'in_file')]),
        (confound_wf, ef_wm, [('outputnode.wm_roi', 'in_file')]),
        (confound_wf, ef_csf, [('outputnode.csf_roi', 'in_file')]),
        (confound_wf, ef_acc, [('outputnode.acc_roi', 'in_file')]),
        (confound_wf, ef_acc_met,
         [('outputnode.confounds_metadata', 'in_file')]),
    ])
    wf.run()