def _list_outputs(self):
    outputs = self.output_spec().get()

    if not isdefined(self.inputs.out_file):
        prefix = self._gen_fname(self.inputs.in_file, suffix="_pval")
        outputtype = self.inputs.outputtype
        if outputtype == "AFNI":
            ext = ".HEAD"
            suffix = "+tlrc"
        else:
            ext = Info.output_type_to_ext(outputtype)
            suffix = ""
    else:
        prefix = self.inputs.out_file
        ext_ind = max([prefix.lower().rfind(".nii.gz"),
                       prefix.lower().rfind(".nii")])
        if ext_ind == -1:
            ext = ".HEAD"
            suffix = "+tlrc"
        else:
            ext = prefix[ext_ind:]
            suffix = ""

    # All outputs should be in the same directory as the prefix
    out_dir = os.path.dirname(os.path.abspath(prefix))

    outputs["out_file"] = (fname_presuffix(prefix, suffix=suffix,
                                           use_ext=False,
                                           newpath=out_dir) + ext)
    return outputs
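
# Illustrative sketch (not part of the interface above): how the extension
# logic in _list_outputs resolves an explicitly given out_file. A name without
# a recognized NIfTI extension is assumed to be an AFNI BRIK/HEAD dataset in
# +tlrc space. The example file names in the comments are hypothetical.
def _resolve_out_ext(prefix):
    ext_ind = max([prefix.lower().rfind(".nii.gz"),
                   prefix.lower().rfind(".nii")])
    if ext_ind == -1:
        return "+tlrc", ".HEAD"
    return "", prefix[ext_ind:]

# _resolve_out_ext("stats_pval.nii.gz")  -> ("", ".nii.gz")
# _resolve_out_ext("stats_pval")         -> ("+tlrc", ".HEAD")
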
def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True,
               ext=None):
    """Generate a filename based on the given parameters.

    The filename will take the form: cwd/basename<suffix><ext>.
    If change_ext is True, it will use the extension specified in
    <instance>inputs.outputtype.

    Parameters
    ----------
    basename : str
        Filename to base the new filename on.
    cwd : str
        Path to prefix to the new filename. (default is os.getcwd())
    suffix : str
        Suffix to add to the `basename`. (default is '')
    change_ext : bool
        Flag to change the filename extension to the AFNI output type.
        (default True)
    ext : str
        Extension to use for the new filename. (default is the extension
        corresponding to `inputs.outputtype`)

    Returns
    -------
    fname : str
        New filename based on given parameters.

    """
    if basename == '':
        msg = 'Unable to generate filename for command %s. ' % self.cmd
        msg += 'basename is not set!'
        raise ValueError(msg)
    if cwd is None:
        cwd = os.getcwd()
    if ext is None:
        ext = Info.outputtype_to_ext(self.inputs.outputtype)
    if change_ext:
        if suffix:
            suffix = ''.join((suffix, ext))
        else:
            suffix = ext
    if suffix is None:
        suffix = ''
    fname = fname_presuffix(basename, suffix=suffix, use_ext=False,
                            newpath=cwd)
    return fname
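
# Illustrative sketch (assumptions noted inline): with outputtype NIFTI_GZ,
# Info.outputtype_to_ext() yields ".nii.gz", so _gen_fname behaves roughly
# like the standalone helper below. The example file name is hypothetical;
# fname_presuffix is assumed to come from nipype.utils.filemanip.
import os
from nipype.utils.filemanip import fname_presuffix

def _example_gen_fname(basename, suffix="", ext=".nii.gz", cwd=None):
    # Mirrors _gen_fname with change_ext=True: drop the original extension,
    # append suffix plus the output-type extension, and place the file in cwd.
    cwd = cwd or os.getcwd()
    return fname_presuffix(basename, suffix=suffix + ext,
                           use_ext=False, newpath=cwd)

# _example_gen_fname("rest.nii.gz", suffix="_pval")
#   -> "<cwd>/rest_pval.nii.gz"
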
def functional_preprocessing(
        subject_list, inputs, outputs, workingdir, output_type,
        fwhm, hpfilter, lpfilter, tr, label, outlabel,
        name="%s_preprocessing_p2"):
    """todo"""

    #####
    # Setup pipeline
    #####

    if name.find("%s") != -1:
        name = name % label
    else:
        print("ERROR: You must have a '%s' in the name for the label")
        raise SystemExit(2)

    func_preproc_name = "func_preproc_%s_%ism_%ihp_%ilp" % (
        outlabel, int(fwhm * 10),
        hpfilter * (hpfilter > 0), lpfilter * (lpfilter > 0))
    preproc = create_func_preproc_workflow(name, func_preproc_name)
    preproc.base_dir = workingdir
    preproc.inputs.inputspec.fwhm = fwhm
    preproc.inputs.inputspec.highpass = float(hpfilter) / (2 * tr)
    preproc.inputs.inputspec.lowpass = float(lpfilter) / (2 * tr)

    ## save for later
    inputnode = preproc.get_node("inputspec")
    outputnode = preproc.get_node("outputspec")

    ######
    # Setup data source
    ######

    # File extension
    ext = Info.outputtype_to_ext(output_type)
    afni.AFNICommand.set_default_outputtype(output_type)

    # Subject ID Node
    subinfo = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                      name='subinfo',
                      iterables=('subject_id', subject_list))

    # Location of input data
    datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                   outfields=['func', 'func_mask'],
                                                   sort_filelist=True),
                         name='datasource')
    datasource.inputs.base_directory = os.path.abspath(inputs.basedir)
    datasource.inputs.template = "*"
    datasource.inputs.field_template = dict(
        func=os.path.join("%s", inputs.funcdir, 'run_[0-9]*', inputs.infunc),
        func_mask=os.path.join("%s", inputs.funcdir, 'run_[0-9]*', inputs.inmask)
    )
    datasource.inputs.template_args = dict(
        func=[['subject_id']],
        func_mask=[['subject_id']]
    )

    # Get the number of runs for each participant (for renaming purposes)
    datasource.inputs.subject_id = subject_list
    ds = datasource.run()
    runs1 = [len(x) for x in tolist(ds.outputs.func)]
    runs2 = [len(x) for x in tolist(ds.outputs.func_mask)]
    for i in range(len(runs1)):
        if runs1[i] != runs2[i]:
            print("ERROR: mismatch in number of functionals and masks")
            raise SystemExit(2)
    max_runs = max(runs1)

    # Link inputs
    preproc.connect(subinfo, 'subject_id', datasource, 'subject_id')
    preproc.connect(datasource, 'func', inputnode, 'func')
    preproc.connect(datasource, 'func_mask', inputnode, 'func_mask')

    ######
    # Setup data sink
    ######

    # Datasink
    ## will get: "base_directory/subject_id/output_anatdir"
    datasink = pe.Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = outputs.basedir

    ## substitute map stuff
    preprocsinksubs = get_mapnode_substitutions(preproc, outputnode, max_runs)
    datasink.inputs.substitutions = preprocsinksubs

    # replace subject_id stuff with functional scan
    datasink.inputs.regexp_substitutions = (r"_subject_id_(\w|\d)+",
                                            outputs.funcdir)

    ## connect
    preproc.connect(subinfo, 'subject_id', datasink, 'container')
    output_fields = outputnode.outputs.get()
    for field in output_fields:
        preproc.connect(outputnode, field, datasink, "@%s" % field)

    return preproc
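
# Hedged usage sketch: one way this second-pass preprocessing builder might be
# driven. The Bunch-style input/output containers, directory layout, and file
# names below are illustrative assumptions, not values from this repository.
# Note that the builder probes the DataGrabber at build time, so the example
# paths would need to exist on disk for this to run.
def _example_run_preprocessing_p2():
    from nipype.interfaces.base import Bunch
    inputs = Bunch(basedir="/data/study", funcdir="func",
                   infunc="func_preproc.nii.gz", inmask="func_mask.nii.gz")
    outputs = Bunch(basedir="/data/study/pipeline", funcdir="func")
    wf = functional_preprocessing(
        subject_list=["sub01", "sub02"], inputs=inputs, outputs=outputs,
        workingdir="/data/study/work", output_type="NIFTI_GZ",
        fwhm=6.0, hpfilter=128, lpfilter=-1, tr=2.0,
        label="rest", outlabel="smooth")
    wf.run(plugin="MultiProc")
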
def register(
        subject_list, inputs, outputs, workingdir, output_type,
        standard, fnirt, interp, search, func_label,
        name="registration"):

    #####
    # Setup workflow
    #####

    if hasattr(inputs, "coplanar"):
        have_coplanar = True
    else:
        have_coplanar = False

    # highres2standard
    h2s = create_highres2standard_workflow("%s_highres2standard" % name,
                                           search, fnirt)
    h2s.base_dir = workingdir
    h2s_inputnode = h2s.get_node("inputspec")
    h2s_outputnode = h2s.get_node("outputspec")

    # func2standard
    f2s = create_func2standard_workflow("%s_%s2standard" % (name, func_label),
                                        have_coplanar, search, fnirt)
    f2s.base_dir = workingdir
    f2s_inputnode = f2s.get_node("inputspec")
    f2s_outputnode = f2s.get_node("outputspec")

    ######
    # Setup data source
    ######

    # File extension
    ext = Info.outputtype_to_ext(output_type)
    afni.AFNICommand.set_default_outputtype(output_type)

    # Subject ID Node
    subinfo = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                      name='subinfo',
                      iterables=('subject_id', subject_list))

    # Location of input data
    outfields = ['func', 'highres']
    if have_coplanar:
        outfields.append('coplanar')
    if fnirt:
        outfields.append('highres_head')
    datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                   outfields=outfields),
                         name='datasource')
    datasource.inputs.base_directory = op.abspath(op.expanduser(inputs.basedir))
    datasource.inputs.template = "*"
    datasource.inputs.field_template = dict(
        func=os.path.join("%s", inputs.func),
        highres=os.path.join("%s", inputs.highres)
    )
    datasource.inputs.template_args = dict(
        func=[['subject_id']],
        highres=[['subject_id']]
    )
    if have_coplanar:
        datasource.inputs.field_template['coplanar'] = os.path.join("%s", inputs.coplanar)
        datasource.inputs.template_args['coplanar'] = [['subject_id']]
    if fnirt:
        datasource.inputs.field_template['highres_head'] = os.path.join("%s", inputs.highres_head)
        datasource.inputs.template_args['highres_head'] = [['subject_id']]

    ######
    # Link Inputs
    ######

    # highres2standard
    h2s_inputnode.inputs.interp = interp
    h2s_inputnode.inputs.standard = standard
    h2s.connect([
        (subinfo, datasource, [('subject_id', 'subject_id')]),
        (datasource, h2s_inputnode, [('highres', 'highres')])
    ])
    if fnirt:
        h2s.connect(datasource, 'highres_head', h2s_inputnode, 'highres_head')
        h2s_inputnode.inputs.standard_head = inputs.standard_head
        h2s_inputnode.inputs.standard_mask = inputs.standard_mask
        h2s_inputnode.inputs.fnirt_config = inputs.fnirt_config

    # func2standard
    f2s_inputnode.inputs.interp = interp
    f2s_inputnode.inputs.standard = standard
    path = op.join(outputs.basedir, "%s", outputs.highres, "highres2standard.mat")
    f2s.connect([
        (subinfo, datasource, [('subject_id', 'subject_id')]),
        (datasource, f2s_inputnode, [('func', 'func'),
                                     ('highres', 'highres')]),
        (subinfo, f2s_inputnode, [(('subject_id', regpath, path),
                                   'highres2standard_mat')])
    ])
    if have_coplanar:
        f2s.connect(datasource, 'coplanar', f2s_inputnode, 'coplanar')
    if fnirt:
        path = op.join(outputs.basedir, "%s", outputs.highres,
                       "highres2standard_warp.*")
        f2s.connect(subinfo, ('subject_id', regpath, path),
                    f2s_inputnode, 'highres2standard_warp')

    ######
    # Setup data sink
    ######

    # highres2standard
    ## setup
    h2s_datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    h2s_datasink.inputs.base_directory = op.abspath(op.expanduser(outputs.basedir))
    h2s.connect(subinfo, 'subject_id', h2s_datasink, 'container')
    h2s_datasink.inputs.regexp_substitutions = (r"_subject_id_(\w|\d)+",
                                                outputs.highres)
    ## link
    outfields = h2s_outputnode.outputs.get()
    for outfield in outfields:
        h2s.connect(h2s_outputnode, outfield, h2s_datasink, "@%s" % outfield)

    # func2standard
    ## setup
    f2s_datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    f2s_datasink.inputs.base_directory = op.abspath(op.expanduser(outputs.basedir))
    f2s.connect(subinfo, 'subject_id', f2s_datasink, 'container')
    f2s_datasink.inputs.regexp_substitutions = (r"_subject_id_(\w|\d)+",
                                                outputs.func)
    ## link
    outfields = f2s_outputnode.outputs.get()
    for outfield in outfields:
        f2s.connect(f2s_outputnode, outfield, f2s_datasink, "@%s" % outfield)

    return [h2s, f2s]
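
# Hedged usage sketch: how register() might be invoked. All paths, file-name
# patterns, template locations, and the interp/search values below are
# illustrative assumptions, not values taken from this repository; the two
# returned workflows are run separately.
def _example_run_registration():
    from nipype.interfaces.base import Bunch
    inputs = Bunch(basedir="/data/study",
                   func="func/example_func.nii.gz",
                   highres="anat/highres_brain.nii.gz",
                   highres_head="anat/highres_head.nii.gz",
                   standard_head="/usr/share/fsl/data/standard/MNI152_T1_2mm.nii.gz",
                   standard_mask="/usr/share/fsl/data/standard/MNI152_T1_2mm_brain_mask_dil.nii.gz",
                   fnirt_config="T1_2_MNI152_2mm")
    outputs = Bunch(basedir="/data/study/reg",
                    highres="reg/highres", func="reg/func")
    h2s, f2s = register(
        subject_list=["sub01"], inputs=inputs, outputs=outputs,
        workingdir="/data/study/work", output_type="NIFTI_GZ",
        standard="/usr/share/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz",
        fnirt=True, interp="trilinear", search="normal", func_label="func")
    h2s.run()
    f2s.run()
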
def functional_preprocessing(
        subject_list, inputs, outputs, workingdir, output_type,
        fwhm, hpfilter, lpfilter, tr, motion_nstages, label,
        whichvol="middle", timeshift=False, tpattern=None, delete_vols=0,
        name="%s_preprocessing"):
    """todo"""

    #####
    # Setup pipeline
    #####

    if name.find("%s") != -1:
        name = name % label
    else:
        print("ERROR: You must have a '%s' in the name for the label")
        raise SystemExit(2)

    func_preproc_name = "func_preproc_%ism_%ihp_%ilp" % (
        int(fwhm * 10), hpfilter * (hpfilter > 0), lpfilter * (lpfilter > 0))
    preproc = create_func_preproc_workflow(name, whichvol, timeshift, tpattern,
                                           delete_vols, func_preproc_name)
    preproc.base_dir = workingdir
    preproc.inputs.inputspec.fwhm = fwhm
    preproc.inputs.inputspec.highpass = float(hpfilter) / (2 * tr)
    preproc.inputs.inputspec.lowpass = float(lpfilter) / (2 * tr)
    preproc.inputs.inputspec.motion_nstages = motion_nstages

    ## save for later
    inputnode = preproc.get_node("inputspec")
    outputnode = preproc.get_node("outputspec")

    ######
    # Setup data source
    ######

    # File extension
    ext = Info.outputtype_to_ext(output_type)
    afni.AFNICommand.set_default_outputtype(output_type)

    # Subject ID Node
    subinfo = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                      name='subinfo',
                      iterables=('subject_id', subject_list))

    # Location of input data
    datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                                   outfields=['func'],
                                                   sort_filelist=True),
                         name='datasource')
    datasource.inputs.base_directory = os.path.abspath(inputs.basedir)
    datasource.inputs.template = os.path.join("%s", inputs.func)
    datasource.inputs.template_args = dict(func=[['subject_id']])

    # Get the number of runs for each participant (for renaming purposes)
    datasource.inputs.subject_id = subject_list
    ds = datasource.run()
    runs = [len(x) for x in tolist(ds.outputs.func)]  # number of runs per subject
    max_runs = max(runs)

    # Link inputs
    preproc.connect(subinfo, 'subject_id', datasource, 'subject_id')
    preproc.connect(datasource, 'func', inputnode, 'func')

    ######
    # Setup data sink
    ######

    # Datasink
    ## will get: "base_directory/subject_id/output_anatdir"
    datasink = pe.Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = outputs.basedir

    ## substitute map stuff
    preprocsinksubs = get_mapnode_substitutions(
        preproc, outputnode, max_runs,
        unmap=["func_mc_ref", "example_func_all", "func_mask_all",
               "motion_all", "motion_01", "motion_02", "motion_03",
               "motion_04", "motion_05", "motion_06"])
    datasink.inputs.substitutions = preprocsinksubs

    # replace subject_id stuff with functional scan
    datasink.inputs.regexp_substitutions = (r"_subject_id_(\w|\d)+",
                                            outputs.func)

    ## connect
    preproc.connect(subinfo, 'subject_id', datasink, 'container')
    output_fields = outputnode.outputs.get()
    for field in output_fields:
        preproc.connect(outputnode, field, datasink, "@%s" % field)

    return preproc
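
# Hedged note on the filter settings above: the highpass/lowpass cutoffs are
# converted from seconds to half-width sigmas in volumes, i.e.
# sigma_vols = cutoff_sec / (2 * TR), the same convention FSL FEAT uses for
# fslmaths -bptf. A cutoff of -1 effectively disables that filter. The numbers
# below are an illustrative check, not values from this repository.
def _example_filter_sigmas(hpfilter=128, lpfilter=-1, tr=2.0):
    highpass = float(hpfilter) / (2 * tr)   # 128 s cutoff at TR=2 s -> 32 volumes
    lowpass = float(lpfilter) / (2 * tr)    # -1 -> negative value, i.e. no lowpass
    return highpass, lowpass

# _example_filter_sigmas() -> (32.0, -0.25)
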