def _run_interface(self, runtime):
    """Run worldmat2flirtmap in MATLAB over the interface inputs."""
    subs = dict(worldmat=self.inputs.worldmat,
                src=self.inputs.src,
                trg=self.inputs.trg,
                output_file=self.inputs.output_file)
    # MATLAB code template.  mfile=True writes the script out as an .m
    # file before execution, which is more reliable and easier to debug
    # than passing the code as a command-line argument to the matlab
    # executable (where it is reduced to a single line and stripped of
    # comments).
    m_code = Template("""worldmat = '$worldmat';
src = '$src';
trg = '$trg';
output_file = '$output_file'
worldmat2flirtmap(worldmat, src, trg, output_file);
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    res = matlab.run()
    return res.runtime
def _run_interface(self, runtime):
    """Collect CONN toolbox batch inputs, save them to a .mat file and
    run the batch through MATLAB (bips_load_conn / conn_batch).

    Fix: use the print() function form so the module also parses under
    Python 3 (single-argument print() emits identical output on Python 2).
    """
    from nipype.interfaces.matlab import MatlabCommand

    def islist(i):
        # Normalise a trait value to a list of path strings.  FSL .par
        # realignment files are copied to a .txt twin because CONN only
        # accepts .txt extensions.
        if not isinstance(i, list):
            i = [str(i)]
            return i
        else:
            I = []
            for l in i:
                if not l.endswith('.par'):
                    I.append(str(l))
                else:
                    shutil.copy2(l, l + '.txt')
                    I.append(l + '.txt')
            return I

    info = {}
    info["functional_files"] = islist(self.inputs.functional_files)
    info["structural_files"] = islist(self.inputs.structural_files)
    info["csf_mask"] = islist(self.inputs.csf_mask)
    info["white_mask"] = islist(self.inputs.white_mask)
    info["grey_mask"] = islist(self.inputs.grey_mask)
    info["TR"] = float(self.inputs.tr)
    info["realignment_parameters"] = islist(self.inputs.realignment_parameters)
    info["outliers"] = islist(self.inputs.outliers)
    info["norm_components"] = islist(self.inputs.norm_components)
    info["filename"] = '%s/conn_%s.mat' % (os.getcwd(), self.inputs.project_name)
    info["n_subjects"] = int(self.inputs.n_subjects)
    conn_inputs = os.path.abspath('inputs_to_conn.mat')
    # The struct is loaded on the MATLAB side as the variable `in`.
    sio.savemat(conn_inputs, {"in": info})
    print("saved conn_inputs.mat file")
    script = """load %s;
batch=bips_load_conn(in);
conn_batch(batch)""" % conn_inputs
    mlab = MatlabCommand(script=script, mfile=True)
    result = mlab.run()
    return result.runtime
def _run_interface(self, runtime):
    """Fit the NODDI model by calling noddi_fitting in MATLAB."""
    extra_args = getExtraArgs(self.inputs.noise_scaling_factor,
                              self.inputs.tissue_type,
                              self.inputs.matlabpoolsize)
    subs = dict(in_dwis=self.inputs.in_dwis,
                in_mask=self.inputs.in_mask,
                in_bvals=self.inputs.in_bvals,
                in_bvecs=self.inputs.in_bvecs,
                in_b0threshold=self.inputs.in_b0threshold,
                in_fname=self.inputs.in_fname,
                in_noddi_toolbox=getNoddiToolBoxPath(),
                in_noddi_path=getNoddiPath(),
                extra_noddi_args=extra_args)
    # MATLAB code template: put the NODDI code on the path, then fit.
    m_code = Template("""
addpath(genpath('$in_noddi_path'));
addpath(genpath('$in_noddi_toolbox'));
in_dwis = '$in_dwis';
in_mask = '$in_mask';
in_bvals = '$in_bvals';
in_bvecs = '$in_bvecs';
in_b0threshold = $in_b0threshold;
in_fname = '$in_fname';
[~,~,~,~,~,~,~] = noddi_fitting(in_dwis, in_mask, in_bvals, in_bvecs, in_b0threshold, in_fname $extra_noddi_args);
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Build a MATLAB cell array of scan files and run asl_script on it."""
    from nipype.interfaces.spm.base import scans_for_fname, scans_for_fnames
    from nipype.utils.filemanip import filename_to_list, list_to_filename
    input_dir = "."
    entries = []
    # Render the sorted scan list as a MATLAB cell-array literal.
    for fname in sorted(scans_for_fnames(filename_to_list(self.inputs.in_files))):
        entries.append("'" + fname + "',\n")
        input_dir = os.path.dirname(fname)
    in_files = ("{" + "".join(entries))[:-2] + "}"  # strip trailing ",\n"
    self.input_dir = input_dir
    subs = dict(in_files=in_files,
                in_dir=input_dir,
                first_image_type=str(self.inputs.first_image_type),
                TR=str(self.inputs.TR))
    m_code = Template("""
warning('off','all');
cd('$in_dir');
input = char($in_files);
asl_script(input,$first_image_type,0,$TR);
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code,
                           matlab_cmd="matlab -nodesktop -nosplash",
                           mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):  # @UnusedVariable
    """Invoke the NODDI CreateROI MATLAB helper on the input image."""
    m_code = "CreateROI('{}', '{}', '{}');".format(self.inputs.in_file,
                                                   self.inputs.brain_mask,
                                                   self._gen_outfilename())
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Dispatch each input file to the matching MATLAB plotting helper:
    .txt -> realignment plot, .mat -> design matrix, .nii/.img -> jpeg/ps."""
    from nipype.interfaces.spm.base import scans_for_fname, scans_for_fnames
    from nipype.utils.filemanip import filename_to_list, list_to_filename
    # Render the python list literal as a MATLAB cell array.
    subs = {
        'in_file': str(self.inputs.in_file).replace("[", "{").replace("]", "}"),
        'out_file': str(self.inputs.out_file),
    }
    m_code = Template("""
warning('off','all');
input = $in_file;
output = '$out_file';
if isstr(input)
    input = {input};
end
for in = input
    in = in{1};
    [pathstr,name,ext] = fileparts(in);
    if strcmp(ext,'.txt')
        plot_realignment_parameters(in,output);
    elseif strcmp(ext,'.mat')
        plot_design_matrix(in,output);
    elseif strcmp(ext,'.nii') || strcmp(ext,'.img')
        nifti2jpeg(in,'-axial -histogram');
        system(['jpeg2ps ' output ' ' pathstr '/*.jpg' ]);
    end
end
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code,
                           matlab_cmd="matlab -nodesktop -nosplash",
                           mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Run the fcon1000 ReHo MATLAB routine on in_file within mask_file."""
    subs = dict(in_file=self.inputs.in_file,
                mask_file=self.inputs.mask_file,
                out_file=self.inputs.out_file)
    # NOTE(review): the addpath below hard-codes a user-specific library
    # location; confirm it exists on the execution host.
    m_code = Template("""addpath('/home/sharad/fcon1000/lib/');in_file = '$in_file';mask_file = '$mask_file';out_file = '$out_file';reho(in_file,mask_file,out_file);exit;""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _matlab_cmd_update(self):
    """(Re)build the MatlabCommand for this interface.

    The MatlabCommand has to be created here because matlab_cmd is not a
    proper input and can only be set at construction time.
    """
    self.mlab = MatlabCommand(matlab_cmd=self.inputs.matlab_cmd,
                              mfile=self.inputs.mfile,
                              paths=self.inputs.paths,
                              uses_mcr=self.inputs.use_mcr)
    # Name the generated script after the concrete interface class.
    cls_name = self.__class__.__name__.split('.')[-1].lower()
    self.mlab.inputs.script_file = 'pyscript_%s.m' % cls_name
def _run_interface(self, runtime):  # @UnusedVariable
    """Export fitted NODDI parameter maps to NIfTI via MATLAB."""
    m_code = """
        SaveParamsAsNIfTI('{params}', '{roi}', '{brain_mask}', '{prefix}');
        """.format(params=self.inputs.params_file,
                   roi=self.inputs.roi_file,
                   brain_mask=self.inputs.brain_mask_file,
                   prefix=self.inputs.output_prefix)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def run_matlab_cmd(cmd):
    """Evaluate `cmd` in MATLAB and return what it printed, with the
    MATLAB start-up splash stripped off."""
    delim = '????????'  # A string that won't occur in the Matlab splash
    matlab_cmd = MatlabCommand(
        script="fprintf('{}'); fprintf({}); exit;".format(delim, cmd))
    tmp_dir = tempfile.mkdtemp()
    try:
        run_result = matlab_cmd.run(cwd=tmp_dir)
        # Everything before the delimiter is start-up chatter; keep the rest.
        return run_result.runtime.stdout.split(delim)[1]
    finally:
        shutil.rmtree(tmp_dir)
def _run_interface(self, runtime):
    """cd into out_folder and run WaveletDespike on the input image."""
    subs = dict(in_file=self.inputs.in_file,
                out_folder=self.inputs.out_folder,
                subject_id=self.inputs.subject_id)
    # MATLAB code template
    m_code = Template("""
cd '$out_folder'
WaveletDespike('$in_file','$subject_id')""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):  # @UnusedVariable
    """Run NODDI batch fitting: build the protocol and model, then fit
    the ROI with the requested number of threads."""
    m_code = """
        protocol = FSL2Protocol('{bvals}', '{bvecs}');
        noddi = MakeModel('{model}');
        batch_fitting('{roi}', protocol, noddi, '{out_file}', {nthreads});
        """.format(bvals=self.inputs.bvals_file,
                   bvecs=self.inputs.bvecs_file,
                   model=self.inputs.model,
                   roi=self.inputs.roi_file,
                   out_file=self._gen_outfilename(),
                   nthreads=self.inputs.nthreads)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Compute pairwise Fisher-z correlations between ROI time series in
    MATLAB and append them as CSV rows to out_file.

    Fix: the sanity-check branch called `fprint`, which is not a MATLAB
    function — the script would abort with an undefined-function error
    instead of printing the message.  Corrected to `fprintf`.
    """
    from nipype.interfaces.spm.base import scans_for_fname, scans_for_fnames
    from nipype.utils.filemanip import filename_to_list, list_to_filename
    # setup parameters: render python lists as MATLAB cell arrays
    d = dict()
    d['in_files'] = str(self.inputs.in_files).replace("[", "{").replace("]", "}")
    d['roi_names'] = str(self.inputs.roi_names).replace("[", "{").replace("]", "}")
    d['task_name'] = str(self.inputs.task_name)
    d['out_file'] = str(self.inputs.out_file)
    myscript = Template("""
warning('off','all');
in_files = $in_files;
roi_names = $roi_names;
task_name = '$task_name';
out_file = '$out_file';

% sanity check
if length(roi_names) ~= length(in_files)
    fprintf('Error: number of 1D average files not the same as their names');
    exit;
end

% import 1D ROI average files into N vectors
roi_img = cell(1,length(in_files));
for i=1:length(in_files)
    roi_img{i} = importdata(in_files{i});
end

% calculate Z-scores
fid = fopen(out_file, 'wt');
for i=1:length(roi_img)
    for j=1:length(roi_img)
        if i ~= j
            z=atanh(corr(roi_img{i},roi_img{j}));
            fprintf(fid,'%s,%s_%s,%d,N/A,Z_Score\\n',task_name,roi_names{i},roi_names{j},z);
        end
    end
end
fclose(fid);
exit;
""").substitute(d)
    mlab = MatlabCommand(script=myscript, mfile=True)
    result = mlab.run()
    return result.runtime
def _run_interface(self, runtime):
    """Band-pass filter (0.01-0.08 Hz) and quadratically detrend a 1D
    time series via MATLAB, saving the result as ASCII."""
    subs = dict(in_file=self.inputs.in_file, out_file=self.inputs.out_file)
    # this is your MATLAB code template
    m_code = Template("""oned = load('$in_file');
bpf = bandpass(oned, [0.01 0.08]);
bpfdt = detrend(bpf, 2);
save('$out_file', 'bpfdt', '-ascii');
exit;""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def bias_field_correction(filename, pathSPM12):
    """Apply an SPM12 bias field correction (NewSegment) to an image.

    Renzo recommended to perform a bias field correction using SPM12
    before doing the segmentation with FreeSurfer.  FreeSurfer recommends
    Bias FWHM = 18 and Sampling distance = 2 for MEMPRAGE at 7 T, which
    is also set here.  Outputs are saved in the input folder.

    Fixes: the docstring previously documented a `prefix` parameter that
    does not exist; the six near-identical tissue tuples are now built in
    a loop.

    Inputs:
        *filename: path of input image.
        *pathSPM12: path to SPM12 toolbox.

    created by Daniel Haenelt
    Date created: 01-11-2018
    Last modified: 17-12-2018
    """
    import os
    from nipype.interfaces.spm import NewSegment
    from nipype.interfaces.matlab import MatlabCommand

    # set matlab path to SPM12 folder
    MatlabCommand.set_default_paths(pathSPM12)

    # run from the input file's folder so outputs land next to it
    path = os.path.dirname(filename)
    os.chdir(path)

    # bias field correction
    bias = NewSegment()
    bias.inputs.channel_files = os.path.join(filename)
    # (bias regularisation, bias FWHM, (save field, save corrected))
    bias.inputs.channel_info = (0.001, 18, (True, True))
    bias.inputs.affine_regularization = "mni"
    bias.inputs.sampling_distance = 2
    bias.inputs.use_v8struct = True
    bias.inputs.warping_regularization = [0, 0.001, 0.5, 0.05, 0.2]
    bias.inputs.write_deformation_fields = [False, False]
    bias.inputs.mfile = True

    # One entry per TPM volume: ((tpm, index), n_gaussians,
    # (native, dartel), (unmodulated, modulated)) — no tissue output needed.
    tpm = os.path.join(pathSPM12, "tpm/TPM.nii")
    n_gaussians = [1, 1, 2, 3, 4, 2]
    bias.inputs.tissues = [((tpm, i + 1), ng, (False, False), (False, False))
                           for i, ng in enumerate(n_gaussians)]
    bias.run()
def _run_interface(self, runtime):
    """Concatenate CSF and motion regressors into one ASCII matrix using
    MATLAB horzcat."""
    mapping = dict(in_file_a=self.inputs.in_file_a,
                   in_file_b=self.inputs.in_file_b,
                   out_file=self.inputs.out_file)
    # this is your MATLAB code template
    matlab_code = Template("""moco = load('$in_file_a');
csf = load('$in_file_b');
regmodel = horzcat(csf, moco);
save('$out_file', 'regmodel', '-ascii');
exit;""").substitute(mapping)
    cmd = MatlabCommand(script=matlab_code, mfile=True)
    outcome = cmd.run()
    return outcome.runtime
def _run_interface(self, runtime):
    """Compute a pCASL CBF map in MATLAB and save it as CBF_pCASL.mat
    next to the input file."""
    subs = dict(in_file="'" + str(self.inputs.in_file) + "'",
                ref_file="'" + str(self.inputs.ref_file) + "'")
    # NOTE(review): `dirname` is not a stock MATLAB builtin (fileparts
    # is) — presumably supplied on the MATLAB path by the project; confirm.
    m_code = Template("""
warning('off','all');
cbf = cbfmap_base_pCASL($in_file,$ref_file);
save([dirname($in_file) '/CBF_pCASL.mat'],'cbf');
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code,
                           matlab_cmd="matlab -nodesktop -nosplash",
                           mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Run a PPPI (gPPI toolbox) analysis for one VOI.

    Rewrites absolute paths inside the subject's SPM.mat so the copied
    analysis works from this node's working directory, writes a PPPI
    parameter struct based on ppi_master_template.mat and then calls
    PPPI in MATLAB.
    """
    from nipype.interfaces.spm.base import scans_for_fname,scans_for_fnames
    from nipype.utils.filemanip import filename_to_list,list_to_filename
    # setup parameters (single-quoted so they interpolate as MATLAB strings)
    voi_file = str(self.inputs.voi_file)
    voi_name = "'"+str(self.inputs.voi_name)+"'"
    subject = "'"+str(self.inputs.subject)+"'"
    spm_file = "'"+str(self.inputs.spm_mat_file)+"'"
    contrast = str(self.inputs.comp_contrasts)
    # Strip brackets/quotes from the quoted path to get its directory.
    directory = os.path.dirname(re.sub("[\[\]']","",spm_file))
    d = dict(voi_file=voi_file,voi_name=voi_name,subject=subject,
             spm_file=spm_file,directory=directory,contrast=contrast)
    myscript = Template("""
warning('off','all');
% copy input
spm = $spm_file;
load(spm);
% Repoint result/beta volumes at absolute locations before moving swd,
% so the relocated SPM.mat still resolves its images.
SPM.VResMS.fname = [SPM.swd '/ResMS.img'];
SPM.xVol.VRpv.fname = [SPM.swd '/RPV.img'];
SPM.VM.fname = ls([SPM.swd '/../*/mask.img']);
for i=1:length(SPM.Vbeta)
    if strcmp(SPM.Vbeta(i).fname(1),'/') == 0
        SPM.Vbeta(i).fname = [SPM.swd '/' SPM.Vbeta(i).fname];
    end
end
SPM.swd = '$directory';
save(spm,'SPM');
cd('$directory');
%dos(['ln -sf ' path '/../*/*.[ih]* .']);
load('ppi_master_template.mat')
P.CompContrasts = $contrast;
P.VOI=char($voi_file);
P.Region=char($voi_name);
P.subject=char($subject);
P.directory=char('$directory');
P.FLmask =1;
P.equalroi = 0;
mat = [char($subject),'_analysis_',char($voi_name),'.mat'];
save(mat,'P');
PPPI(mat);
exit;
""").substitute(d)
    mlab = MatlabCommand(script=myscript, mfile=True)
    result = mlab.run()
    return result.runtime
def _run_interface(self, runtime):
    """Creates a dictionary to insert infile and outfile name, runs the
    matlab commands specified and saves the runtime variables"""
    mapping = {'in_file': self.inputs.in_file,
               'out_file': self.inputs.out_file}
    # MATLAB: load the 1D series, band-pass 0.01-0.08 Hz, quadratic
    # detrend, save as ASCII.
    matlab_code = Template("""oned = load('$in_file');
bpf = bandpass(oned, [0.01 0.08]);
bpfdt = detrend(bpf, 2);
save('$out_file', 'bpfdt', '-ascii');
exit;
""").substitute(mapping)
    cmd = MatlabCommand(script=matlab_code, mfile=True)
    outcome = cmd.run()
    return outcome.runtime
def _run_interface(self, runtime):  # @UnusedVariable
    """Run the QSM fillholes MATLAB routine on the input mask."""
    self.working_dir = os.path.abspath(os.getcwd())
    qsm_matlab_dir = os.path.abspath(
        os.path.join(os.path.dirname(nianalysis.interfaces.__file__),
                     'resources', 'matlab', 'qsm'))
    out_path = os.path.join(os.getcwd(), self._gen_filename('out_file'))
    m_code = ("set_param(0,'CharacterEncoding','UTF-8');\n"
              "addpath(genpath('{matlab_dir}'));\n"
              "fillholes('{in_file}', '{out_file}');\n"
              "exit;\n").format(in_file=self.inputs.in_file,
                                out_file=out_path,
                                matlab_dir=qsm_matlab_dir)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _matlab_cmd_update(self):
    """Recreate self.mlab from the current inputs.

    The MatlabCommand has to be built here because matlab_cmd is not a
    proper input and can only be supplied at construction time.
    """
    self.mlab = MatlabCommand(matlab_cmd=self.inputs.matlab_cmd,
                              mfile=self.inputs.mfile,
                              paths=self.inputs.paths)
    # Derive the generated script name from the concrete class name.
    interface_name = self.__class__.__name__.split('.')[-1].lower()
    self.mlab.inputs.script_file = 'pyscript_%s.m' % interface_name
def _run_interface(self, runtime):  # @UnusedVariable
    """Run the QSM reconstruction MATLAB entry point over in_dir,
    writing outputs into the current working directory."""
    self.working_dir = os.path.abspath(os.getcwd())
    qsm_matlab_dir = os.path.abspath(
        os.path.join(os.path.dirname(nianalysis.interfaces.__file__),
                     'resources', 'matlab', 'qsm'))
    m_code = (
        "set_param(0,'CharacterEncoding','UTF-8');\n"
        "addpath(genpath('{matlab_dir}'));\n"
        "QSM('{in_dir}', '{mask_file}', '{out_dir}', {echo_times}, {num_channels});\n"
        "exit;").format(in_dir=self.inputs.in_dir,
                        mask_file=self.inputs.mask_file,
                        out_dir=self.working_dir,
                        echo_times=self.inputs.echo_times,
                        num_channels=self.inputs.num_channels,
                        matlab_dir=qsm_matlab_dir)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def _run_interface(self, runtime):
    """Run the mCompCor MATLAB routine over the given masks, movement
    parameters and regressor specification."""
    from nipype.interfaces.spm.base import scans_for_fname, scans_for_fnames
    from nipype.utils.filemanip import filename_to_list, list_to_filename
    subs = {
        'source': str(self.inputs.source),
        'brain': str(self.inputs.brain_mask),
        'white': str(self.inputs.white_mask),
        'movement': str(self.inputs.movement),
        'regressors': "'" + str(self.inputs.regressors) + "'",
    }
    m_code = Template("""
warning('off','all');
mCompCor($source,$white,$brain,$movement,$regressors);
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def version(matlab_cmd=None):
    """Returns the path to the SPM directory in the Matlab path
    If path not found, returns None.

    Parameters
    ----------
    matlab_cmd : String specifying default matlab command
        default None, will look for environment variable MATLABCMD
        and use if found, otherwise falls back on MatlabCommand
        default of 'matlab -nodesktop -nosplash'

    Returns
    -------
    spm_path : string representing path to SPM directory
        returns None of path not found
    """
    if matlab_cmd is None:
        try:
            matlab_cmd = os.environ['MATLABCMD']
        except KeyError:
            # Fix: catch only the expected missing-variable error instead
            # of a bare except that would also hide e.g. KeyboardInterrupt.
            matlab_cmd = 'matlab -nodesktop -nosplash'
    mlab = MatlabCommand(matlab_cmd=matlab_cmd)
    mlab.inputs.script = """
if isempty(which('spm')),
throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
end;
spm_path = spm('dir');
[name, version] = spm('ver');
fprintf(1, 'NIPYPE path:%s|name:%s|release:%s', spm_path, name, version);
exit;
"""
    mlab.inputs.mfile = False
    try:
        # stdout carries the NIPYPE path/name/release line printed above.
        out = mlab.run()
    except (IOError, RuntimeError) as e:
        # Fix: Python 3-compatible exception syntax (was "except ..., e").
        # if no Matlab at all -- exception could be raised
        # No Matlab -- no spm
        logger.debug(str(e))
        return None
def version(matlab_cmd=None):
    """Returns the path to the SPM directory in the Matlab path
    If path not found, returns None.

    Parameters
    ----------
    matlab_cmd : String specifying default matlab command
        default None, will look for environment variable MATLABCMD
        and use if found, otherwise falls back on MatlabCommand
        default of 'matlab -nodesktop -nosplash'

    Returns
    -------
    spm_path : string representing path to SPM directory
        returns None of path not found
    """
    if matlab_cmd is None:
        try:
            matlab_cmd = os.environ['MATLABCMD']
        except KeyError:
            # Fix: narrow the bare except to the only error os.environ[...]
            # raises for a missing variable.
            matlab_cmd = 'matlab -nodesktop -nosplash'
    mlab = MatlabCommand(matlab_cmd=matlab_cmd)
    mlab.inputs.script = """
if isempty(which('spm')),
throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
end;
spm_path = spm('dir');
[name, version] = spm('ver');
fprintf(1, 'NIPYPE path:%s|name:%s|release:%s', spm_path, name, version);
exit;
"""
    mlab.inputs.mfile = False
    try:
        # stdout carries the NIPYPE path/name/release line printed above.
        out = mlab.run()
    except (IOError, RuntimeError) as e:
        # Fix: "except (...), e" is Python 2-only syntax; "as e" works on
        # both Python 2.6+ and Python 3.
        # if no Matlab at all -- exception could be raised
        # No Matlab -- no spm
        logger.debug(str(e))
        return None
def satisfied(self):
    """Return True when the MATLAB-side test function resolves (i.e. the
    requirement is available), or when there is no test to run."""
    if self.test_func is None:
        return True  # No test available
    probe = ("try\n"
             " {}\n"
             "catch E\n"
             " fprintf(E.identifier);\n"
             "end\n".format(self.test_func))
    run_result = MatlabCommand(script=probe, mfile=True).run()
    # An undefined function prints MATLAB:UndefinedFunction via the catch.
    return run_result.runtime.stdout != 'MATLAB:UndefinedFunction'
def _run_interface(self, runtime):
    """Convert a text connectivity map to .mat via ConmapTxt2Mat."""
    subs = dict(in_file=self.inputs.in_file, out_file=self.inputs.out_file)
    # mfile=True writes the script to an .m file before execution; this
    # is more reliable and easier to debug than passing the code as a
    # command-line argument to the matlab executable, where it would be
    # reduced to a single line and stripped of any comments.
    m_code = Template("""in_file = '$in_file';
out_file = '$out_file';
ConmapTxt2Mat(in_file, out_file);
exit;
""").substitute(subs)
    matlab = MatlabCommand(script=m_code, mfile=True)
    return matlab.run().runtime
def version(matlab_cmd=None):
    """Returns the path to the SPM directory in the Matlab path
    If path not found, returns None.

    Parameters
    ----------
    matlab_cmd : String specifying default matlab command
        default None, will look for environment variable MATLABCMD
        and use if found, otherwise falls back on MatlabCommand
        default of 'matlab -nodesktop -nosplash'

    Returns
    -------
    spm_path : string representing path to SPM directory
        returns None of path not found
    """
    if matlab_cmd is None:
        try:
            matlab_cmd = os.environ['MATLABCMD']
        except KeyError:
            # Fix: catch only the missing-variable error instead of a bare
            # except that would also swallow SystemExit/KeyboardInterrupt.
            matlab_cmd = 'matlab -nodesktop -nosplash'
    mlab = MatlabCommand(matlab_cmd=matlab_cmd)
    mlab.inputs.script_file = 'spminfo'
    mlab.inputs.script = """
if isempty(which('spm')),
throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
end;
spm_path = spm('dir');
fprintf(1, 'NIPYPE %s', spm_path);
"""
    out = mlab.run()
    if out.runtime.returncode == 0:
        # Strip the MATLAB splash preceding the 'NIPYPE <path>' line.
        spm_path = sd._strip_header(out.runtime.stdout)
    else:
        logger.debug(out.runtime.stderr)
        return None
    return spm_path
def test_MatlabCommand_inputs():
    """Check MatlabCommand's input trait metadata against expectations
    (nose-style generator: one assertion per (trait, metadata) pair)."""
    expected = dict(
        args=dict(argstr='%s'),
        environ=dict(nohash=True, usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        logfile=dict(argstr='-logfile %s'),
        mfile=dict(usedefault=True),
        nodesktop=dict(argstr='-nodesktop', nohash=True, usedefault=True),
        nosplash=dict(argstr='-nosplash', nohash=True, usedefault=True),
        paths=dict(),
        postscript=dict(usedefault=True),
        prescript=dict(usedefault=True),
        script=dict(argstr='-r "%s;exit"', mandatory=True, position=-1),
        script_file=dict(usedefault=True),
        single_comp_thread=dict(argstr='-singleCompThread', nohash=True),
        terminal_output=dict(mandatory=True, nohash=True),
        uses_mcr=dict(nohash=True,
                      xor=['nodesktop', 'nosplash', 'single_comp_thread']),
    )
    inputs = MatlabCommand.input_spec()
    for trait_name, metadata in expected.items():
        for meta_key, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[trait_name], meta_key), value
def run_m_script(m_file):
    """
    Runs a matlab m file for SPM, determining automatically if it must be
    launched with SPM or SPM Standalone.

    If launched with SPM standalone, the line 'spm_jobman('run',
    matlabbatch)' must be removed because unnecessary.

    Args:
        m_file: (str) path to Matlab m file

    Returns:
        output_mat_file: (str) path to the SPM.mat file needed in SPM analysis
    """
    import platform
    from os import system
    from os.path import abspath, basename, dirname, isfile, join

    from nipype.interfaces.matlab import MatlabCommand, get_matlab_command

    import clinica.pipelines.statistics_volume.statistics_volume_utils as utls
    from clinica.utils.spm import spm_standalone_is_available

    # Basic input validation.
    # NOTE(review): the message below lacks a space before "does not exist".
    assert isinstance(m_file, str), "[Error] Argument must be a string"
    if not isfile(m_file):
        raise FileNotFoundError("[Error] File " + m_file + "does not exist")
    assert m_file[-2:] == ".m", (
        "[Error] " + m_file + " is not a Matlab file (extension must be .m)")

    # Generate command line to run
    if spm_standalone_is_available():
        # Standalone runs the batch itself, so the trailing
        # spm_jobman('run', matlabbatch) line is removed as unnecessary.
        utls.delete_last_line(m_file)
        # SPM standalone must be run directly from its root folder
        if platform.system().lower().startswith("darwin"):
            # Mac OS
            cmdline = (
                "cd $SPMSTANDALONE_HOME && ./run_spm12.sh $MCR_HOME batch "
                + m_file)
        elif platform.system().lower().startswith("linux"):
            # Linux OS
            cmdline = "$SPMSTANDALONE_HOME/run_spm12.sh $MCR_HOME batch " + m_file
        else:
            raise SystemError("Clinica only support Mac OS and Linux")
        system(cmdline)
    else:
        # Plain MATLAB + SPM: run the script through nipype's MatlabCommand.
        MatlabCommand.set_default_matlab_cmd(get_matlab_command())
        matlab = MatlabCommand()
        if platform.system().lower().startswith("linux"):
            # presumably works around Linux graphics-driver issues -- confirm
            matlab.inputs.args = "-nosoftwareopengl"
        matlab.inputs.paths = dirname(m_file)
        # Script name is the file's basename without the ".m" extension.
        matlab.inputs.script = basename(m_file)[:-2]
        matlab.inputs.single_comp_thread = False
        matlab.inputs.logfile = abspath("./matlab_output.log")
        matlab.run()
    # The SPM.mat is expected two levels up in the 2_sample_t_test folder.
    output_mat_file = abspath(
        join(dirname(m_file), "..", "2_sample_t_test", "SPM.mat"))
    if not isfile(output_mat_file):
        raise RuntimeError("Output matrix " + output_mat_file + " was not produced")
    return output_mat_file
def test_MatlabCommand_inputs():
    """Verify the metadata declared on every MatlabCommand input trait.

    Yields one nose-style assertion per metadata key so each mismatch is
    reported individually.
    """
    input_map = dict(
        args=dict(argstr='%s'),
        environ=dict(nohash=True, usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        logfile=dict(argstr='-logfile %s'),
        mfile=dict(usedefault=True),
        nodesktop=dict(argstr='-nodesktop', nohash=True, usedefault=True),
        nosplash=dict(argstr='-nosplash', nohash=True, usedefault=True),
        paths=dict(),
        postscript=dict(usedefault=True),
        prescript=dict(usedefault=True),
        script=dict(argstr='-r "%s;exit"', mandatory=True, position=-1),
        script_file=dict(usedefault=True),
        single_comp_thread=dict(argstr='-singleCompThread', nohash=True),
        terminal_output=dict(mandatory=True, nohash=True),
        uses_mcr=dict(nohash=True,
                      xor=['nodesktop', 'nosplash', 'single_comp_thread']),
    )
    spec = MatlabCommand.input_spec()
    all_traits = spec.traits()
    for key, metadata in input_map.items():
        trait = all_traits[key]
        for metakey, value in metadata.items():
            yield assert_equal, getattr(trait, metakey), value
def run_matlab(caps_dir, output_dir, subjects_visits_tsv, pipeline_parameters):
    """
    Wrap the call of SurfStat using clinicasurfstat.m Matlab script.

    Args:
        caps_dir (str): CAPS directory containing surface-based features
        output_dir (str): Output directory that will contain outputs of clinicasurfstat.m
        subjects_visits_tsv (str): TSV file containing the GLM information
        pipeline_parameters (dict): parameters of StatisticsSurface pipeline
    """
    import os
    from nipype.interfaces.matlab import MatlabCommand, get_matlab_command
    import clinica.pipelines as clinica_pipelines
    from clinica.utils.check_dependency import check_environment_variable
    from clinica.pipelines.statistics_surface.statistics_surface_utils import covariates_to_design_matrix, get_string_format_from_tsv

    # clinicasurfstat.m lives in clinica's lib/clinicasurfstat folder.
    path_to_matlab_script = os.path.join(
        os.path.dirname(clinica_pipelines.__path__[0]), 'lib', 'clinicasurfstat')
    freesurfer_home = check_environment_variable('FREESURFER_HOME', 'FreeSurfer')

    MatlabCommand.set_default_matlab_cmd(get_matlab_command())
    matlab = MatlabCommand()
    matlab.inputs.paths = path_to_matlab_script
    # Positional arguments first, then (name, value) option pairs
    # ('sizeoffwhm', fwhm, 'thresholduncorrectedpvalue', 0.001, ...).
    matlab.inputs.script = """
    clinicasurfstat('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %d, '%s', %.3f, '%s', %.3f, '%s', %.3f);
    """ % (os.path.join(caps_dir, 'subjects'),
           output_dir,
           subjects_visits_tsv,
           covariates_to_design_matrix(pipeline_parameters['contrast'],
                                       pipeline_parameters['covariates']),
           pipeline_parameters['contrast'],
           get_string_format_from_tsv(subjects_visits_tsv),
           pipeline_parameters['glm_type'],
           pipeline_parameters['group_label'],
           freesurfer_home,
           pipeline_parameters['custom_file'],
           pipeline_parameters['measure_label'],
           'sizeoffwhm',
           pipeline_parameters['full_width_at_half_maximum'],
           'thresholduncorrectedpvalue', 0.001,
           'thresholdcorrectedpvalue', 0.05,
           'clusterthreshold',
           pipeline_parameters['cluster_threshold'])
    # This will create a file: pyscript.m , the pyscript.m is the default name
    matlab.inputs.mfile = True
    # This will stop running with single thread
    matlab.inputs.single_comp_thread = False
    matlab.inputs.logfile = 'group-' + pipeline_parameters['group_label'] + '_matlab.log'
    # cprint("Matlab logfile is located at the following path: %s" % matlab.inputs.logfile)
    # cprint("Matlab script command = %s" % matlab.inputs.script)
    # cprint("MatlabCommand inputs flag: single_comp_thread = %s" % matlab.inputs.single_comp_thread)
    # cprint("MatlabCommand choose which matlab to use(matlab_cmd): %s" % get_matlab_command())
    matlab.run()
    return output_dir
def run_m_script(m_file):
    """
    Runs a matlab m file for SPM, determining automatically if it must be
    launched with SPM or SPM Standalone.

    If launched with SPM standalone, the line 'spm_jobman('run',
    matlabbatch)' must be removed because unnecessary.

    Args:
        m_file: (str) path to Matlab m file

    Returns:
        output_mat_file: (str) path to the SPM.mat file needed in SPM analysis
    """
    from os.path import isfile, dirname, basename, abspath, join
    from os import system
    from clinica.utils.spm import use_spm_standalone
    import clinica.pipelines.statistics_volume.statistics_volume_utils as utls
    from nipype.interfaces.matlab import MatlabCommand, get_matlab_command
    import platform

    # Basic input validation.
    # NOTE(review): the message below lacks a space before "does not exist".
    assert isinstance(m_file, str), '[Error] Argument must be a string'
    if not isfile(m_file):
        raise FileNotFoundError('[Error] File ' + m_file + 'does not exist')
    assert m_file[-2:] == '.m', '[Error] ' + m_file + ' is not a Matlab file (extension must be .m)'

    # Generate command line to run
    if use_spm_standalone():
        # Standalone runs the batch itself, so the trailing
        # spm_jobman('run', matlabbatch) line is removed as unnecessary.
        utls.delete_last_line(m_file)
        # SPM standalone must be run directly from its root folder
        if platform.system().lower().startswith('darwin'):
            # Mac OS
            cmdline = 'cd $SPMSTANDALONE_HOME && ./run_spm12.sh $MCR_HOME batch ' + m_file
        elif platform.system().lower().startswith('linux'):
            # Linux OS
            cmdline = '$SPMSTANDALONE_HOME/run_spm12.sh $MCR_HOME batch ' + m_file
        else:
            raise SystemError('Clinica only support Mac OS and Linux')
        system(cmdline)
    else:
        # Plain MATLAB + SPM: run the script through nipype's MatlabCommand.
        MatlabCommand.set_default_matlab_cmd(get_matlab_command())
        matlab = MatlabCommand()
        if platform.system().lower().startswith('linux'):
            # presumably works around Linux graphics-driver issues -- confirm
            matlab.inputs.args = '-nosoftwareopengl'
        matlab.inputs.paths = dirname(m_file)
        # Script name is the file's basename without the ".m" extension.
        matlab.inputs.script = basename(m_file)[:-2]
        matlab.inputs.single_comp_thread = False
        matlab.inputs.logfile = abspath('./matlab_output.log')
        matlab.run()
    # The SPM.mat is expected two levels up in the 2_sample_t_test folder.
    output_mat_file = abspath(join(dirname(m_file), '..', '2_sample_t_test', 'SPM.mat'))
    if not isfile(output_mat_file):
        raise RuntimeError('Output matrix ' + output_mat_file + ' was not produced')
    return output_mat_file
# Imports and global configuration for the resting-state preprocessing script.
from nipype.interfaces.fsl.epi import ApplyTOPUP, TOPUP
from nipype.interfaces.freesurfer import Resample, Binarize, MRIConvert
from nipype.algorithms.confounds import CompCor
from nipype.interfaces.afni.preprocess import Bandpass
from nipype.interfaces.afni.utils import AFNItoNIFTI
from nipype.interfaces.ants import ApplyTransforms, Registration
from nipype.algorithms.misc import Gunzip
from pandas import DataFrame, Series

#set output file type for FSL to NIFTI
from nipype.interfaces.fsl.preprocess import FSLCommand
FSLCommand.set_default_output_type('NIFTI')

# MATLAB setup - Specify path to current SPM and the MATLAB's default mode
# NOTE(review): '~' is passed through literally here; confirm MATLAB/nipype
# expand it, or use an absolute SPM12 path.
from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths('~/spm12')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")

# Set study variables
setup='sherlock'
sample='6mo' #6mo or newborn
sequence='spiral'#spiral or mux6

# Per-environment data locations (cluster vs. local laptop).
if setup=='sherlock':
    studyhome = '/oak/stanford/groups/iang/BABIES_data/BABIES_rest'
    raw_data = studyhome + '/subjDir/all'
    output_dir = studyhome + '/processed/preproc'
    workflow_dir = studyhome + '/workflows'
elif setup=='Cat':
    studyhome = '/Users/catcamacho/Box/SNAP/BABIES/BABIES_rest'
    raw_data = studyhome + '/rest_raw'
# Imports and global configuration for the 1st-level fMRI example workflow.
from os.path import join as opj
from nipype.interfaces.afni import Despike
from nipype.interfaces.freesurfer import (BBRegister, ApplyVolTransform,
                                          Binarize, MRIConvert, FSCommand)
from nipype.interfaces.spm import (SliceTiming, Realign, Smooth, Level1Design,
                                   EstimateModel, EstimateContrast)
from nipype.interfaces.utility import Function, IdentityInterface
from nipype.interfaces.io import FreeSurferSource, SelectFiles, DataSink
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.algorithms.misc import TSNR, Gunzip
from nipype.algorithms.modelgen import SpecifySPMModel
from nipype.pipeline.engine import Workflow, Node, MapNode

# MATLAB - Specify path to current SPM and the MATLAB's default mode
from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")

# FreeSurfer - Specify the location of the freesurfer folder
# NOTE(review): '~' is not expanded by Python string handling -- confirm
# FSCommand accepts it or use an absolute path.
fs_dir = '~/nipype_tutorial/freesurfer'
FSCommand.set_default_subjects_dir(fs_dir)

###
# Specify variables
experiment_dir = '~/nipype_tutorial'  # location of experiment folder
subject_list = ['sub001', 'sub002', 'sub003', 'sub004', 'sub005',
                'sub006', 'sub007', 'sub008', 'sub009',
                'sub010']  # list of subject identifiers
output_dir = 'output_fMRI_example_1st'  # name of 1st-level output folder
### # Import modules from os.path import join as opj from nipype.interfaces.io import SelectFiles, DataSink from nipype.interfaces.spm import (OneSampleTTestDesign, EstimateModel, EstimateContrast, Threshold) from nipype.interfaces.utility import IdentityInterface from nipype.pipeline.engine import Workflow, Node # Specification to MATLAB from nipype.interfaces.matlab import MatlabCommand MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12') MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") ### # Specify variables experiment_dir = '~/nipype_tutorial' # location of experiment folder output_dir = 'output_fMRI_example_2nd_ants' # name of 2nd-level output folder input_dir_norm = 'output_fMRI_example_norm_ants' # name of norm output folder working_dir = 'workingdir_fMRI_example_2nd_ants' # name of working directory subject_list = [ 'sub001', 'sub002', 'sub003', 'sub004', 'sub005', 'sub006', 'sub007', 'sub008', 'sub009', 'sub010' ] # list of subject identifiers contrast_list = [ 'con_0001', 'con_0002', 'con_0003', 'con_0004', 'ess_0005', 'ess_0006' ] # list of contrast identifiers ### # Specify 2nd-Level Analysis Nodes
# Apply the nipype configuration built earlier in the script (cfg is
# defined before this excerpt).
config.update_config(cfg)

import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni
import nipype.interfaces.ants as ants
import nipype.interfaces.spm as spm
from nipype.interfaces.utility import IdentityInterface, Function, Select, Merge
from os.path import join as opj
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.pipeline.engine import Workflow, Node, MapNode
import numpy as np
import matplotlib.pyplot as plt

# MATLAB/SPM configuration for this machine.
from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths('/media/amr/HDD/Sofwares/spm12/')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")

# import nipype.interfaces.matlab as mlab
# mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
# mlab.MatlabCommand.set_default_paths('/home/amr/Documents/MATLAB/toolbox/spm8')
#-----------------------------------------------------------------------------------------------------
# In[2]:

# Study layout: subjects and runs to iterate over.
experiment_dir = '/media/amr/HDD/Work/Stimulation'
subject_list = ['003','005','008','011','018','019','020', '059', '060','062','063','066']
session_list = ['run001', 'run002', 'run003']
def _run_interface(self, runtime):
    # Stage the input images into a local 'origdata' directory, write a
    # GIFT/icatb batch file (input_batch.m) describing the group ICA
    # parameters, and run the analysis through MATLAB.
    in_files = self.inputs.in_files
    data_dir = op.join(os.getcwd(), 'origdata')
    if not op.exists(data_dir):
        os.makedirs(data_dir)

    all_names = []
    print 'Multiple ({n}) input images detected! Copying to {d}...'.format(
        n=len(self.inputs.in_files), d=data_dir)
    for in_file in self.inputs.in_files:
        path, name, ext = split_filename(in_file)
        shutil.copyfile(in_file, op.join(data_dir, name) + ext)
        # Analyze images come in .img/.hdr pairs: copy the sibling file too.
        if ext == '.img':
            shutil.copyfile(
                op.join(path, name) + '.hdr',
                op.join(data_dir, name) + '.hdr')
        elif ext == '.hdr':
            shutil.copyfile(
                op.join(path, name) + '.img',
                op.join(data_dir, name) + '.img')
        all_names.append(name)
    print 'Copied!'

    # GIFT selects files by wildcard: use the common filename prefix of
    # all copied images plus '*'. NOTE(review): `ext` is whatever the
    # *last* file's extension was — assumes all inputs share one extension.
    input_files_as_str = op.join(
        data_dir, os.path.commonprefix(all_names) + '*' + ext)

    number_of_components = self.inputs.desired_number_of_components
    output_dir = os.getcwd()
    prefix = self.inputs.prefix
    d = dict(output_dir=output_dir,
             prefix=prefix,
             number_of_components=number_of_components,
             in_files=input_files_as_str)
    # icatb batch-file template; values are interpolated from `d` above.
    variables = Template("""
%% After entering the parameters, use icatb_batch_file_run(inputFile);
modalityType = 'fMRI';
which_analysis = 1;
perfType = 1;
keyword_designMatrix = 'no';
dataSelectionMethod = 4;
input_data_file_patterns = {'$in_files'};
dummy_scans = 0;
outputDir = '$output_dir';
prefix = '$prefix';
maskFile = [];
group_pca_type = 'subject specific';
backReconType = 'gica';
%% Data Pre-processing options
% 1 - Remove mean per time point
% 2 - Remove mean per voxel
% 3 - Intensity normalization
% 4 - Variance normalization
preproc_type = 3;
pcaType = 1;
pca_opts.stack_data = 'yes';
pca_opts.precision = 'single';
pca_opts.tolerance = 1e-4;
pca_opts.max_iter = 1000;
numReductionSteps = 2;
doEstimation = 0;
estimation_opts.PC1 = 'mean';
estimation_opts.PC2 = 'mean';
estimation_opts.PC3 = 'mean';
numOfPC1 = $number_of_components;
numOfPC2 = $number_of_components;
numOfPC3 = 0;
%% Scale the Results. Options are 0, 1, 2, 3 and 4
% 0 - Don't scale
% 1 - Scale to Percent signal change
% 2 - Scale to Z scores
% 3 - Normalize spatial maps using the maximum intensity value and multiply timecourses using the maximum intensity value
% 4 - Scale timecourses using the maximum intensity value and spatial maps using the standard deviation of timecourses
scaleType = 0;
algoType = 1;
refFunNames = {'Sn(1) right*bf(1)', 'Sn(1) left*bf(1)'};
refFiles = {which('ref_default_mode.nii'), which('ref_left_visuomotor.nii'), which('ref_right_visuomotor.nii')};
%% ICA Options - Name by value pairs in a cell array. Options will vary depending on the algorithm. See icatb_icaOptions for more details. Some options are shown below.
%% Infomax - {'posact', 'off', 'sphering', 'on', 'bias', 'on', 'extended', 0}
%% FastICA - {'approach', 'symm', 'g', 'tanh', 'stabilization', 'on'}
icaOptions = {'posact', 'off', 'sphering', 'on', 'bias', 'on', 'extended', 0};
""").substitute(d)

    file = open('input_batch.m', 'w')
    file.writelines(variables)
    file.close()

    # Read the batch file back with icatb, force NIfTI filtering / no
    # zipping, then run the full analysis.
    script = """param_file = icatb_read_batch_file('input_batch.m');
load(param_file);
global FUNCTIONAL_DATA_FILTER;
global ZIP_IMAGE_FILES;
FUNCTIONAL_DATA_FILTER = '*.nii';
ZIP_IMAGE_FILES = 'No';
icatb_runAnalysis(sesInfo, 1);"""

    result = MatlabCommand(script=script, mfile=True,
                           prescript=[''], postscript=[''])
    r = result.run()
    # NOTE(review): the MATLAB result `r` is discarded; the incoming
    # `runtime` is returned unchanged, so MATLAB failures are not
    # propagated through the return value here.
    return runtime
def _run_interface(self, runtime):
    # Run the PVELab partial-volume-effect correction batch for one
    # PET/T1 pair: convert NIfTI inputs to Analyze format, write the
    # subject list and ROI config files, execute the MATLAB batch, then
    # convert every output volume back to NIfTI with the T1 affine.
    list_path = op.abspath("SubjectList.lst")
    pet_path, _ = nifti_to_analyze(self.inputs.pet_file)
    t1_path, _ = nifti_to_analyze(self.inputs.t1_file)
    f = open(list_path, 'w')
    f.write("%s;%s" % (pet_path, t1_path))
    f.close()

    orig_t1 = nb.load(self.inputs.t1_file)
    # Keep the original T1 affine to restore orientation on the outputs.
    orig_affine = orig_t1.get_affine()

    # Tissue probability maps are converted to uint8 Analyze volumes.
    gm_uint8 = switch_datatype(self.inputs.grey_matter_file)
    gm_path, _ = nifti_to_analyze(gm_uint8)
    iflogger.info("Writing to %s" % gm_path)

    # Remap ROI labels (optionally via the FreeSurfer LUT) and fix the
    # WM/CSF volumes to the values the batch expects.
    fixed_roi_file, fixed_wm, fixed_csf, remap_dict = fix_roi_values(
        self.inputs.roi_file, self.inputs.grey_matter_binary_mask,
        self.inputs.white_matter_file, self.inputs.csf_file,
        self.inputs.use_fs_LUT)
    rois_path, _ = nifti_to_analyze(fixed_roi_file)
    iflogger.info("Writing to %s" % rois_path)
    iflogger.info("Writing to %s" % fixed_wm)
    iflogger.info("Writing to %s" % fixed_csf)

    wm_uint8 = switch_datatype(fixed_wm)
    wm_path, _ = nifti_to_analyze(wm_uint8)
    iflogger.info("Writing to %s" % wm_path)

    csf_uint8 = switch_datatype(fixed_csf)
    csf_path, _ = nifti_to_analyze(csf_uint8)
    iflogger.info("Writing to %s" % csf_path)

    if self.inputs.use_fs_LUT:
        # Use the FreeSurfer color lookup table to name the ROIs.
        fs_dir = os.environ['FREESURFER_HOME']
        LUT = op.join(fs_dir, "FreeSurferColorLUT.txt")
        dat_path = write_config_dat(
            fixed_roi_file, LUT, remap_dict)
    else:
        dat_path = write_config_dat(
            fixed_roi_file)
    iflogger.info("Writing to %s" % dat_path)

    d = dict(
        list_path=list_path,
        gm_path=gm_path,
        wm_path=wm_path,
        csf_path=csf_path,
        rois_path=rois_path,
        dat_path=dat_path,
        X_PSF=self.inputs.x_dir_point_spread_function_FWHM,
        Y_PSF=self.inputs.y_dir_point_spread_function_FWHM,
        Z_PSF=self.inputs.z_dir_point_spread_function_FWHM)
    script = Template("""
    filelist = '$list_path';
    gm = '$gm_path';
    wm = '$wm_path';
    csf = '$csf_path';
    rois = '$rois_path';
    dat = '$dat_path';
    x_fwhm = '$X_PSF';
    y_fwhm = '$Y_PSF';
    z_fwhm = '$Z_PSF';
    runbatch_nogui(filelist, gm, wm, csf, rois, dat, x_fwhm, y_fwhm, z_fwhm)
    """).substitute(d)

    mlab = MatlabCommand(script=script, mfile=True, prescript=[''],
                         postscript=[''])
    result = mlab.run()

    # The batch writes its Analyze outputs into pve_<petname>/; convert
    # each expected volume back to NIfTI with the original T1 affine.
    _, foldername, _ = split_filename(self.inputs.pet_file)
    occu_MG_img = glob.glob("pve_%s/r_volume_Occu_MG.img" % foldername)[0]
    analyze_to_nifti(occu_MG_img, affine=orig_affine)
    occu_meltzer_img = glob.glob(
        "pve_%s/r_volume_Occu_Meltzer.img" % foldername)[0]
    analyze_to_nifti(occu_meltzer_img, affine=orig_affine)
    meltzer_img = glob.glob("pve_%s/r_volume_Meltzer.img" % foldername)[0]
    analyze_to_nifti(meltzer_img, affine=orig_affine)
    MG_rousset_img = glob.glob(
        "pve_%s/r_volume_MGRousset.img" % foldername)[0]
    analyze_to_nifti(MG_rousset_img, affine=orig_affine)
    MGCS_img = glob.glob("pve_%s/r_volume_MGCS.img" % foldername)[0]
    analyze_to_nifti(MGCS_img, affine=orig_affine)
    virtual_PET_img = glob.glob(
        "pve_%s/r_volume_Virtual_PET.img" % foldername)[0]
    analyze_to_nifti(virtual_PET_img, affine=orig_affine)
    centrum_semiovalue_WM_img = glob.glob(
        "pve_%s/r_volume_CSWMROI.img" % foldername)[0]
    analyze_to_nifti(centrum_semiovalue_WM_img, affine=orig_affine)
    alfano_alfano_img = glob.glob(
        "pve_%s/r_volume_AlfanoAlfano.img" % foldername)[0]
    analyze_to_nifti(alfano_alfano_img, affine=orig_affine)
    alfano_cs_img = glob.glob("pve_%s/r_volume_AlfanoCS.img" % foldername)[0]
    analyze_to_nifti(alfano_cs_img, affine=orig_affine)
    alfano_rousset_img = glob.glob(
        "pve_%s/r_volume_AlfanoRousset.img" % foldername)[0]
    analyze_to_nifti(alfano_rousset_img, affine=orig_affine)
    mg_alfano_img = glob.glob("pve_%s/r_volume_MGAlfano.img" % foldername)[0]
    analyze_to_nifti(mg_alfano_img, affine=orig_affine)
    mask_img = glob.glob("pve_%s/r_volume_Mask.img" % foldername)[0]
    analyze_to_nifti(mask_img, affine=orig_affine)
    # The PSF volume is written without re-applying the T1 affine.
    PSF_img = glob.glob("pve_%s/r_volume_PSF.img" % foldername)[0]
    analyze_to_nifti(PSF_img)

    try:
        rousset_mat_file = glob.glob(
            "pve_%s/r_volume_Rousset.mat" % foldername)[0]
    except IndexError:
        # On Ubuntu using pve64, the matlab file is saved with a capital M
        rousset_mat_file = glob.glob(
            "pve_%s/r_volume_Rousset.Mat" % foldername)[0]

    shutil.copyfile(rousset_mat_file, op.abspath("r_volume_Rousset.mat"))
    results_text_file = glob.glob(
        "pve_%s/r_volume_pve.txt" % foldername)[0]
    shutil.copyfile(results_text_file, op.abspath("r_volume_pve.txt"))

    # Re-export the parsed results as both MATLAB .mat and NumPy .npz.
    results_matlab_mat = op.abspath("%s_pve.mat" % foldername)
    results_numpy_npz = op.abspath("%s_pve.npz" % foldername)
    out_data = parse_pve_results(results_text_file)
    sio.savemat(results_matlab_mat, mdict=out_data)
    np.savez(results_numpy_npz, **out_data)
    return result.runtime
class SPMCommand(BaseInterface):
    """Extends `BaseInterface` class to implement SPM specific interfaces.

    WARNING: Pseudo prototype class, meant to be subclassed
    """
    input_spec = SPMCommandInputSpec

    # Job routing used to build the SPM batch structure; subclasses
    # override these with the concrete SPM job type/name.
    _jobtype = 'basetype'
    _jobname = 'basename'

    # Class-level MATLAB defaults shared by all SPM interfaces; populated
    # via set_mlab_paths() and consumed by _check_mlab_inputs().
    _matlab_cmd = None
    _paths = None
    _use_mcr = None

    def __init__(self, **inputs):
        super(SPMCommand, self).__init__(**inputs)
        # Rebuild the embedded MatlabCommand whenever a MATLAB-related
        # input changes after construction.
        self.inputs.on_trait_change(
            self._matlab_cmd_update,
            ['matlab_cmd', 'mfile', 'paths', 'use_mcr'])
        self._check_mlab_inputs()
        self._matlab_cmd_update()

    @classmethod
    def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None):
        """Store class-wide MATLAB defaults for all SPM interfaces."""
        cls._matlab_cmd = matlab_cmd
        cls._paths = paths
        cls._use_mcr = use_mcr

    def _matlab_cmd_update(self):
        # MatlabCommand has to be created here,
        # because matlab_cmb is not a proper input
        # and can be set only during init
        self.mlab = MatlabCommand(matlab_cmd=self.inputs.matlab_cmd,
                                  mfile=self.inputs.mfile,
                                  paths=self.inputs.paths,
                                  uses_mcr=self.inputs.use_mcr)
        self.mlab.inputs.script_file = 'pyscript_%s.m' % \
            self.__class__.__name__.split('.')[-1].lower()

    @property
    def jobtype(self):
        return self._jobtype

    @property
    def jobname(self):
        return self._jobname

    def _check_mlab_inputs(self):
        # Fall back to the class-level defaults for any MATLAB input the
        # user did not set explicitly on this instance.
        if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd:
            self.inputs.matlab_cmd = self._matlab_cmd
        if not isdefined(self.inputs.paths) and self._paths:
            self.inputs.paths = self._paths
        if not isdefined(self.inputs.use_mcr) and self._use_mcr:
            self.inputs.use_mcr = self._use_mcr

    def _run_interface(self, runtime):
        """Executes the SPM function using MATLAB."""
        self.mlab.inputs.script = self._make_matlab_command(
            deepcopy(self._parse_inputs()))
        results = self.mlab.run()
        runtime.returncode = results.runtime.returncode
        if self.mlab.inputs.uses_mcr:
            # The MCR wrapper reports skipped execution in stdout.
            if 'Skipped' in results.runtime.stdout:
                self.raise_exception(runtime)
        runtime.stdout = results.runtime.stdout
        runtime.stderr = results.runtime.stderr
        runtime.merged = results.runtime.merged
        return runtime

    def _list_outputs(self):
        """Determine the expected outputs based on inputs."""
        raise NotImplementedError

    def _format_arg(self, opt, spec, val):
        """Convert input to appropriate format for SPM."""
        if spec.is_trait_type(traits.Bool):
            # SPM batch structures expect 0/1 rather than logical values.
            return int(val)
        else:
            return val

    def _parse_inputs(self, skip=()):
        """Map defined inputs onto a nested dict keyed by SPM field names.

        Dotted field names (``a.b.c``) become nested dicts; the result is
        returned as a one-element list to match SPM's job layout.
        """
        spmdict = {}
        metadata = dict(field=lambda t: t is not None)
        for name, spec in self.inputs.traits(**metadata).items():
            if skip and name in skip:
                continue
            value = getattr(self.inputs, name)
            if not isdefined(value):
                continue
            field = spec.field
            if '.' in field:
                fields = field.split('.')
                dictref = spmdict
                for f in fields[:-1]:
                    if f not in dictref.keys():
                        dictref[f] = {}
                    dictref = dictref[f]
                dictref[fields[-1]] = self._format_arg(name, spec, value)
            else:
                spmdict[field] = self._format_arg(name, spec, value)
        return [spmdict]

    def _reformat_dict_for_savemat(self, contents):
        """Encloses a dict representation within hierarchical lists.

        In order to create an appropriate SPM job structure, a Python
        dict storing the job needs to be modified so that each dict
        embedded in dict needs to be enclosed as a list element.

        Examples
        --------
        >>> a = SPMCommand()._reformat_dict_for_savemat(dict(a=1,b=dict(c=2,d=3)))
        >>> print a
        [{'a': 1, 'b': [{'c': 2, 'd': 3}]}]
        """
        newdict = {}
        try:
            for key, value in contents.items():
                if isinstance(value, dict):
                    if value:
                        newdict[key] = self._reformat_dict_for_savemat(value)
                    # if value is None, skip
                else:
                    newdict[key] = value
            return [newdict]
        except TypeError:
            # Parenthesized form is equivalent in Python 2 and valid in 3.
            print('Requires dict input')

    def _generate_job(self, prefix='', contents=None):
        """Recursive function to generate spm job specification as a string

        Parameters
        ----------
        prefix : string
            A string that needs to get
        contents : dict
            A non-tuple Python structure containing spm job
            information gets converted to an appropriate sequence of
            matlab commands.
        """
        jobstring = ''
        if contents is None:
            return jobstring
        if isinstance(contents, list):
            for i, value in enumerate(contents):
                if prefix.endswith(")"):
                    # Extend an existing index list: foo(1) -> foo(1,2)
                    newprefix = "%s,%d)" % (prefix[:-1], i + 1)
                else:
                    newprefix = "%s(%d)" % (prefix, i + 1)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, dict):
            for key, value in contents.items():
                newprefix = "%s.%s" % (prefix, key)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, np.ndarray):
            if contents.dtype == np.dtype(object):
                # Object arrays become MATLAB cell arrays.
                if prefix:
                    jobstring += "%s = {...\n" % (prefix)
                else:
                    jobstring += "{...\n"
                for i, val in enumerate(contents):
                    if isinstance(val, np.ndarray):
                        jobstring += self._generate_job(prefix=None,
                                                        contents=val)
                    elif isinstance(val, str):
                        jobstring += '\'%s\';...\n' % (val)
                    else:
                        jobstring += '%s;...\n' % str(val)
                jobstring += '};\n'
            else:
                # Structured arrays become MATLAB struct arrays.
                for i, val in enumerate(contents):
                    for field in val.dtype.fields:
                        if prefix:
                            newprefix = "%s(%d).%s" % (prefix, i + 1, field)
                        else:
                            newprefix = "(%d).%s" % (i + 1, field)
                        jobstring += self._generate_job(newprefix,
                                                        val[field])
            return jobstring
        if isinstance(contents, str):
            jobstring += "%s = '%s';\n" % (prefix, contents)
            return jobstring
        jobstring += "%s = %s;\n" % (prefix, str(contents))
        return jobstring

    def _make_matlab_command(self, contents, postscript=None):
        """Generates a mfile to build job structure

        Parameters
        ----------
        contents : list
            a list of dicts generated by _parse_inputs
            in each subclass
        cwd : string
            default os.getcwd()

        Returns
        -------
        mscript : string
            contents of a script called by matlab
        """
        cwd = os.getcwd()
        mscript = """
        %% Generated by nipype.interfaces.spm
        if isempty(which('spm')),
             throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
        end
        [name, ver] = spm('ver');
        fprintf('SPM version: %s Release: %s\\n',name, ver);
        fprintf('SPM path: %s\\n',which('spm'));
        spm('Defaults','fMRI');
        if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');end\n
        """
        if self.mlab.inputs.mfile:
            if self.jobname in [
                    'st', 'smooth', 'preproc', 'preproc8', 'fmri_spec',
                    'fmri_est', 'factorial_design', 'defs'
            ]:
                # parentheses
                mscript += self._generate_job(
                    'jobs{1}.%s{1}.%s(1)' % (self.jobtype, self.jobname),
                    contents[0])
            else:
                #curly brackets
                mscript += self._generate_job(
                    'jobs{1}.%s{1}.%s{1}' % (self.jobtype, self.jobname),
                    contents[0])
        else:
            # BUG FIX: was `self.reformat_dict_for_savemat(...)`, which
            # does not exist (the method is `_reformat_dict_for_savemat`)
            # and raised AttributeError on the non-mfile path.
            jobdef = {
                'jobs': [{
                    self.jobtype: [{
                        self.jobname:
                        self._reformat_dict_for_savemat(contents[0])
                    }]
                }]
            }
            savemat(os.path.join(cwd, 'pyjobs_%s.mat' % self.jobname), jobdef)
            mscript += "load pyjobs_%s;\n\n" % self.jobname
        mscript += """
        if strcmp(spm('ver'),'SPM8'),
           jobs=spm_jobman('spm5tospm8',{jobs});
        end
        spm_jobman(\'run_nogui\',jobs);\n
        """
        if postscript is not None:
            mscript += postscript
        return mscript
import os
import socket

from nipype.interfaces.matlab import MatlabCommand

# Per-host MATLAB/SPM12 configuration: pick the right install depending
# on which machine the script runs on.
if socket.gethostname() == 'malin':
    os.environ['MATLABCMD'] = "/opt/matlab/R2015b/bin/matlab -nodesktop -nosplash"
    MatlabCommand.set_default_paths('/opt/matlab/R2015b/toolbox/spm12')
    MatlabCommand.set_default_matlab_cmd("/opt/matlab/R2015b/bin/matlab -nodesktop -nosplash")
    TPM = '/opt/matlab/R2015b/toolbox/spm12/tpm/TPM.nii'
    # os.environ['MATLABCMD'] = "/opt/matlab/R2012a/bin/matlab -nodesktop -nosplash"
    # MatlabCommand.set_default_paths('/opt/matlab/R2012a/toolbox/spm12')
    # MatlabCommand.set_default_matlab_cmd("/opt/matlab/R2012a/bin/matlab -nodesktop -nosplash")
elif socket.gethostname() == 'cala':
    os.environ['MATLABCMD'] = "/opt/matlab/64bit/R2015a/bin/matlab -nodesktop -nosplash"
    MatlabCommand.set_default_paths('/opt/matlab/64bit/R2015a/toolbox/spm12')
    MatlabCommand.set_default_matlab_cmd("/opt/matlab/64bit/R2015a/bin/matlab -nodesktop -nosplash")
    TPM = '/opt/matlab/64bit/R2015a/toolbox/spm12/tpm/TPM.nii'


def display_crash_files(crashfile, rerun=False):
    # Print a summary of a nipype crash file: the crashed node and where
    # (or whether) it ran. NOTE(review): `rerun` is not used in the code
    # visible here — presumably handled further down in the original file.
    from nipype.utils.filemanip import loadcrash
    crash_data = loadcrash(crashfile)
    node = crash_data['node']
    tb = crash_data['traceback']
    print("\n")
    print("File: %s"%crashfile)
    print("Node: %s"%node)
    if node.base_dir:
        print("Working directory: %s" % node.output_dir())
    else:
        print("Node crashed before execution")
    print("\n")
import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.rapidart as ra # artifact detection import nipype.algorithms.modelgen as model # model specification from nipype.algorithms.misc import Gunzip from nipype import Node, Workflow, MapNode from nipype import SelectFiles from os.path import join as opj os.chdir('/home/or/Documents/dicom_niix') import readConditionFiles_r_aPTSD from nipype.interfaces.matlab import MatlabCommand #mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") MatlabCommand.set_default_paths( '/home/or/Downloads/spm12/') # set default SPM12 path in my computer. # Specify the location of the data. data_dir = os.path.abspath('/media/Data/FromHPC/output/fmriprep') from bids.grabbids import BIDSLayout layout = BIDSLayout(data_dir) checkGet = layout.get(type="bold", extensions="nii.gz") checkGet[0].subject layout.get(type="bold", task="3", session="1", extensions="nii.gz")[0].filename # Specify the subject directories subject_list = ['1063', '1072', '1206', '1244'] #,'1273' ,'1291', '1305', '1340', '1345', '1346'] # Map field names to individual subject runs. task_list = ['3', '4', '5', '6']
# SET PATHS AND SUBJECTS # ====================================================================== # define paths depending on the operating system (OS) platform: project = 'highspeed' # initialize empty paths: path_root = None sub_list = None # path to the project root: project_name = 'highspeed-glm' path_root = os.getenv('PWD').split(project_name)[0] + project_name if 'darwin' in sys.platform: path_spm = '/Users/Shared/spm12' path_matlab = '/Applications/MATLAB_R2017a.app/bin/matlab -nodesktop -nosplash' # set paths for spm: spm.SPMCommand.set_mlab_paths(paths=path_spm, matlab_cmd=path_matlab) MatlabCommand.set_default_paths(path_spm) MatlabCommand.set_default_matlab_cmd(path_matlab) sub_list = ['sub-01'] elif 'linux' in sys.platform: # path_matlab = '/home/mpib/wittkuhn/spm12.simg eval \$SPMMCRCMD' # path_matlab = opj('/home', 'beegfs', 'wittkuhn', 'tools', 'spm', 'spm12.simg eval \$SPMMCRCMD') singularity_cmd = 'singularity run -B /home/mpib/wittkuhn -B /mnt/beegfs/home/wittkuhn /home/mpib/wittkuhn/highspeed/highspeed-glm/tools/spm/spm12.simg' singularity_spm = 'eval \$SPMMCRCMD' path_matlab = ' '.join([singularity_cmd, singularity_spm]) spm.SPMCommand.set_mlab_paths(matlab_cmd=path_matlab, use_mcr=True) # grab the list of subjects from the bids data set: layout = BIDSLayout(opj(path_root, 'bids')) # get all subject ids: sub_list = sorted(layout.get_subjects()) # create a template to add the "sub-" prefix to the ids sub_template = ['sub-'] * len(sub_list)
Created on Wed Aug 14 09:23:21 2019

@author: brianne
"""
#!/usr/bin/env python
# coding: utf-8

# Get the Node and Workflow object
from nipype import Node, Workflow, pipeline
import nipype.interfaces.utility as util  # utility (Needed?)

# Specify which SPM to use (useful for the SPM8 comparison testing)
from nipype.interfaces.matlab import MatlabCommand as mlabcmd
mlabcmd.set_default_paths('/usr/local/MATLAB/tools/spm12')

# Use nipype's version of collecting and inputing files.
from nipype import SelectFiles, DataSink, config
#config.enable_debug_mode()
config.set(
    'execution', 'stop_on_first_crash', 'true'
)  # Doesn't mean the whole pipeline will run properly if set to false, but can run through a couple times and hopefully hit the stragglers. Sometimes, it's because the scan doesn' exist, but the template is not flexible enough to catch it.
#config.set('execution', 'keep_inputs', 'true')
#config.set('execution', 'keep_unnecessary_files','true')
config.set('execution', 'hash_method', 'timestamp')
#config.set('execution', 'poll_sleep_duration','3')
import os
import glob
import os.path as op
from nipype.interfaces.matlab import MatlabCommand
import matplotlib.pyplot as plt
import numpy as np
from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.io import SelectFiles, DataSink
from os.path import join as opj
from nipype.interfaces.utility import IdentityInterface, Function, Select, Merge
import nipype.interfaces.spm as spm
import nipype.interfaces.ants as ants
import nipype.interfaces.afni as afni
import nipype.interfaces.fsl as fsl
from nipype import config

# Keep intermediate node outputs on disk for debugging/inspection.
cfg = dict(execution={'remove_unnecessary_outputs': False})
config.update_config(cfg)

# Point nipype's MATLAB interface at the local SPM12 install and run
# MATLAB headless for batch jobs.
MatlabCommand.set_default_paths('/Users/amr/Downloads/spm12')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")

# import nipype.interfaces.matlab as mlab
# mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
# mlab.MatlabCommand.set_default_paths('/home/amr/Documents/MATLAB/toolbox/spm8')
# ========================================================================================================
# In[2]:

experiment_dir = '/media/amr/Amr_4TB/Work/stimulation'
# NOTE(review): this list literal continues beyond this chunk in the
# original file (closing bracket not visible here).
subject_list = [
    '005', '007', '008', '010', '013', '024', '025', '026', '027', '028',
    '038', '040', '041', '042', '051', '052', '053', '054', '055', '059',
    '060', '061', '079', '081'
def _run_interface(self, runtime):
    # PVELab partial-volume correction for a PET/T1 pair: stage inputs as
    # Analyze volumes, write the subject list and ROI config, run the
    # MATLAB batch, and convert all outputs back to NIfTI.
    list_path = op.abspath("SubjectList.lst")
    pet_path, _ = nifti_to_analyze(self.inputs.pet_file)
    t1_path, _ = nifti_to_analyze(self.inputs.t1_file)
    f = open(list_path, 'w')
    f.write("%s;%s" % (pet_path, t1_path))
    f.close()

    orig_t1 = nb.load(self.inputs.t1_file)
    # Saved so output volumes can be re-oriented like the input T1.
    orig_affine = orig_t1.get_affine()

    # Convert tissue maps to uint8 Analyze format for the batch.
    gm_uint8 = switch_datatype(self.inputs.grey_matter_file)
    gm_path, _ = nifti_to_analyze(gm_uint8)
    iflogger.info("Writing to %s" % gm_path)

    # Remap ROI labels (optionally via the FreeSurfer LUT) and fix the
    # WM/CSF volumes to expected values.
    fixed_roi_file, fixed_wm, fixed_csf, remap_dict = fix_roi_values(
        self.inputs.roi_file, self.inputs.grey_matter_binary_mask,
        self.inputs.white_matter_file, self.inputs.csf_file,
        self.inputs.use_fs_LUT)
    rois_path, _ = nifti_to_analyze(fixed_roi_file)
    iflogger.info("Writing to %s" % rois_path)
    iflogger.info("Writing to %s" % fixed_wm)
    iflogger.info("Writing to %s" % fixed_csf)

    wm_uint8 = switch_datatype(fixed_wm)
    wm_path, _ = nifti_to_analyze(wm_uint8)
    iflogger.info("Writing to %s" % wm_path)

    csf_uint8 = switch_datatype(fixed_csf)
    csf_path, _ = nifti_to_analyze(csf_uint8)
    iflogger.info("Writing to %s" % csf_path)

    if self.inputs.use_fs_LUT:
        # Name ROIs via the FreeSurfer color lookup table.
        fs_dir = os.environ['FREESURFER_HOME']
        LUT = op.join(fs_dir, "FreeSurferColorLUT.txt")
        dat_path = write_config_dat(fixed_roi_file, LUT, remap_dict)
    else:
        dat_path = write_config_dat(fixed_roi_file)
    iflogger.info("Writing to %s" % dat_path)

    d = dict(list_path=list_path,
             gm_path=gm_path,
             wm_path=wm_path,
             csf_path=csf_path,
             rois_path=rois_path,
             dat_path=dat_path,
             X_PSF=self.inputs.x_dir_point_spread_function_FWHM,
             Y_PSF=self.inputs.y_dir_point_spread_function_FWHM,
             Z_PSF=self.inputs.z_dir_point_spread_function_FWHM)
    script = Template("""
    filelist = '$list_path';
    gm = '$gm_path';
    wm = '$wm_path';
    csf = '$csf_path';
    rois = '$rois_path';
    dat = '$dat_path';
    x_fwhm = '$X_PSF';
    y_fwhm = '$Y_PSF';
    z_fwhm = '$Z_PSF';
    runbatch_nogui(filelist, gm, wm, csf, rois, dat, x_fwhm, y_fwhm, z_fwhm)
    """).substitute(d)

    mlab = MatlabCommand(script=script, mfile=True, prescript=[''],
                         postscript=[''])
    result = mlab.run()

    # Convert every Analyze output in pve_<petname>/ back to NIfTI,
    # re-attaching the original T1 affine.
    _, foldername, _ = split_filename(self.inputs.pet_file)
    occu_MG_img = glob.glob("pve_%s/r_volume_Occu_MG.img" % foldername)[0]
    analyze_to_nifti(occu_MG_img, affine=orig_affine)
    occu_meltzer_img = glob.glob("pve_%s/r_volume_Occu_Meltzer.img" % foldername)[0]
    analyze_to_nifti(occu_meltzer_img, affine=orig_affine)
    meltzer_img = glob.glob("pve_%s/r_volume_Meltzer.img" % foldername)[0]
    analyze_to_nifti(meltzer_img, affine=orig_affine)
    MG_rousset_img = glob.glob("pve_%s/r_volume_MGRousset.img" % foldername)[0]
    analyze_to_nifti(MG_rousset_img, affine=orig_affine)
    MGCS_img = glob.glob("pve_%s/r_volume_MGCS.img" % foldername)[0]
    analyze_to_nifti(MGCS_img, affine=orig_affine)
    virtual_PET_img = glob.glob("pve_%s/r_volume_Virtual_PET.img" % foldername)[0]
    analyze_to_nifti(virtual_PET_img, affine=orig_affine)
    centrum_semiovalue_WM_img = glob.glob("pve_%s/r_volume_CSWMROI.img" % foldername)[0]
    analyze_to_nifti(centrum_semiovalue_WM_img, affine=orig_affine)
    alfano_alfano_img = glob.glob("pve_%s/r_volume_AlfanoAlfano.img" % foldername)[0]
    analyze_to_nifti(alfano_alfano_img, affine=orig_affine)
    alfano_cs_img = glob.glob("pve_%s/r_volume_AlfanoCS.img" % foldername)[0]
    analyze_to_nifti(alfano_cs_img, affine=orig_affine)
    alfano_rousset_img = glob.glob("pve_%s/r_volume_AlfanoRousset.img" % foldername)[0]
    analyze_to_nifti(alfano_rousset_img, affine=orig_affine)
    mg_alfano_img = glob.glob("pve_%s/r_volume_MGAlfano.img" % foldername)[0]
    analyze_to_nifti(mg_alfano_img, affine=orig_affine)
    mask_img = glob.glob("pve_%s/r_volume_Mask.img" % foldername)[0]
    analyze_to_nifti(mask_img, affine=orig_affine)
    # PSF volume is converted without the T1 affine.
    PSF_img = glob.glob("pve_%s/r_volume_PSF.img" % foldername)[0]
    analyze_to_nifti(PSF_img)

    try:
        rousset_mat_file = glob.glob("pve_%s/r_volume_Rousset.mat" % foldername)[0]
    except IndexError:
        # On Ubuntu using pve64, the matlab file is saved with a capital M
        rousset_mat_file = glob.glob("pve_%s/r_volume_Rousset.Mat" % foldername)[0]

    shutil.copyfile(rousset_mat_file, op.abspath("r_volume_Rousset.mat"))
    results_text_file = glob.glob("pve_%s/r_volume_pve.txt" % foldername)[0]
    shutil.copyfile(results_text_file, op.abspath("r_volume_pve.txt"))

    # Re-export parsed results as both MATLAB .mat and NumPy .npz.
    results_matlab_mat = op.abspath("%s_pve.mat" % foldername)
    results_numpy_npz = op.abspath("%s_pve.npz" % foldername)
    out_data = parse_pve_results(results_text_file)
    sio.savemat(results_matlab_mat, mdict=out_data)
    np.savez(results_numpy_npz, **out_data)
    return result.runtime
class SPMCommand(BaseInterface):
    """Extends `BaseInterface` class to implement SPM specific interfaces.

    WARNING: Pseudo prototype class, meant to be subclassed
    """
    input_spec = SPMCommandInputSpec

    # Job routing used to build the SPM batch; overridden by subclasses.
    _jobtype = 'basetype'
    _jobname = 'basename'

    # Class-level MATLAB defaults shared by all SPM interfaces; set via
    # set_mlab_paths() and applied in _check_mlab_inputs().
    _matlab_cmd = None
    _paths = None
    _use_mcr = None

    def __init__(self, **inputs):
        super(SPMCommand, self).__init__(**inputs)
        # Rebuild the embedded MatlabCommand when MATLAB inputs change.
        self.inputs.on_trait_change(self._matlab_cmd_update,
                                    ['matlab_cmd', 'mfile', 'paths', 'use_mcr'])
        self._check_mlab_inputs()
        self._matlab_cmd_update()

    @classmethod
    def set_mlab_paths(cls, matlab_cmd=None, paths = None, use_mcr=None):
        """Store class-wide MATLAB defaults for all SPM interfaces."""
        cls._matlab_cmd = matlab_cmd
        cls._paths = paths
        cls._use_mcr = use_mcr

    def _matlab_cmd_update(self):
        # MatlabCommand has to be created here,
        # because matlab_cmb is not a proper input
        # and can be set only during init
        self.mlab = MatlabCommand(matlab_cmd=self.inputs.matlab_cmd,
                                  mfile=self.inputs.mfile,
                                  paths=self.inputs.paths,
                                  uses_mcr=self.inputs.use_mcr)
        self.mlab.inputs.script_file = 'pyscript_%s.m' % \
            self.__class__.__name__.split('.')[-1].lower()

    @property
    def jobtype(self):
        return self._jobtype

    @property
    def jobname(self):
        return self._jobname

    def _check_mlab_inputs(self):
        # Fall back to class-level defaults when no explicit input given.
        if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd:
            self.inputs.matlab_cmd = self._matlab_cmd
        if not isdefined(self.inputs.paths) and self._paths:
            self.inputs.paths = self._paths
        if not isdefined(self.inputs.use_mcr) and self._use_mcr:
            self.inputs.use_mcr = self._use_mcr

    def _run_interface(self, runtime):
        """Executes the SPM function using MATLAB."""
        self.mlab.inputs.script = self._make_matlab_command(deepcopy(self._parse_inputs()))
        results = self.mlab.run()
        runtime.returncode = results.runtime.returncode
        if self.mlab.inputs.uses_mcr:
            # The MCR wrapper reports skipped execution in stdout.
            if 'Skipped' in results.runtime.stdout:
                self.raise_exception(runtime)
        runtime.stdout = results.runtime.stdout
        runtime.stderr = results.runtime.stderr
        runtime.merged = results.runtime.merged
        return runtime

    def _list_outputs(self):
        """Determine the expected outputs based on inputs."""
        raise NotImplementedError

    def _format_arg(self, opt, spec, val):
        """Convert input to appropriate format for SPM."""
        return val

    def _parse_inputs(self, skip=()):
        """Map defined inputs onto a nested dict keyed by SPM field names.

        Dotted field names (``a.b.c``) become nested dicts; the result is
        returned as a one-element list to match SPM's job layout.
        """
        spmdict = {}
        metadata=dict(field=lambda t : t is not None)
        for name, spec in self.inputs.traits(**metadata).items():
            if skip and name in skip:
                continue
            value = getattr(self.inputs, name)
            if not isdefined(value):
                continue
            field = spec.field
            if '.' in field:
                fields = field.split('.')
                dictref = spmdict
                for f in fields[:-1]:
                    if f not in dictref.keys():
                        dictref[f] = {}
                    dictref = dictref[f]
                dictref[fields[-1]] = self._format_arg(name, spec, value)
            else:
                spmdict[field] = self._format_arg(name, spec, value)
        return [spmdict]

    def _reformat_dict_for_savemat(self, contents):
        """Encloses a dict representation within hierarchical lists.

        In order to create an appropriate SPM job structure, a Python
        dict storing the job needs to be modified so that each dict
        embedded in dict needs to be enclosed as a list element.

        Examples
        --------
        >>> a = SPMCommand()._reformat_dict_for_savemat(dict(a=1,b=dict(c=2,d=3)))
        >>> print a
        [{'a': 1, 'b': [{'c': 2, 'd': 3}]}]
        """
        newdict = {}
        try:
            for key, value in contents.items():
                if isinstance(value, dict):
                    if value:
                        newdict[key] = self._reformat_dict_for_savemat(value)
                    # if value is None, skip
                else:
                    newdict[key] = value
            return [newdict]
        except TypeError:
            # Parenthesized form is equivalent in Python 2 and valid in 3.
            print('Requires dict input')

    def _generate_job(self, prefix='', contents=None):
        """Recursive function to generate spm job specification as a string

        Parameters
        ----------
        prefix : string
            A string that needs to get
        contents : dict
            A non-tuple Python structure containing spm job
            information gets converted to an appropriate sequence of
            matlab commands.
        """
        jobstring = ''
        if contents is None:
            return jobstring
        if isinstance(contents, list):
            for i,value in enumerate(contents):
                newprefix = "%s(%d)" % (prefix, i+1)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, dict):
            for key,value in contents.items():
                newprefix = "%s.%s" % (prefix, key)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, np.ndarray):
            if contents.dtype == np.dtype(object):
                # Object arrays become MATLAB cell arrays.
                if prefix:
                    jobstring += "%s = {...\n"%(prefix)
                else:
                    jobstring += "{...\n"
                for i,val in enumerate(contents):
                    if isinstance(val, np.ndarray):
                        jobstring += self._generate_job(prefix=None,
                                                        contents=val)
                    elif isinstance(val,str):
                        jobstring += '\'%s\';...\n'%(val)
                    else:
                        jobstring += '%s;...\n'%str(val)
                jobstring += '};\n'
            else:
                # Structured arrays become MATLAB struct arrays.
                for i,val in enumerate(contents):
                    for field in val.dtype.fields:
                        if prefix:
                            newprefix = "%s(%d).%s"%(prefix, i+1, field)
                        else:
                            newprefix = "(%d).%s"%(i+1, field)
                        jobstring += self._generate_job(newprefix,
                                                        val[field])
            return jobstring
        if isinstance(contents, str):
            jobstring += "%s = '%s';\n" % (prefix,contents)
            return jobstring
        jobstring += "%s = %s;\n" % (prefix,str(contents))
        return jobstring

    def _make_matlab_command(self, contents, postscript=None):
        """Generates a mfile to build job structure

        Parameters
        ----------
        contents : list
            a list of dicts generated by _parse_inputs
            in each subclass
        cwd : string
            default os.getcwd()

        Returns
        -------
        mscript : string
            contents of a script called by matlab
        """
        cwd = os.getcwd()
        mscript = """
        %% Generated by nipype.interfaces.spm
        if isempty(which('spm')),
             throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
        end
        [name, ver] = spm('ver');
        fprintf('SPM version: %s Release: %s\\n',name, ver);
        fprintf('SPM path: %s\\n',which('spm'));
        spm('Defaults','fMRI');
        if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');end\n
        """
        if self.mlab.inputs.mfile:
            if self.jobname in ['st','smooth','preproc','preproc8','fmri_spec','fmri_est',
                                'factorial_design'] :
                # parentheses
                mscript += self._generate_job('jobs{1}.%s{1}.%s(1)' %
                                              (self.jobtype,self.jobname), contents[0])
            else:
                #curly brackets
                mscript += self._generate_job('jobs{1}.%s{1}.%s{1}' %
                                              (self.jobtype,self.jobname), contents[0])
        else:
            # BUG FIX: was `self.reformat_dict_for_savemat(...)`, which does
            # not exist (the method is `_reformat_dict_for_savemat`) and
            # raised AttributeError on the non-mfile path.
            jobdef = {'jobs':[{self.jobtype:[{self.jobname:self._reformat_dict_for_savemat
                                              (contents[0])}]}]}
            savemat(os.path.join(cwd,'pyjobs_%s.mat'%self.jobname), jobdef)
            mscript += "load pyjobs_%s;\n\n" % self.jobname
        mscript += """
        if strcmp(spm('ver'),'SPM8'),
           jobs=spm_jobman('spm5tospm8',{jobs});
        end
        spm_jobman(\'run_nogui\',jobs);\n
        """
        if postscript is not None:
            mscript += postscript
        return mscript
def _run_interface(self, runtime):
    """Stage inputs and run the comaRestLib spatio-temporal fingerprint
    computation (``computeFingerprintSpaceTime``) for one ICA component
    through MATLAB, saving the result as a .mat stats file.
    """
    # Copy the time-course image into ./matching/components so MATLAB
    # finds it next to the component maps.
    path, name, ext = split_filename(self.inputs.time_course_image)
    data_dir = op.abspath('./matching')
    copy_to = op.join(data_dir, 'components')
    if not op.exists(copy_to):
        os.makedirs(copy_to)
    copy_to = op.join(copy_to, name)
    shutil.copyfile(self.inputs.time_course_image, copy_to + ext)
    # Analyze pairs: bring along the partner .hdr/.img file
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr', copy_to + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img', copy_to + '.img')
    # NOTE(review): '.img' is appended unconditionally -- assumes the time
    # course is an Analyze pair; a '.nii' input would break here. Confirm.
    time_course_file = copy_to + '.img'

    # Copy the ICA mask (and its Analyze partner) into ./matching.
    path, name, ext = split_filename(self.inputs.ica_mask_image)
    shutil.copyfile(self.inputs.ica_mask_image,
                    op.join(data_dir, name) + ext)
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr',
                        op.join(data_dir, name) + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img',
                        op.join(data_dir, name) + '.img')
    # NOTE(review): MATLAB is handed the *original* mask path, not the
    # copy made just above -- verify this is intended.
    mask_file = op.abspath(self.inputs.ica_mask_image)
    repetition_time = self.inputs.repetition_time
    component_file = op.abspath(self.inputs.in_file)
    coma_rest_lib_path = op.abspath(self.inputs.coma_rest_lib_path)
    component_index = self.inputs.component_index

    # Output filename: explicit out_stats_file wins (forced to .mat);
    # otherwise derive from subject_id / component index.
    if isdefined(self.inputs.out_stats_file):
        path, name, ext = split_filename(self.inputs.out_stats_file)
        if not ext == '.mat':
            ext = '.mat'
        out_stats_file = op.abspath(name + ext)
    else:
        if isdefined(self.inputs.subject_id):
            out_stats_file = op.abspath(
                self.inputs.subject_id + '_IC_' + str(self.inputs.component_index) + '.mat')
        else:
            out_stats_file = op.abspath(
                'IC_' + str(self.inputs.component_index) + '.mat')

    # Substitution values for the MATLAB script template below.
    d = dict(
        component_file=component_file,
        IC=component_index,
        time_course_file=time_course_file,
        mask_name=mask_file,
        Tr=repetition_time,
        coma_rest_lib_path=coma_rest_lib_path,
        out_stats_file=out_stats_file)
    script = Template("""
    restlib_path = '$coma_rest_lib_path';
    setup_restlib_paths(restlib_path);
    Tr = $Tr;
    out_stats_file = '$out_stats_file';
    component_file = '$component_file';
    maskName = '$mask_name';
    maskData = load_nii(maskName);
    dataCompSpatial = load_nii(component_file)
    time_course_file = '$time_course_file'
    timeData = load_nii(time_course_file)
    IC = $IC
    [feature dataZ temporalData] = computeFingerprintSpaceTime(dataCompSpatial.img,timeData.img(:,IC),maskData.img,Tr);
    save '$out_stats_file'
    """).substitute(d)
    # mfile=True: write the script to an .m file for easier debugging
    result = MatlabCommand(script=script, mfile=True,
                           prescript=[''], postscript=[''])
    r = result.run()
    print 'Saving stats file as {s}'.format(s=out_stats_file)
    return runtime
""" Created on Tue Sep 24 23:49:09 2019 Reference: https://github.com/poldracklab/ds003-post-fMRIPrep-analysis/blob/master/workflows.py @author: rj299 """ import nipype.interfaces.io as nio # Data i/o from nipype.interfaces import spm from nipype import Node, Workflow, MapNode import nipype.interfaces.utility as util # utility from nipype import SelectFiles import os from nipype.interfaces.matlab import MatlabCommand MatlabCommand.set_default_paths('/home/rj299/project/MATLAB/toolbox/spm12/' ) # set default SPM12 path in my computer. #%% Gourp analysis - based on SPM - should consider the fsl Randomize option (other script) # OneSampleTTestDesign - creates one sample T-Test Design onesamplettestdes = Node(spm.OneSampleTTestDesign(), name="onesampttestdes") # EstimateModel - estimates the model level2estimate = Node(spm.EstimateModel(estimation_method={'Classical': 1}), name="level2estimate") # EstimateContrast - estimates group contrast level2conestimate = Node(spm.EstimateContrast(group_contrast=True), name="level2conestimate") cont1 = ['Group', 'T', ['mean'], [1]] level2conestimate.inputs.contrasts = [cont1]
def _run_interface(self, runtime): in_files = self.inputs.in_files data_dir = op.join(os.getcwd(),'origdata') if not op.exists(data_dir): os.makedirs(data_dir) all_names = [] print 'Multiple ({n}) input images detected! Copying to {d}...'.format(n=len(self.inputs.in_files), d=data_dir) for in_file in self.inputs.in_files: path, name, ext = split_filename(in_file) shutil.copyfile(in_file, op.join(data_dir, name) + ext) if ext == '.img': shutil.copyfile(op.join(path, name) + '.hdr', op.join(data_dir, name) + '.hdr') elif ext == '.hdr': shutil.copyfile(op.join(path, name) + '.img', op.join(data_dir, name) + '.img') all_names.append(name) print 'Copied!' input_files_as_str = op.join(data_dir, os.path.commonprefix(all_names) + '*' + ext) number_of_components = self.inputs.desired_number_of_components output_dir = os.getcwd() prefix = self.inputs.prefix d = dict(output_dir=output_dir, prefix=prefix, number_of_components=number_of_components, in_files=input_files_as_str) variables = Template(""" %% After entering the parameters, use icatb_batch_file_run(inputFile); modalityType = 'fMRI'; which_analysis = 1; perfType = 1; keyword_designMatrix = 'no'; dataSelectionMethod = 4; input_data_file_patterns = {'$in_files'}; dummy_scans = 0; outputDir = '$output_dir'; prefix = '$prefix'; maskFile = []; group_pca_type = 'subject specific'; backReconType = 'gica'; %% Data Pre-processing options % 1 - Remove mean per time point % 2 - Remove mean per voxel % 3 - Intensity normalization % 4 - Variance normalization preproc_type = 3; pcaType = 1; pca_opts.stack_data = 'yes'; pca_opts.precision = 'single'; pca_opts.tolerance = 1e-4; pca_opts.max_iter = 1000; numReductionSteps = 2; doEstimation = 0; estimation_opts.PC1 = 'mean'; estimation_opts.PC2 = 'mean'; estimation_opts.PC3 = 'mean'; numOfPC1 = $number_of_components; numOfPC2 = $number_of_components; numOfPC3 = 0; %% Scale the Results. 
Options are 0, 1, 2, 3 and 4 % 0 - Don't scale % 1 - Scale to Percent signal change % 2 - Scale to Z scores % 3 - Normalize spatial maps using the maximum intensity value and multiply timecourses using the maximum intensity value % 4 - Scale timecourses using the maximum intensity value and spatial maps using the standard deviation of timecourses scaleType = 0; algoType = 1; refFunNames = {'Sn(1) right*bf(1)', 'Sn(1) left*bf(1)'}; refFiles = {which('ref_default_mode.nii'), which('ref_left_visuomotor.nii'), which('ref_right_visuomotor.nii')}; %% ICA Options - Name by value pairs in a cell array. Options will vary depending on the algorithm. See icatb_icaOptions for more details. Some options are shown below. %% Infomax - {'posact', 'off', 'sphering', 'on', 'bias', 'on', 'extended', 0} %% FastICA - {'approach', 'symm', 'g', 'tanh', 'stabilization', 'on'} icaOptions = {'posact', 'off', 'sphering', 'on', 'bias', 'on', 'extended', 0}; """).substitute(d) file = open('input_batch.m', 'w') file.writelines(variables) file.close() script = """param_file = icatb_read_batch_file('input_batch.m'); load(param_file); global FUNCTIONAL_DATA_FILTER; global ZIP_IMAGE_FILES; FUNCTIONAL_DATA_FILTER = '*.nii'; ZIP_IMAGE_FILES = 'No'; icatb_runAnalysis(sesInfo, 1);""" result = MatlabCommand(script=script, mfile=True, prescript=[''], postscript=['']) r = result.run() return runtime
def _run_interface(self, runtime):
    """Match ICA components against canonical resting-state templates
    with the comaRestLib ``selectionMatchClassification`` MATLAB routine
    and save the assignment statistics to a .mat file.
    """
    # Copy the time-course image into ./matching/components.
    path, name, ext = split_filename(self.inputs.time_course_image)
    data_dir = op.abspath('./matching')
    copy_to = op.join(data_dir, 'components')
    if not op.exists(copy_to):
        os.makedirs(copy_to)
    copy_to = op.join(copy_to, name)
    shutil.copyfile(self.inputs.time_course_image, copy_to + ext)
    # Analyze pairs: bring along the partner .hdr/.img file
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr', copy_to + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img', copy_to + '.img')

    # Stage the component images in ./matching/components as well.
    data_dir = op.abspath('./matching/components')
    in_files = self.inputs.in_files
    if len(self.inputs.in_files) > 1:
        print 'Multiple ({n}) input images detected! Copying to {d}...'.format(n=len(self.inputs.in_files), d=data_dir)
        for in_file in self.inputs.in_files:
            path, name, ext = split_filename(in_file)
            shutil.copyfile(in_file, op.join(data_dir, name) + ext)
            if ext == '.img':
                shutil.copyfile(op.join(path, name) + '.hdr',
                                op.join(data_dir, name) + '.hdr')
            elif ext == '.hdr':
                shutil.copyfile(op.join(path, name) + '.img',
                                op.join(data_dir, name) + '.img')
        print 'Copied!'
    elif isdefined(self.inputs.in_file4d):
        print 'Single four-dimensional image selected. Splitting and copying to {d}'.format(d=data_dir)
        # NOTE(review): nb.four_to_three returns nibabel image objects,
        # while split_filename/shutil.copyfile below expect path strings;
        # this branch looks broken -- confirm before relying on it.
        in_files = nb.four_to_three(self.inputs.in_file4d)
        for in_file in in_files:
            path, name, ext = split_filename(in_file)
            shutil.copyfile(in_file, op.join(data_dir, name) + ext)
        print 'Copied!'
    else:
        raise Exception('Single functional image provided. Ending...')

    # NOTE(review): this appears to overwrite the 4-D split result above,
    # so nComponents is always len(self.inputs.in_files) -- confirm.
    in_files = self.inputs.in_files
    nComponents = len(in_files)
    repetition_time = self.inputs.repetition_time
    coma_rest_lib_path = op.abspath(self.inputs.coma_rest_lib_path)

    # Copy the ICA mask next to the components.
    data_dir = op.abspath('./matching')
    if not op.exists(data_dir):
        os.makedirs(data_dir)
    path, name, ext = split_filename(self.inputs.ica_mask_image)
    copy_to = op.join(data_dir, 'components')
    if not op.exists(copy_to):
        os.makedirs(copy_to)
    copy_to = op.join(copy_to, name)
    shutil.copyfile(self.inputs.ica_mask_image, copy_to + ext)
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr', copy_to + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img', copy_to + '.img')
    # NOTE(review): MATLAB receives the original mask path, not the copy.
    mask_file = op.abspath(self.inputs.ica_mask_image)
    out_stats_file = op.abspath(self.inputs.out_stats_file)

    # Substitution values for the MATLAB template below.
    d = dict(
        out_stats_file=out_stats_file,
        data_dir=data_dir,
        mask_name=mask_file,
        timecourse=op.abspath(self.inputs.time_course_image),
        subj_id=self.inputs.subject_id,
        nComponents=nComponents,
        Tr=repetition_time,
        coma_rest_lib_path=coma_rest_lib_path)
    script = Template("""
    restlib_path = '$coma_rest_lib_path';
    setup_restlib_paths(restlib_path)
    namesTemplate = {'rAuditory_corr','rCerebellum_corr','rDMN_corr','rECN_L_corr','rECN_R_corr','rSalience_corr','rSensorimotor_corr','rVisual_lateral_corr','rVisual_medial_corr','rVisual_occipital_corr'};
    indexNeuronal = 1:$nComponents;
    nCompo = $nComponents;
    out_stats_file = '$out_stats_file';
    Tr = $Tr;
    data_dir = '$data_dir'
    mask_name = '$mask_name'
    subj_id = '$subj_id'
    time_course_name = '$timecourse'
    [dataAssig maxGoF] = selectionMatchClassification(data_dir, subj_id, mask_name, time_course_name, namesTemplate,indexNeuronal,nCompo,Tr,restlib_path)
    for i=1:size(dataAssig,1)
       str{i} = sprintf('Template %d: %s to component %d with GoF %f is neuronal %d prob=%f',dataAssig(i,1),namesTemplate{i},dataAssig(i,2),dataAssig(i,3),dataAssig(i,4),dataAssig(i,5));
       disp(str{i});
    end
    maxGoF
    templates = dataAssig(:,1)
    components = dataAssig(:,2)
    gofs = dataAssig(:,3)
    neuronal_bool = dataAssig(:,4)
    neuronal_prob = dataAssig(:,5)
    save '$out_stats_file'
    """).substitute(d)
    print 'Saving stats file as {s}'.format(s=out_stats_file)
    # mfile=True: write the script to an .m file for easier debugging
    result = MatlabCommand(script=script, mfile=True,
                           prescript=[''], postscript=[''])
    r = result.run()
    return runtime
def _run_interface(self, runtime):
    """Split ICA output into neuronal / non-neuronal images using the
    comaRestLib ``denoiseImage`` MATLAB routine.
    """
    # Stage component images in ./denoise/components.
    data_dir = op.abspath('./denoise/components')
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    in_files = self.inputs.in_files
    if len(self.inputs.in_files) > 1:
        print 'Multiple ({n}) input images detected! Copying to {d}...'.format(n=len(self.inputs.in_files), d=data_dir)
        for in_file in self.inputs.in_files:
            path, name, ext = split_filename(in_file)
            shutil.copyfile(in_file, op.join(data_dir, name) + ext)
            # Analyze pairs: bring along the partner .hdr/.img file
            if ext == '.img':
                shutil.copyfile(op.join(path, name) + '.hdr',
                                op.join(data_dir, name) + '.hdr')
            elif ext == '.hdr':
                shutil.copyfile(op.join(path, name) + '.img',
                                op.join(data_dir, name) + '.img')
        print 'Copied!'
        in_files = self.inputs.in_files
    elif isdefined(self.inputs.in_file4d):
        print 'Single four-dimensional image selected. Splitting and copying to {d}'.format(d=data_dir)
        # NOTE(review): nb.four_to_three returns nibabel image objects,
        # while split_filename/shutil.copyfile below expect path strings;
        # this branch looks broken -- confirm before relying on it.
        in_files = nb.four_to_three(self.inputs.in_file4d)
        for in_file in in_files:
            path, name, ext = split_filename(in_file)
            shutil.copyfile(in_file, op.join(data_dir, name) + ext)
        print 'Copied!'
    else:
        # Single 3-D image: proceed with it as-is (best effort).
        print 'Single functional image provided. Ending...'
        in_files = self.inputs.in_files

    nComponents = len(in_files)
    # Copy the time-course image next to the components.
    path, name, ext = split_filename(self.inputs.time_course_image)
    shutil.copyfile(self.inputs.time_course_image,
                    op.join(data_dir, name) + ext)
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr',
                        op.join(data_dir, name) + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img',
                        op.join(data_dir, name) + '.img')

    # Copy the ICA mask into ./denoise; MATLAB gets the copied path
    # *without* its extension here (unlike the other interfaces).
    data_dir = op.abspath('./denoise')
    path, name, ext = split_filename(self.inputs.ica_mask_image)
    shutil.copyfile(self.inputs.ica_mask_image,
                    op.join(data_dir, name) + ext)
    if ext == '.img':
        shutil.copyfile(op.join(path, name) + '.hdr',
                        op.join(data_dir, name) + '.hdr')
    elif ext == '.hdr':
        shutil.copyfile(op.join(path, name) + '.img',
                        op.join(data_dir, name) + '.img')
    mask_file = op.join(data_dir, name)
    repetition_time = self.inputs.repetition_time
    neuronal_image = op.abspath(self.inputs.out_neuronal_image)
    non_neuronal_image = op.abspath(self.inputs.out_non_neuronal_image)
    coma_rest_lib_path = op.abspath(self.inputs.coma_rest_lib_path)

    # Substitution values for the MATLAB template below.
    d = dict(
        data_dir=data_dir,
        mask_name=mask_file,
        nComponents=nComponents,
        Tr=repetition_time,
        nameNeuronal=neuronal_image,
        nameNonNeuronal=non_neuronal_image,
        coma_rest_lib_path=coma_rest_lib_path)
    script = Template("""
    restlib_path = '$coma_rest_lib_path';
    setup_restlib_paths(restlib_path)
    dataDir = '$data_dir';
    maskName = '$mask_name';
    nCompo = $nComponents;
    Tr = $Tr;
    nameNeuronalData = '$nameNeuronal';
    nameNonNeuronalData = '$nameNonNeuronal';
    denoiseImage(dataDir,maskName,nCompo,Tr,nameNeuronalData,nameNonNeuronalData, restlib_path);
    """).substitute(d)
    # mfile=True: write the script to an .m file for easier debugging
    result = MatlabCommand(script=script, mfile=True,
                           prescript=[''], postscript=[''])
    r = result.run()
    print 'Neuronal component image saved as {n}'.format(n=neuronal_image)
    print 'Non-neuronal component image saved as {n}'.format(n=non_neuronal_image)
    return runtime