Example #1
    def _run_interface(self, runtime):
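        # Plot the requested view(s) with viz.slices_gridplot; 'all' expands to the
        # three canonical views, and undefined label/slices inputs fall back to None.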
        from pyacwereg import viz

        if self.inputs.view == 'all':
            view = ['axial', 'coronal', 'sagittal']
        else:
            view = np.atleast_1d(self.inputs.view).tolist()

        label = None
        if isdefined(self.inputs.label):
            label = self.inputs.label

        slices = None
        if isdefined(self.inputs.slices):
            slices = self.inputs.slices

        if self.inputs.split:
            self._out_file = []
            for i, v in enumerate(view):
                o = op.abspath('%s%04d.pdf' % (self.inputs.out_file, i))
                viz.slices_gridplot(self.inputs.in_files, view=v,
                                    out_file=o, label=label[i],
                                    slices=slices)
                self._out_file.append(o)
        else:
            self._out_file = [op.abspath(self.inputs.out_file + '.pdf')]
            viz.slices_gridplot(self.inputs.in_files, view=view,
                                out_file=self._out_file, label=label,
                                slices=slices)
        return runtime
Example #2
 def _check_mlab_inputs(self):
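     # Fall back to the interface's stored defaults when these inputs are undefined.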
     if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd:
         self.inputs.matlab_cmd = self._matlab_cmd
     if not isdefined(self.inputs.paths) and self._paths:
         self.inputs.paths = self._paths
     if not isdefined(self.inputs.use_mcr) and self._use_mcr:
         self.inputs.use_mcr = self._use_mcr
Example #3
 def _list_outputs(self):
     outputs = self.output_spec().get()
     outputs['out_file'] = self.inputs.out_file
     if not isdefined(outputs['out_file']) and isdefined(self.inputs.in_file):
         outputs['out_file'] = self._gen_fname(self.inputs.in_file,
                                               suffix='_sigloss')
     return outputs
Example #4
    def _list_outputs(self):
        """Execute this module.
"""
        outdir = self.inputs.base_directory
        if not isdefined(outdir):
            outdir = os.path.abspath('.')
        
        if isdefined(self.inputs.container):
            print("container defined", self.inputs.container)
            outdir = os.path.join(outdir, self.inputs.container)
            print(outdir)

        cwd = os.getcwd()
        dst = self._get_dst(os.path.join(cwd, self.inputs.json_name + '.json'))
        print("dst = ", dst)
        outdir = os.path.join(outdir, dst)
        print("new outdir = ", outdir)
        outdir = self._substitute(outdir)
        print("substituted outdir = ", outdir)

        if not os.path.exists(outdir):
            try:
                os.makedirs(outdir)
            except OSError as inst:
                if 'File exists' in str(inst):
                    pass
                else:
                    raise inst
Example #5
 def _run_interface(self, runtime):
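     # Undefined erosion parameters are passed on to _tpm2roi as None.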
     mask_erode_mm = self.inputs.mask_erode_mm
     if not isdefined(mask_erode_mm):
         mask_erode_mm = None
     erode_mm = self.inputs.erode_mm
     if not isdefined(erode_mm):
         erode_mm = None
     mask_erode_prop = self.inputs.mask_erode_prop
     if not isdefined(mask_erode_prop):
         mask_erode_prop = None
     erode_prop = self.inputs.erode_prop
     if not isdefined(erode_prop):
         erode_prop = None
     roi_file, eroded_mask = _tpm2roi(
         self.inputs.in_tpm,
         self.inputs.in_mask,
         mask_erode_mm,
         erode_mm,
         mask_erode_prop,
         erode_prop,
         self.inputs.prob_thresh,
         newpath=runtime.cwd,
     )
     self._results['roi_file'] = roi_file
     self._results['eroded_mask'] = eroded_mask
     return runtime
Example #6
    def _list_outputs(self):
        outputs = self.output_spec().get()

        #if isdefined(self.inputs.output_csv_file):
            
            #write to a csv file and assign a value to self.coherence_file (a
            #file name + path)

        # Always defined (the arrays):
        outputs['coherence_array'] = self.coherence
        outputs['timedelay_array'] = self.delay

        # Conditional
        if isdefined(self.inputs.output_csv_file) and hasattr(self, 'coherence'):
            # we need to make a function that we call here that writes the
            # coherence values to this file "coherence_csv" and makes the
            # time_delay csv file??
            self._make_output_files()
            outputs['coherence_csv'] = fname_presuffix(
                self.inputs.output_csv_file, suffix='_coherence')
            outputs['timedelay_csv'] = fname_presuffix(
                self.inputs.output_csv_file, suffix='_delay')

        if isdefined(self.inputs.output_figure_file) and hasattr(self, 'coherence'):
            self._make_output_figures()
            outputs['coherence_fig'] = fname_presuffix(
                self.inputs.output_figure_file, suffix='_coherence')
            outputs['timedelay_fig'] = fname_presuffix(
                self.inputs.output_figure_file, suffix='_delay')

        return outputs
Example #7
    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['out_file'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '.nii.gz')
        if not (isdefined(self.inputs.no_fmapreg) and self.inputs.no_fmapreg) and isdefined(self.inputs.fmap):
            outputs['out_1vol'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_1vol.nii.gz')
            outputs['fmap2str_mat'] = os.path.join(os.getcwd(),
                                        self.inputs.out_base + '_fieldmap2str.mat')
            outputs['fmap2epi_mat'] = os.path.join(os.getcwd(),
                                        self.inputs.out_base + '_fieldmaprads2epi.mat')
            outputs['fmap_epi'] = os.path.join(os.getcwd(),
                                        self.inputs.out_base + '_fieldmaprads2epi.nii.gz')
            outputs['fmap_str'] = os.path.join(os.getcwd(),
                                        self.inputs.out_base + '_fieldmaprads2str.nii.gz')
            outputs['fmapmag_str'] = os.path.join(os.getcwd(),
                                        self.inputs.out_base + '_fieldmap2str.nii.gz')
            outputs['shiftmap'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_fieldmaprads2epi_shift.nii.gz')
            outputs['fullwarp'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_warp.nii.gz')
            outputs['epi2str_inv'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_inv.mat')

        outputs['epi2str_mat'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '.mat')
        outputs['wmedge'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_fast_wmedge.nii.gz')
        outputs['wmseg'] = os.path.join(os.getcwd(),
                                    self.inputs.out_base + '_fast_wmseg.nii.gz')

        return outputs
Example #8
    def _run_interface(self, runtime):
        in_file_ref = Path(self.inputs.in_file)

        if isdefined(self.inputs.out_file):
            in_file_ref = Path(self.inputs.out_file)

        fname = in_file_ref.name.rstrip(
            ''.join(in_file_ref.suffixes))
        out_file = (Path(runtime.cwd) / ('plot_%s_contours.svg' % fname)).resolve()
        self._results['out_file'] = str(out_file)

        vmax = None if not isdefined(self.inputs.vmax) else self.inputs.vmax
        vmin = None if not isdefined(self.inputs.vmin) else self.inputs.vmin

        plot_segmentation(
            self.inputs.in_file,
            self.inputs.in_contours,
            out_file=str(out_file),
            cut_coords=self.inputs.cut_coords,
            display_mode=self.inputs.display_mode,
            levels=self.inputs.levels,
            colors=self.inputs.colors,
            saturate=self.inputs.saturate,
            vmin=vmin, vmax=vmax)

        return runtime
Example #9
def modify_paths(object, relative=True, basedir=None):
    """Modify filenames in a data structure to either full paths or relative paths
    """
    if not basedir:
        basedir = os.getcwd()
    if isinstance(object, dict):
        out = {}
        for key, val in sorted(object.items()):
            if isdefined(val):
                out[key] = modify_paths(val, relative=relative,
                                        basedir=basedir)
    elif isinstance(object, (list,tuple)):
        out = []
        for val in object:
            if isdefined(val):
                out.append(modify_paths(val, relative=relative,
                                        basedir=basedir))
        if isinstance(object, tuple):
            out = tuple(out)
    else:
        if isdefined(object):
            if isinstance(object, str) and os.path.isfile(object):
                if relative:
                    if config.getboolean('execution', 'use_relative_paths'):
                        out = relpath(object, start=basedir)
                    else:
                        out = object
                else:
                    out = os.path.abspath(os.path.join(basedir, object))
                if not os.path.exists(out):
                    raise FileNotFoundError('File %s not found' % out)
            else:
                out = object
    return out
Example #10
    def _list_outputs(self):
        outputs = self._outputs().get()

        jobtype = self.inputs.jobtype
        if jobtype.startswith('est'):
            outputs['normalization_parameters'] = []
            for imgf in filename_to_list(self.inputs.source):
                outputs['normalization_parameters'].append(fname_presuffix(imgf, suffix='_sn.mat', use_ext=False))
            outputs['normalization_parameters'] = list_to_filename(outputs['normalization_parameters'])

        if self.inputs.jobtype == "estimate":
            if isdefined(self.inputs.apply_to_files):
                outputs['normalized_files'] = self.inputs.apply_to_files
            outputs['normalized_source'] = self.inputs.source
        elif 'write' in self.inputs.jobtype:
            outputs['normalized_files'] = []
            if isdefined(self.inputs.apply_to_files):
                for imgf in filename_to_list(self.inputs.apply_to_files):
                    outputs['normalized_files'].append(fname_presuffix(imgf, prefix='w'))

            if isdefined(self.inputs.source):
                outputs['normalized_source'] = []
                for imgf in filename_to_list(self.inputs.source):
                    outputs['normalized_source'].append(fname_presuffix(imgf, prefix='w'))

        return outputs
Example #11
 def _format_arg(self, opt, spec, val):
     if opt == 'moving_image':
         retval = []
         for ii in range(len(self.inputs.moving_image)):
             retval.append('--metric "%s[%s,%s,%d,%d]"' % (
                 self.inputs.metric, self.inputs.fixed_image[ii],
                 self.inputs.moving_image[ii], self.inputs.metric_weight,
                 self.inputs.radius))
         return " ".join(retval)
     elif opt == 'moving_image_mask':
         return '--masks "[%s,%s]"' % (self.inputs.fixed_image_mask, self.inputs.moving_image_mask)
     elif opt == 'initial_moving_transform':
         if isdefined(self.inputs.invert_initial_moving_transform) and self.inputs.invert_initial_moving_transform:
             return '--initial-moving-transform "[%s,1]"' % self.inputs.initial_moving_transform
         else:
             return '--initial-moving-transform "[%s,0]"' % self.inputs.initial_moving_transform
     elif opt == "number_of_iterations":
         convergence_iter = "x".join([str(i) for i in self.inputs.number_of_iterations])
         return '--convergence "[%s,%g,%d]"' % (convergence_iter,
                                             self.inputs.convergence_threshold,
                                             self.inputs.convergence_window_size)
     elif opt == 'output_transform_prefix':
         if isdefined(self.inputs.output_inverse_warped_image) and self.inputs.output_inverse_warped_image:
             return '--output "[%s,%s,%s]"' % (self.inputs.output_transform_prefix, self.inputs.output_warped_image, self.inputs.output_inverse_warped_image )
         elif isdefined(self.inputs.output_warped_image) and self.inputs.output_warped_image:
             return '--output "[%s,%s]"'     % (self.inputs.output_transform_prefix, self.inputs.output_warped_image )
         else:
             return '--output %s' % self.inputs.output_transform_prefix
     return super(antsRegistration, self)._format_arg(opt, spec, val)
Example #12
 def _list_outputs(self):
     outputs = self._outputs().get()
     outputs["out_file"] = self.inputs.out_file
     if not isdefined(outputs["out_file"]):
         source = self.inputs.source_file
         # Some recon-all files don't have a proper extension (e.g. "lh.thickness")
         # so we have to account for that here
         bad_extensions = [".%s" % e for e in ["area", "mid", "pial", "avg_curv", "curv", "inflated",
                                               "jacobian_white", "orig", "nofix", "smoothwm", "crv",
                                               "sphere", "sulc", "thickness", "volume", "white"]]
         use_ext = True
         if split_filename(source)[2] in bad_extensions:
             source = source + ".stripme"
             use_ext = False
         ext = ""
         if isdefined(self.inputs.target_type):
             ext = "." + filemap[self.inputs.target_type]
             use_ext = False
         outputs["out_file"] = fname_presuffix(source,
                                               suffix=".%s%s" % (self.inputs.target_subject, ext),
                                               newpath=os.getcwd(),
                                               use_ext=use_ext)
     else:
         outputs["out_file"] = os.path.abspath(self.inputs.out_file)
     return outputs
Example #13
 def _format_arg(self, name, trait_spec, value):
     if name == 'syn_file':
         if not isdefined(self.inputs.bvec_file) or not isdefined(self.inputs.b0_file):
             return ""
         else:
             return trait_spec.argstr % value
     return super(DwiTool, self)._format_arg(name, trait_spec, value)
Example #14
	def _gen_outfilename(self):
		out_file = self.inputs.out_file
		
		if not isdefined(out_file) and isdefined(self.inputs.in_file):
			out_file = self._gen_fname(self.inputs.in_file, suffix="_surf")
			#out_file = self._gen_fname(self.inputs.in_file, suffix="_surf")
		return out_file
Example #15
    def _parse_inputs(self, skip=None):
        if skip is None:
            skip = []

        if not isdefined(self.inputs.out_base):
            self.inputs.out_base = './nipypetu'

        self.inputs.out_base = os.path.abspath(self.inputs.out_base)

        if isdefined(self.inputs.encoding_file):
            skip.append('encoding_direction')
            skip.append('readout_times')
        else:
            encdir = 'y'
            enctimes = None

            if isdefined(self.inputs.encoding_direction):
                encdir = self.inputs.encoding_direction

            if isdefined(self.inputs.readout_times):
                enctimes = self.inputs.readout_times

            self.inputs.encoding_file = self._generate_encfile(encdir, enctimes)

        return super(TOPUP, self)._parse_inputs(skip=skip)
Example #16
    def _list_outputs(self):
        outputs = self._outputs().get()

        jobtype = self.inputs.jobtype
        if jobtype.startswith("est"):
            outputs["normalization_parameters"] = []
            for imgf in filename_to_list(self.inputs.source):
                outputs["normalization_parameters"].append(fname_presuffix(imgf, suffix="_sn.mat", use_ext=False))
            outputs["normalization_parameters"] = list_to_filename(outputs["normalization_parameters"])

        if self.inputs.jobtype == "estimate":
            if isdefined(self.inputs.apply_to_files):
                outputs["normalized_files"] = self.inputs.apply_to_files
            outputs["normalized_source"] = self.inputs.source
        elif "write" in self.inputs.jobtype:
            outputs["normalized_files"] = []
            if isdefined(self.inputs.apply_to_files):
                for imgf in filename_to_list(self.inputs.apply_to_files):
                    outputs["normalized_files"].append(fname_presuffix(imgf, prefix="w"))

            if isdefined(self.inputs.source):
                outputs["normalized_source"] = []
                for imgf in filename_to_list(self.inputs.source):
                    outputs["normalized_source"].append(fname_presuffix(imgf, prefix="w"))

        return outputs
Example #17
    def _list_outputs(self):

        outputs = self.output_spec().get()
        _in = self.inputs

        if isdefined(_in.out_reg_file):
            outputs["out_reg_file"] = op.abspath(_in.out_reg_file)
        elif _in.source_file:
            suffix = "_bbreg_%s.dat" % _in.subject_id
            outputs["out_reg_file"] = fname_presuffix(_in.source_file, suffix=suffix, use_ext=False)

        if isdefined(_in.registered_file):
            if isinstance(_in.registered_file, bool):
                outputs["registered_file"] = fname_presuffix(_in.source_file, suffix="_bbreg")
            else:
                outputs["registered_file"] = op.abspath(_in.registered_file)

        if isdefined(_in.out_fsl_file):
            if isinstance(_in.out_fsl_file, bool):
                suffix = "_bbreg_%s.mat" % _in.subject_id
                out_fsl_file = fname_presuffix(_in.source_file, suffix=suffix, use_ext=False)
                outputs["out_fsl_file"] = out_fsl_file
            else:
                outputs["out_fsl_file"] = op.abspath(_in.out_fsl_file)

        outputs["min_cost_file"] = outputs["out_reg_file"] + ".mincost"
        return outputs
Example #18
    def _run_interface(self, runtime):
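        # Optional mask, patch_radius and block_radius settings are only forwarded when defined.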
        out_file = op.abspath(self._gen_outfilename())

        settings = dict(mask=None,
                        rician=(self.inputs.noise_model == 'rician'))

        if isdefined(self.inputs.in_mask):
            settings['mask'] = nb.load(self.inputs.in_mask).get_data()

        if isdefined(self.inputs.patch_radius):
            settings['patch_radius'] = self.inputs.patch_radius

        if isdefined(self.inputs.block_radius):
            settings['block_radius'] = self.inputs.block_radius

        noise_mask = None
        if isdefined(self.inputs.noise_mask):
            noise_mask = nb.load(self.inputs.noise_mask).get_data()

        _, s = nlmeans_proxy(self.inputs.in_file,
                             settings,
                             noise_mask=noise_mask,
                             out_file=out_file)
        iflogger.info(('Denoised image saved as {i}, estimated '
                      'sigma={s}').format(i=out_file, s=s))
        return runtime
Example #19
    def _list_outputs(self):
        outputs = self.output_spec().get()
        if not isdefined(self.inputs.out_dir):
            out_dir = self._gen_filename("out_dir")
        else:
            out_dir = self.inputs.out_dir

        outputs['log'] = os.path.abspath(os.path.join(out_dir, 'probtrackx.log'))
        # outputs['way_total'] = os.path.abspath(os.path.join(out_dir, 'waytotal'))
        if isdefined(self.inputs.opd) and self.inputs.opd:
            if isinstance(self.inputs.seed, list) and isinstance(self.inputs.seed[0], list):
                outputs['fdt_paths'] = []
                for seed in self.inputs.seed:
                    outputs['fdt_paths'].append(
                            os.path.abspath(
                                self._gen_fname("fdt_paths_%s" % ("_".join([str(s) for s in seed])),
                                                cwd=out_dir, suffix='')))
            else:
                outputs['fdt_paths'] = os.path.abspath(self._gen_fname("fdt_paths",
                                               cwd=out_dir, suffix=''))

        # handle seeds-to-target output files
        if isdefined(self.inputs.target_masks):
            outputs['targets'] = []
            for target in self.inputs.target_masks:
                outputs['targets'].append(os.path.abspath(
                                                self._gen_fname('seeds_to_' + os.path.split(target)[1],
                                                cwd=out_dir,
                                                suffix='')))
        if isdefined(self.inputs.verbose) and self.inputs.verbose == 2:
            outputs['particle_files'] = [os.path.abspath(
                                            os.path.join(out_dir, 'particle%d' % i))
                                            for i in range(self.inputs.n_samples)]
        return outputs
Example #20
    def _post_run_hook(self, runtime):
        outputs = self.aggregate_outputs(runtime=runtime)
        mri_dir = None
        if isdefined(self.inputs.subject_id):
            mri_dir = os.path.join(self.inputs.subjects_dir,
                                   self.inputs.subject_id, 'mri')

        if isdefined(self.inputs.reference_file):
            target_file = self.inputs.reference_file
        else:
            target_file = os.path.join(mri_dir, 'brainmask.mgz')

        # Apply transform for simplicity
        mri_vol2vol = fs.ApplyVolTransform(
            source_file=self.inputs.source_file,
            target_file=target_file,
            lta_file=outputs.out_lta_file,
            interp='nearest')
        res = mri_vol2vol.run()

        self._fixed_image = target_file
        self._moving_image = res.outputs.transformed_file
        if mri_dir is not None:
            self._contour = os.path.join(mri_dir, 'ribbon.mgz')
        NIWORKFLOWS_LOG.info(
            'Report - setting fixed (%s) and moving (%s) images',
            self._fixed_image, self._moving_image)

        return super(MRICoregRPT, self)._post_run_hook(runtime)
Example #21
    def _run_interface(self, runtime):
        mask = None
        if isdefined(self.inputs.in_mask):
            mask = self.inputs.in_mask

        title = self.inputs.title
        if isdefined(self.inputs.subject):
            title += ', subject %s' % self.inputs.subject

        if isdefined(self.inputs.metadata):
            title += ' (' + '_'.join(self.inputs.metadata) + ')'

        if isdefined(self.inputs.figsize):
            fig = plot_mosaic(
                self.inputs.in_file,
                title=title,
                overlay_mask=mask,
                figsize=self.inputs.figsize)
        else:
            fig = plot_mosaic(
                self.inputs.in_file,
                title=title,
                overlay_mask=mask)

        fig.savefig(self.inputs.out_file, dpi=self.inputs.dpi)

        return runtime
Example #22
 def _list_outputs(self):
     outputs = self._outputs().get()
     outputs['outlier_files'] = []
     outputs['intensity_files'] = []
     outputs['statistic_files'] = []
     if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
         outputs['norm_files'] = []
     if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
         outputs['plot_files'] = []
     for i, f in enumerate(filename_to_list(self.inputs.realigned_files)):
         outlierfile, intensityfile, statsfile, normfile, plotfile = self._get_output_filenames(f, os.getcwd())
         outputs['outlier_files'].insert(i, outlierfile)
         outputs['intensity_files'].insert(i, intensityfile)
         outputs['statistic_files'].insert(i, statsfile)
         if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
             outputs['norm_files'].insert(i, normfile)
         if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
             outputs['plot_files'].insert(i, plotfile)
     '''
     outputs['outlier_files'] = list_to_filename(outputs['outlier_files'])
     outputs['intensity_files'] = list_to_filename(outputs['intensity_files'])
     outputs['statistic_files'] = list_to_filename(outputs['statistic_files'])
     if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
         outputs['norm_files'] = list_to_filename(outputs['norm_files'])
     if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
         outputs['plot_files'] = list_to_filename(outputs['plot_files'])
     '''
     return outputs
Example #23
    def _get_dst(self, src):
        path, fname = os.path.split(src)
        if self.inputs.parameterization:
            dst = path
            if isdefined(self.inputs.strip_dir):
                dst = dst.replace(self.inputs.strip_dir, '')

            if not isdefined(self.inputs.container):
                folders = [folder for folder in dst.split(os.path.sep) if
                            folder.startswith('_')]
            else:
                folders = [folder for folder in dst.split(os.path.sep) if
                           (folder.startswith('_') and self.inputs.container not in folder)]

            dst = os.path.sep.join(folders).replace('_','')


        else:
            if fname:
                dst = fname
            else:
                dst = path.split(os.path.sep)[-1]
        try:
            if dst[0] == os.path.sep:
                dst = dst[1:]
        except:
            pass
        return dst
Example #24
 def _list_outputs(self):
     outputs = self.output_spec().get()
     outputs["out_reg_file"] = self.inputs.out_reg_file
     if not isdefined(self.inputs.out_reg_file) and self.inputs.source_file:
         outputs["out_reg_file"] = fname_presuffix(self.inputs.source_file, suffix="_robustreg.lta", use_ext=False)
     prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file)
     suffices = dict(
         registered_file=("src", "_robustreg", True),
         weights_file=("src", "_robustweights", True),
         half_source=("src", "_halfway", True),
         half_targ=("trg", "_halfway", True),
         half_weights=("src", "_halfweights", True),
         half_source_xfm=("src", "_robustxfm.lta", False),
         half_targ_xfm=("trg", "_robustxfm.lta", False),
     )
     for name, sufftup in suffices.items():
         value = getattr(self.inputs, name)
         if isdefined(value):
             if isinstance(value, bool):
                 outputs[name] = fname_presuffix(
                     prefices[sufftup[0]], suffix=sufftup[1], newpath=os.getcwd(), use_ext=sufftup[2]
                 )
             else:
                 outputs[name] = value
     return outputs
Example #25
 def _generate_design(self, infolist=None):
     """Generate design specification for a typical fmri paradigm
     """
     realignment_parameters = []
     if isdefined(self.inputs.realignment_parameters):
         for parfile in self.inputs.realignment_parameters:
             realignment_parameters.append(np.loadtxt(parfile))
     outliers = []
     if isdefined(self.inputs.outlier_files):
         for filename in self.inputs.outlier_files:
             try:
                 outindices = np.loadtxt(filename, dtype=int)
             except IOError:
                 outliers.append([])
             else:
                 if outindices.size == 1:
                     outliers.append([outindices.tolist()])
                 else:
                     outliers.append(outindices.tolist())
     if infolist is None:
         if isdefined(self.inputs.subject_info):
             infolist = self.inputs.subject_info
         else:
             infolist = gen_info(self.inputs.event_files)
     self._sessinfo = self._generate_standard_design(
         infolist,
         functional_runs=self.inputs.functional_runs,
         realignment_parameters=realignment_parameters,
         outliers=outliers,
     )
Example #26
 def _run_interface(self, runtime):
     if not isdefined(self.inputs.screenshot_stem):
         stem = "%s_%s_%s" % (
                 self.inputs.subject_id, self.inputs.hemi, self.inputs.surface)
     else:
         stem = self.inputs.screenshot_stem
         stem_args = self.inputs.stem_template_args
         if isdefined(stem_args):
             args = tuple([getattr(self.inputs, arg) for arg in stem_args])
             stem = stem % args
     # Check if the DISPLAY variable is set -- should avoid crashes (might not?)
     if "DISPLAY" not in os.environ:
         raise RuntimeError("Graphics are not enabled -- cannot run tksurfer")
     runtime.environ["_SNAPSHOT_STEM"] = stem
     self._write_tcl_script()
     runtime = super(SurfaceSnapshots, self)._run_interface(runtime)
     # If a display window can't be opened, this will crash on
     # aggregate_outputs.  Let's try to parse stderr and raise a
     # better exception here if that happened.
     errors = ["surfer: failed, no suitable display found",
               "Fatal Error in tksurfer.bin: could not open display"]
     for err in errors:
         if err in runtime.stderr:
             self.raise_exception(runtime)
     # Tksurfer always (or at least always when you run a tcl script)
     # exits with a nonzero returncode.  We have to force it to 0 here.
     runtime.returncode = 0
     return runtime
Example #27
 def _run_interface(self, runtime):
     print('in plot_coclass')

     coclass_matrix_file = self.inputs.coclass_matrix_file
     labels_file = self.inputs.labels_file
     list_value_range = self.inputs.list_value_range

     print('loading coclass')
     coclass_mat = np.load(coclass_matrix_file)

     if isdefined(labels_file):
         print('loading labels')
         labels = [line.strip() for line in open(labels_file)]
     else:
         labels = []

     if not isdefined(list_value_range):
         list_value_range = [np.amin(coclass_mat), np.amax(coclass_mat)]

     print('plotting heatmap')
     path, fname, ext = split_f(coclass_matrix_file)
     plot_coclass_matrix_file = os.path.abspath('heatmap_' + fname + '.eps')
     plot_ranged_cormat(plot_coclass_matrix_file, coclass_mat, labels,
                        fix_full_range=list_value_range)

     return runtime
Example #28
    def _list_outputs(self):
        outputs = self._outputs().get()

        jobtype = self.inputs.jobtype
        if jobtype.startswith('est'):
            outputs['normalization_parameters'] = []
            for imgf in filename_to_list(self.inputs.source):
                outputs['normalization_parameters'].append(fname_presuffix(imgf, suffix='_sn.mat', use_ext=False))
            outputs['normalization_parameters'] = list_to_filename(outputs['normalization_parameters'])

        if self.inputs.jobtype == "estimate":
            if isdefined(self.inputs.apply_to_files):
                outputs['normalized_files'] = self.inputs.apply_to_files
            outputs['normalized_source'] = self.inputs.source
        elif 'write' in self.inputs.jobtype:
            outputs['normalized_files'] = []
            if isdefined(self.inputs.apply_to_files):
                filelist = filename_to_list(self.inputs.apply_to_files)
                for f in filelist:
                    if isinstance(f, list):
                        run = [fname_presuffix(in_f, prefix=self.inputs.out_prefix) for in_f in f]
                    else:
                        run = [fname_presuffix(f, prefix=self.inputs.out_prefix)]
                    outputs['normalized_files'].extend(run)
            if isdefined(self.inputs.source):
                outputs['normalized_source'] = fname_presuffix(self.inputs.source, prefix=self.inputs.out_prefix)

        return outputs
Example #29
	def _run_interface(self, runtime):
		tracks, header = trk.read(self.inputs.in_file)
		if not isdefined(self.inputs.data_dims):
			data_dims = header['dim']
		else:
			data_dims = self.inputs.data_dims

		if not isdefined(self.inputs.voxel_dims):
			voxel_size = header['voxel_size']
		else:
			voxel_size = self.inputs.voxel_dims

		affine = header['vox_to_ras']

		streams = ((ii[0]) for ii in tracks)
		data = density_map(streams, data_dims, voxel_size)
		if data.max() < 2**15:
		   data = data.astype('int16')

		img = nb.Nifti1Image(data,affine)
		out_file = op.abspath(self.inputs.out_filename)
		nb.save(img, out_file)
		iflogger.info('Track density map saved as {i}'.format(i=out_file))
		iflogger.info('Data Dimensions {d}'.format(d=data_dims))
		iflogger.info('Voxel Dimensions {v}'.format(v=voxel_size))
		return runtime
Example #30
 def _list_outputs(self):
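     # Derive the binary output name from in_file when none was given, honouring out_type if set.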
     outputs = self.output_spec().get()
     outfile = self.inputs.binary_file
     if not isdefined(outfile):
         if isdefined(self.inputs.out_type):
             outfile = fname_presuffix(self.inputs.in_file,
                                       newpath=os.getcwd(),
                                       suffix='.'.join(('_thresh',
                                                       self.inputs.out_type)),
                                       use_ext=False)
         else:
             outfile = fname_presuffix(self.inputs.in_file,
                                       newpath=os.getcwd(),
                                       suffix='_thresh')
     outputs['binary_file'] = outfile
     value = self.inputs.count_file
     if isdefined(value):
         if isinstance(value, bool):
             if value:
                 outputs['count_file'] = fname_presuffix(self.inputs.in_file,
                                                         suffix='_count.txt',
                                                         newpath=os.getcwd(),
                                                         use_ext=False)
         else:
             outputs['count_file'] = value
     return outputs
Example #31
    def _post_run_hook(self, runtime):
        """ generates a report showing slices from each axis """

        brain_extraction_mask = self.aggregate_outputs(
            runtime=runtime).BrainExtractionMask

        if (isdefined(self.inputs.keep_temporary_files)
                and self.inputs.keep_temporary_files == 1):
            self._anat_file = self.aggregate_outputs(
                runtime=runtime).N4Corrected0
        else:
            self._anat_file = self.inputs.anatomical_image
        self._mask_file = brain_extraction_mask
        self._seg_files = [brain_extraction_mask]
        self._masked = False

        NIWORKFLOWS_LOG.info(
            'Generating report for ANTS BrainExtraction. file "%s", mask "%s"',
            self._anat_file,
            self._mask_file,
        )

        return super(BrainExtractionRPT, self)._post_run_hook(runtime)
Example #32
    def _generate_segment(self):
        if not isdefined(self.inputs.subjects_dir):
            freesurfer_status = 'Not run'
        else:
            recon = fs.ReconAll(subjects_dir=self.inputs.subjects_dir,
                                subject_id=self.inputs.subject_id,
                                T1_files=self.inputs.t1w,
                                flags='-noskullstrip')
            if recon.cmdline.startswith('echo'):
                freesurfer_status = 'Pre-existing directory'
            else:
                freesurfer_status = 'Run by sMRIPrep'

        t2w_seg = ''
        if self.inputs.t2w:
            t2w_seg = '(+ {:d} T2-weighted)'.format(len(self.inputs.t2w))

        return SUBJECT_TEMPLATE.format(subject_id=self.inputs.subject_id,
                                       n_t1s=len(self.inputs.t1w),
                                       t2w=t2w_seg,
                                       output_spaces=', '.join(
                                           self.inputs.output_spaces),
                                       freesurfer_status=freesurfer_status)
Example #33
    def _check_len(self, name, new):
        if name == "keys":
            nitems = len(new)
            if len(set(new)) != nitems:
                raise ValueError(
                    'Found duplicated entries in the index of ordered keys')

        if not isdefined(self.inputs.keys):
            return

        if name == "key" and new not in self.inputs.keys:
            raise ValueError('Selected key "%s" not found in the index' % new)

        if name in self._fields:
            if isinstance(new, str) or len(new) < 1:
                raise ValueError(
                    'Trying to set an invalid value (%s) for input "%s"' %
                    (new, name))

            if len(new) != len(self.inputs.keys):
                raise ValueError(
                    'Length of value (%s) for input field "%s" does not match '
                    'the length of the indexing list.' % (new, name))
Example #34
    def _list_outputs(self):
        """Create a Bunch which contains all possible files generated
        by running the interface.  Some files are always generated, others
        depending on which ``inputs`` options are set.

        Returns
        -------
        outputs : Bunch object
            Bunch object containing all possible files generated by
            interface object.

            If None, file was not generated
            Else, contains path, filename of generated outputfile

        """
        outputs = self._outputs().get()
        ext = Info.output_type_to_ext(self.inputs.output_type)
        outbase = 'vol*'
        if isdefined(self.inputs.out_base_name):
            outbase = '%s*' % self.inputs.out_base_name
        outputs['out_files'] = sorted(glob(os.path.join(os.getcwd(),
                                                    outbase + ext)))
        return outputs
Example #35
    def _run_interface(self, runtime):
        sub = self.inputs.sub
        ses = self.inputs.ses
        task = self.inputs.task
        fwhm = self.inputs.fwhm
        run = self.inputs.run
        rec = self.inputs.rec
        acq = self.inputs.acq
        df = pd.DataFrame([], columns=metric_columns)

        for metric_name, metric_function in pvc_metrics.items():
            mse = pvc_mse(self.inputs.pvc, self.inputs.pve, fwhm)
            temp = pd.DataFrame(
                [['pvc', sub, ses, task, run, acq, rec, 2, metric_name, mse]],
                columns=metric_columns)
            df = pd.concat([df, temp])
        df.fillna(0, inplace=True)
        if not isdefined(self.inputs.out_file):
            self.inputs.out_file = self._gen_output(
                self.inputs.sub, self.inputs.ses, self.inputs.task,
                self.inputs.run, self.inputs.acq, self.inputs.rec)
        df.to_csv(self.inputs.out_file, index=False)
        return runtime
Example #36
    def _run_interface(self, runtime):
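        # Intersect all input masks into a single boolean image; return early if no inputs are defined.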
        self._merged_file = None

        if not isdefined(self.inputs.in_files):
            return runtime

        in_imgs = [nib.load(in_file) for in_file in self.inputs.in_files]

        if len(in_imgs) == 0:
            return runtime

        outshape = first(in_imgs).shape
        assert all(in_img.shape == outshape for in_img in in_imgs)

        in_data = [np.asanyarray(in_img.dataobj).astype(bool) for in_img in in_imgs]
        outarr = np.logical_and.reduce(in_data)

        outimg = new_img_like(first(in_imgs), outarr)

        self._merged_file = op.abspath(f"merged.nii.gz")
        nib.save(outimg, self._merged_file)

        return runtime
Example #37
    def _post_run_hook(self, runtime):
        self._fixed_image = self.inputs.after
        self._moving_image = self.inputs.before
        if self.inputs.base == 'before':
            resampled_after = nli.resample_to_img(self._fixed_image, self._moving_image)
            fname = fname_presuffix(self._fixed_image, suffix='_resampled', newpath=runtime.cwd)
            resampled_after.to_filename(fname)
            self._fixed_image = fname
        else:
            resampled_before = nli.resample_to_img(self._moving_image, self._fixed_image)
            fname = fname_presuffix(self._moving_image, suffix='_resampled', newpath=runtime.cwd)
            resampled_before.to_filename(fname)
            self._moving_image = fname
        self._contour = self.inputs.wm_seg if isdefined(self.inputs.wm_seg) else None
        NIWORKFLOWS_LOG.info(
            'Report - setting before (%s) and after (%s) images',
            self._fixed_image, self._moving_image)

        runtime = super(ResampleBeforeAfterRPT, self)._post_run_hook(runtime)
        NIWORKFLOWS_LOG.info('Successfully created report (%s)', self._out_report)
        os.unlink(fname)

        return runtime
Example #38
    def _run_interface(self, runtime):
        models = self.inputs.model
        if not isinstance(models, list):
            layout = gb.BIDSLayout(self.inputs.bids_dir)

            if not isdefined(models):
                models = layout.get(type='model')
                if not models:
                    raise ValueError("No models found")
            elif models == 'default':
                models = ba.auto_model(layout)

        models = [_ensure_model(m) for m in models]

        if self.inputs.selectors:
            # This is almost certainly incorrect
            models = [model for model in models
                      if all(val in model['input'].get(key, [val])
                             for key, val in self.inputs.selectors.items())]

        self._results['model_spec'] = models

        return runtime
Example #39
    def _run_interface(self, runtime):
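        # Forward only the defined inputs as keyword arguments to the wrapped function.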
        function_handle = create_function_from_source(self.inputs.function_str)

        args = {}
        for name in self._input_names:
            value = getattr(self.inputs, name)
            if isdefined(value):
                args[name] = value

        out = function_handle(**args)

        if len(self._output_names) == 1:
            self._out[self._output_names[0]] = out
        else:
            if isinstance(out,
                          tuple) and (len(out) != len(self._output_names)):
                raise RuntimeError('Mismatch in number of expected outputs')

            else:
                for idx, name in enumerate(self._output_names):
                    self._out[name] = out[idx]

        return runtime
Example #40
    def _list_outputs(self):
        outputs = self.output_spec().get()

        includelist = [
            re.match(self.inputs.pattern, in_value) is None
            for in_value in self.inputs.keys
        ]

        for field in self._fields:
            valuelist = getattr(self.inputs, field)
            if not isdefined(valuelist):  # require all inputs
                return outputs

        for field in self._fields:
            valuelist = getattr(self.inputs, field)
            if not isinstance(valuelist, list):
                valuelist = [valuelist]
            outputs[field] = [
                value for include, value in zip(includelist, valuelist)
                if include
            ]

        return outputs
Example #41
    def set_mapnode_substitutions(self, n_runs):
        """Find mapnode names and add datasink substitutions to sort by run."""

        # First determine top-level mapnode names
        mapnode_names = find_mapnodes(self.wf)

        # Then determine mapnode names for each nested workflow
        # Note that this currently only works for one level of nesting
        nested_workflows = find_nested_workflows(self.wf)
        for wf in nested_workflows:
            mapnode_names.extend(find_mapnodes(wf))

        # Build a list of substitution tuples
        substitutions = []
        for r in reversed(range(n_runs)):
            for name in mapnode_names:
                substitutions.append(("_%s%d" % (name, r), "run_%d" % (r + 1)))

        # Set the substitutions attribute on the DataSink node
        if isdefined(self.sink_node.inputs.substitutions):
            self.sink_node.inputs.substitutions.extend(substitutions)
        else:
            self.sink_node.inputs.substitutions = substitutions
Example #42
    def _run_interface(self, runtime):
        from scipy.ndimage.morphology import binary_dilation
        from nilearn.masking import compute_epi_mask

        orig_file_nii = nb.load(self.inputs.in_file)
        in_file_data = orig_file_nii.get_fdata()

        # pad the data to avoid the mask estimation running into edge effects
        in_file_data_padded = np.pad(in_file_data, (1, 1),
                                     "constant",
                                     constant_values=(0, 0))

        padded_nii = nb.Nifti1Image(in_file_data_padded, orig_file_nii.affine,
                                    orig_file_nii.header)

        mask_nii = compute_epi_mask(padded_nii, exclude_zeros=True)

        mask_data = np.asanyarray(mask_nii.dataobj).astype(np.uint8)
        if isdefined(self.inputs.dilation):
            mask_data = binary_dilation(mask_data).astype(np.uint8)

        # reverse image padding
        mask_data = mask_data[1:-1, 1:-1, 1:-1]

        # exclude zero and NaN voxels
        mask_data[in_file_data == 0] = 0
        mask_data[np.isnan(in_file_data)] = 0

        better_mask = nb.Nifti1Image(mask_data, orig_file_nii.affine,
                                     orig_file_nii.header)
        better_mask.set_data_dtype(np.uint8)
        better_mask.to_filename("mask_file.nii.gz")

        self._mask_file = os.path.join(runtime.cwd, "mask_file.nii.gz")

        runtime.returncode = 0
        return super(ComputeEPIMask, self)._run_interface(runtime)
Example #43
    def _run_interface(self, runtime):
        # Create function handle
        if self.as_module:
            import importlib
            pieces = self.inputs.function_str.split('.')
            module = '.'.join(pieces[:-1])
            function = pieces[-1]
            try:
                function_handle = getattr(importlib.import_module(module),
                                          function)
            except ImportError:
                raise RuntimeError('Could not import module: %s' %
                                   self.inputs.function_str)
        else:
            function_handle = create_function_from_source(
                self.inputs.function_str, self.imports)

        # Get function args
        args = {}
        for name in self._input_names:
            value = getattr(self.inputs, name)
            if isdefined(value):
                args[name] = value

        out = function_handle(**args)
        if len(self._output_names) == 1:
            self._out[self._output_names[0]] = out
        else:
            if isinstance(out, tuple) and \
                    (len(out) != len(self._output_names)):
                raise RuntimeError('Mismatch in number of expected outputs')

            else:
                for idx, name in enumerate(self._output_names):
                    self._out[name] = out[idx]

        return runtime
Example #44
    def _generate_segment(self):
        dof = self.inputs.registration_dof
        stc = {
            True: 'Applied',
            False: 'Not applied',
            'TooShort': 'Skipped (too few volumes)'
        }[self.inputs.slice_timing]
        reg = {
            'FSL': [
                'FSL <code>flirt</code> with boundary-based registration'
                ' (BBR) metric - %d dof' % dof,
                'FSL <code>flirt</code> rigid registration - 6 dof'
            ],
            'FreeSurfer': [
                'FreeSurfer <code>bbregister</code> '
                '(boundary-based registration, BBR) - %d dof' % dof,
                'FreeSurfer <code>mri_coreg</code> - %d dof' % dof
            ],
        }[self.inputs.registration][self.inputs.fallback]
        if self.inputs.pe_direction is None:
            pedir = 'MISSING - Assuming Anterior-Posterior'
        else:
            pedir = {
                'i': 'Left-Right',
                'j': 'Anterior-Posterior'
            }[self.inputs.pe_direction[0]]

        conflist = ''
        if isdefined(self.inputs.confounds_file):
            with open(self.inputs.confounds_file) as cfh:
                conflist = cfh.readline().strip('\n').strip()
        return FUNCTIONAL_TEMPLATE.format(
            pedir=pedir,
            stc=stc,
            sdc=self.inputs.distortion_correction,
            registration=reg,
            confounds=re.sub(r'[\t ]+', ', ', conflist),
            tr=self.inputs.tr)
Example #45
def prepare_contrasts(contrasts, all_regressors):
    """Make mutable copy of contrast list, and
    generate contrast design_matrix from dictionary weight mapping

    Each value in the contrasts list is a ContrastInfo object:
      ContrastInfo = namedtuple(
          'ContrastInfo', ('name', 'conditions', 'weights', 'test', 'entities')
      )
    """
    if not isdefined(contrasts):
        return []

    out_contrasts = []
    for contrast_info in contrasts:
        conds = contrast_info["conditions"]
        in_weights = np.atleast_2d(contrast_info["weights"])
        # Are any necessary values missing for contrast estimation?
        missing = len(conds) != in_weights.shape[1] or any(
            cond not in all_regressors for cond in conds)
        if missing:
            continue

        weights = np.zeros((in_weights.shape[0], len(all_regressors)),
                           dtype=in_weights.dtype)
        # Find indices of input conditions in all_regressors list
        sorter = np.argsort(all_regressors)
        indices = sorter[np.searchsorted(all_regressors, conds, sorter=sorter)]
        weights[:, indices] = in_weights

        out_contrasts.append((
            contrast_info['name'],
            weights,
            contrast_info['entities'].copy(),
            contrast_info['test'],
        ))

    return out_contrasts
Example #46
    def _run_interface(self, runtime):
        df = pd.read_csv(self.inputs.in_file)
        out_columns = [
            'sub', 'ses', 'task', 'roi', 'metric', 'measure', 'value'
        ]
        df_out = pd.DataFrame(columns=out_columns)

        for ses, ses_df in df.groupby(['ses']):
            for task, task_df in ses_df.groupby(['task']):
                for measure, measure_name in zip(outlier_measures.values(),
                                                 outlier_measures.keys()):
                    for metric_name, metric_df in task_df.groupby(['metric']):
                        metricValues = metric_df.value.values
                        if len(metricValues.shape) == 1:
                            metricValues = metricValues.reshape(-1, 1)
                        if 'cdf' in inspect.getfullargspec(measure).args:
                            if 'coreg' in metric_df.analysis or 'pvc' in metric_df.analysis:
                                cdf = True
                            else:
                                cdf = False
                            m = np.array(measure(metricValues, cdf=cdf))
                        else:
                            m = np.array(measure(metricValues))
                        if len(m.shape) > 1: m = m.flatten()
                        r = pd.Series(m)

                        #Get column number of the current outlier measure Reindex the test_df from 0 to the number of rows it has
                        #Get the series with the calculate the distance measure for the current measure
                        df.index = range(df.shape[0])
                        df['value'] = r
                        df['measure'] = [measure_name] * df.shape[0]
                        df_out = pd.concat([df_out, df], axis=0)
        if not isdefined(self.inputs.out_file):
            self.inputs.out_file = self._gen_output()
        df_out.to_csv(self.inputs.out_file, index=False)

        return runtime
Example #47
    def _list_outputs(self):
        outputs = self.output_spec().get()
        self._set_outputs()
        if isdefined(self.inputs.warped_image):
            outputs["warped_image"] = self.inputs.warped_image
        if isdefined(self.inputs.inverse_warped_image):
            outputs["inverse_warped_image"] = self.inputs.inverse_warped_image
        if isdefined(self.inputs.composite_transform):
            outputs["composite_transform"] = self.inputs.composite_transform
        if isdefined(self.inputs.out_matrix):
            outputs["out_matrix"] = self.inputs.out_matrix
        if isdefined(self.inputs.out_matrix_inverse):
            outputs["out_matrix_inverse"] = self.inputs.out_matrix_inverse
        if isdefined(self.inputs.inverse_composite_transform):
            outputs[
                "inverse_composite_transform"] = self.inputs.inverse_composite_transform

        return outputs
Example #48
    def aggregate_outputs(self, runtime=None, needed_outputs=None):
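        # The in-place modified file and any requested header information (from stdout) are exposed as outputs.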
        outputs = self._outputs()
        info = runtime.stdout

        # Modified file
        if (isdefined(self.inputs.copy_sform2qform) or isdefined(self.inputs.copy_qform2sform)
                or isdefined(self.inputs.delete_orient) or isdefined(self.inputs.force_radiological)
                or isdefined(self.inputs.force_neurological) or isdefined(self.inputs.swap_orient)):
            outputs.out_file = self.inputs.in_file
            #outputs['out_file'] = self.inputs.in_file
            
        # Get information
        if isdefined(self.inputs.get_orient):
            outputs.orient = info
        if isdefined(self.inputs.get_sform):
            outputs.sform = info
        if isdefined(self.inputs.get_qform):
            outputs.qform = info
        if isdefined(self.inputs.get_sformcode):
            outputs.sformcode = info
        if isdefined(self.inputs.get_qformcode):
            outputs.qformcode = info

        return outputs
Example #49
    def _run_interface(self, runtime):
        """Generate a reportlet."""
        if isdefined(self.inputs.mif_file):
            odf_img, directions = mif2amps(self.inputs.mif_file, runtime.cwd)
        elif isdefined(self.inputs.fib_file):
            odf_img, directions = fib2amps(self.inputs.fib_file,
                                           self.inputs.background_image,
                                           runtime.cwd)
        elif isdefined(self.inputs.odf_file) and isdefined(
                self.inputs.directions_file):
            odf_img = nb.load(self.inputs.odf_file)
            directions = np.load(self.inputs.directions_file)
        else:
            raise Exception('Requires either a mif file or fib file')
        odf_4d = odf_img.get_fdata()
        sphere = HemiSphere(xyz=directions.astype(float))
        if not isdefined(self.inputs.background_image
                         ) or self.inputs.background_image is None:
            background_data = odf_4d.mean(3)
        else:
            background_data = nb.load(self.inputs.background_image).get_fdata()

        peak_report = op.join(runtime.cwd, 'peak_report.png')
        peak_slice_series(odf_4d,
                          sphere,
                          background_data,
                          peak_report,
                          n_cuts=self._ncuts,
                          mask_image=self.inputs.mask_file,
                          padding=self._padding)
        self._results['out_report'] = peak_report

        # Plot ODFs in interesting regions
        if isdefined(self.inputs.odf_rois):
            odf_report = op.join(runtime.cwd, 'odf_report.png')
            odf_roi_plot(odf_4d,
                         sphere,
                         background_data,
                         odf_report,
                         self.inputs.odf_rois,
                         subtract_iso=self.inputs.subtract_iso,
                         mask=self.inputs.mask_file)
            self._results['odf_report'] = odf_report
        return runtime
Example #50
 def _cond_to_regress(self, info, nscans):
     """Converts condition information to full regressors
     """
     reg = []
     regnames = []
     for i, cond in enumerate(info.conditions):
         if hasattr(info, 'amplitudes') and info.amplitudes:
             amplitudes = info.amplitudes[i]
         else:
             amplitudes = None
         regnames.insert(len(regnames), cond)
         scaled_onsets = scale_timings(info.onsets[i],
                                       self.inputs.input_units, 'secs',
                                       self.inputs.time_repetition)
         scaled_durations = scale_timings(info.durations[i],
                                          self.inputs.input_units, 'secs',
                                          self.inputs.time_repetition)
         regressor = self._gen_regress(scaled_onsets, scaled_durations,
                                       amplitudes, nscans)
         if isdefined(self.inputs.use_temporal_deriv) and \
                 self.inputs.use_temporal_deriv:
             reg.insert(len(reg), regressor[0])
             regnames.insert(len(regnames), cond + '_D')
             reg.insert(len(reg), regressor[1])
         else:
             reg.insert(len(reg), regressor)
         # need to deal with temporal and parametric modulators
     # for sparse-clustered acquisitions enter T1-effect regressors
     nvol = self.inputs.volumes_in_cluster
     if nvol > 1:
         for i in range(nvol - 1):
             treg = np.zeros((nscans // nvol, nvol))
             treg[:, i] = 1
             reg.insert(len(reg), treg.ravel().tolist())
             regnames.insert(len(regnames), 'T1effect_%d' % i)
     return reg, regnames
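A toy illustration (made-up nscans/nvol values) of the T1-effect regressors built at the end for sparse-clustered acquisitions; note the integer division, which matters under Python 3:

import numpy as np

nscans, nvol = 6, 3               # 6 volumes acquired in clusters of 3
for i in range(nvol - 1):
    treg = np.zeros((nscans // nvol, nvol))
    treg[:, i] = 1                # flag one position within each cluster
    print(treg.ravel().tolist())
# [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
# [0.0, 1.0, 0.0, 0.0, 1.0, 0.0]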
Beispiel #51
0
    def _run_interface(self, runtime):
        if not isdefined(self.inputs.out_file):
            fname = os.path.splitext(os.path.basename(self.inputs.in_file))[0]
            dname = os.getcwd()
            self.inputs.out_file = os.path.join(dname, fname + self._suffix + '.mnc')

        temp_fn="/tmp/tmp_mnc_"+ strftime("%Y%m%d%H%M%S", gmtime())+str(np.random.randint(9999999999))+".mnc"
        shutil.copy(self.inputs.in_file, temp_fn)
        infile = volumeFromFile(self.inputs.in_file)
        for view in ['xspace','yspace','zspace']:
            #start = -1*infile.separations[infile.dimnames.index(view)]*infile.sizes[infile.dimnames.index(view)]/2
            dim = infile.dimnames.index( view )
            start = infile.starts[dim]

            run_modifHrd=ModifyHeaderCommand()
            run_modifHrd.inputs.in_file = temp_fn
            run_modifHrd.inputs.dinsert = True
            run_modifHrd.inputs.opt_string = view + ":start=" + str(start)
            run_modifHrd.run()

        node_name="fixIrregularDimension"
        fixIrregular = ModifyHeaderCommand()
        #-dinsert xspace:direction_cosines=1,0,0 -dinsert yspace:direction_cosines=0,1,0 -dinsert zspace:direction_cosines=0,0,1
        fixIrregular.inputs.opt_string = " -sinsert time:spacing=\"regular__\" -sinsert time-width:spacing=\"regular__\" -sinsert xspace:spacing=\"regular__\" -sinsert yspace:spacing=\"regular__\" -sinsert zspace:spacing=\"regular__\""
        fixIrregular.inputs.in_file = temp_fn
        print( fixIrregular.cmdline )
        fixIrregular.run()

        fixCosine = FixCosinesCommand()
        fixCosine.inputs.in_file = fixIrregular.inputs.out_file
        fixCosine.inputs.keep_real_range=True
        fixCosine.inputs.dircos=True
        fixCosine.run()
        print(fixCosine.cmdline)
        shutil.copy(fixCosine.inputs.out_file, self.inputs.out_file)
        return runtime
Beispiel #52
0
 def _list_outputs(self):
     outputs = self._outputs().get()
     outputs['annotated_trackvis_file'] = os.path.abspath(
         self.inputs.out_tracks)
     outputs['max_maps'] = []
     outputs['mean_maps'] = []
     if isdefined(self.inputs.stat_labels) and len(
             self.inputs.stat_labels) == len(self.inputs.stat_files):
         for label in self.inputs.stat_labels:
             outputs['max_maps'].append(
                 os.path.abspath(self.inputs.out_max_map_prefix +
                                 "_%s" % label + '.nii'))
             outputs['mean_maps'].append(
                 os.path.abspath(self.inputs.out_mean_map_prefix +
                                 "_%s" % label + '.nii'))
     else:
         for i in range(len(self.inputs.stat_files)):
             outputs['max_maps'].append(
                 os.path.abspath(self.inputs.out_max_map_prefix + str(i) +
                                 '.nii'))
             outputs['mean_maps'].append(
                 os.path.abspath(self.inputs.out_mean_map_prefix + str(i) +
                                 '.nii'))
     return outputs
Beispiel #53
0
 def _list_outputs(self):
     outputs = self._outputs().get()
     outfile = self.inputs.out_file
     if not isdefined(outfile):
         _, infile1, _ = split_filename(self.inputs.in_file)
         if self.inputs.invert_xfm:
             outfile = fname_presuffix(infile1,
                                       suffix="_inv.mat",
                                       newpath=os.getcwd(),
                                       use_ext=False)
         else:
             if self.inputs.concat_xfm:
                 _, infile2, _ = split_filename(self.inputs.in_file2)
                 outfile = fname_presuffix("%s_%s" % (infile1, infile2),
                                           suffix=".mat",
                                           newpath=os.getcwd(),
                                           use_ext=False)
             else:
                 outfile = fname_presuffix(infile1,
                                           suffix="_fix.mat",
                                           newpath=os.getcwd(),
                                           use_ext=False)
     outputs["out_file"] = os.path.abspath(outfile)
     return outputs
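The default names above come from nipype's fname_presuffix (nipype.utils.filemanip); a quick sketch with a hypothetical input name:

from nipype.utils.filemanip import fname_presuffix

# use_ext=False because the '_inv.mat' suffix already carries the extension
print(fname_presuffix('xfm_a', suffix='_inv.mat', newpath='/tmp', use_ext=False))
# -> /tmp/xfm_a_inv.mat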
Beispiel #54
0
    def _run_interface(self, runtime):
        if not isdefined(self.inputs.out_file):
            self.inputs.out_file = self._gen_output(self.inputs.in_file)

        temp_fn = os.getcwd()+"/tmp_mnc_"+ strftime("%Y%m%d%H%M%S", gmtime())+str(np.random.randint(9999999999))+".mnc"
        convert = nii2mnc_shCommand()
        convert.inputs.in_file=self.inputs.in_file
        convert.inputs.out_file=temp_fn

        convert.inputs.dfloat = self.inputs.dfloat 
        convert.inputs.dint = self.inputs.dint
        print(convert.cmdline)
        convert.run()

        minc2 = mincconvertCommand()
        minc2.inputs.in_file=temp_fn
        minc2.inputs.out_file=self.inputs.out_file
        minc2.inputs.two=True
        print(minc2.cmdline)
        minc2.run()

        move(minc2.inputs.out_file, self.inputs.out_file)
        os.remove(temp_fn)
        return runtime
Beispiel #55
0
def _checkinitxfm(in_bval, excl_nodiff, in_xfms=None):
    from nipype.interfaces.base import isdefined
    import numpy as np
    import os.path as op
    bvals = np.loadtxt(in_bval)

    gen_id = ((in_xfms is None) or (not isdefined(in_xfms))
              or (len(in_xfms) != len(bvals)))

    init_xfms = []
    if excl_nodiff:
        dws = np.where(bvals != 0)[0].tolist()
    else:
        dws = range(len(bvals))

    if gen_id:
        for i in dws:
            xfm_file = op.abspath('init_%04d.mat' % i)
            np.savetxt(xfm_file, np.eye(4))
            init_xfms.append(xfm_file)
    else:
        init_xfms = [in_xfms[i] for i in dws]

    return init_xfms
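A hypothetical call (file name and b-values are made up) showing both code paths: with no transforms supplied, an identity init_%04d.mat is written for every retained volume; otherwise the given list is simply filtered:

import numpy as np

np.savetxt('dwi.bval', np.array([0, 1000, 1000, 0, 1000]))   # toy b-values
xfms = _checkinitxfm('dwi.bval', excl_nodiff=True)           # keeps indices 1, 2, 4
print(xfms)                                                  # three identity .mat files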
Beispiel #56
0
    def _list_outputs(self):
        exclude = None
        if self.inputs.strict:
            exclude = ['derivatives/', 'code/', 'sourcedata/']

        if bids_ver < version.parse('0.5'):
            raise ImportError("pybids must be >= 0.5")
        elif bids_ver >= version.parse('0.5') and bids_ver < version.parse('0.6'):
            layout = bidslayout.BIDSLayout(self.inputs.base_dir, config=self.inputs.domains, exclude=exclude)
        else:
            if self.inputs.domains is None:
                self.inputs.domains = ['bids']
            layout = bidslayout.BIDSLayout((self.inputs.base_dir, self.inputs.domains), exclude=exclude)

        # If an infield is not given an input value, silently ignore it
        filters = {}
        for key in self._infields:
            value = getattr(self.inputs, key)
            if isdefined(value):
                filters[key] = value

        outputs = {}
        for key, query in self.inputs.output_query.items():
            args = query.copy()
            args.update(filters)
            filelist = layout.get(return_type=self.inputs.return_type, **args)
            if len(filelist) == 0:
                msg = 'Output key: %s returned no files' % key
                if self.inputs.raise_on_empty:
                    raise IOError(msg)
                else:
                    iflogger.warning(msg)
                    filelist = Undefined

            outputs[key] = filelist
        return outputs
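Rough sketch of the pybids query this interface wraps, assuming a recent pybids (the keyword set differs in the 0.5.x branch handled above) and a hypothetical dataset path:

from bids import BIDSLayout

layout = BIDSLayout('/data/my_bids_dataset')
bold_files = layout.get(subject='01', suffix='bold', extension='nii.gz',
                        return_type='file')
print(bold_files)    # [] when nothing matches -> the interface warns or raises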
Beispiel #57
0
 def _get_dst(self, src):
     # If path is directory with trailing os.path.sep,
     # then remove that for a more robust behavior
     src = src.rstrip(os.path.sep)
     path, fname = os.path.split(src)
     if self.inputs.parameterization:
         dst = path
         if isdefined(self.inputs.strip_dir):
             dst = dst.replace(self.inputs.strip_dir, '')
         folders = [
             folder for folder in dst.split(os.path.sep)
             if folder.startswith('_')
         ]
         dst = os.path.sep.join(folders)
         if fname:
             dst = os.path.join(dst, fname)
     else:
         if fname:
             dst = fname
         else:
             dst = path.split(os.path.sep)[-1]
     if dst[0] == os.path.sep:
         dst = dst[1:]
     return dst
Beispiel #58
0
    def cmdline(self):
        cmd = super(MakeMidthickness, self).cmdline
        if not isdefined(self.inputs.graymid) or len(self.inputs.graymid) < 1:
            return cmd

        # Possible graymid values include {l,r}h.{graymid,midthickness}
        # Prefer midthickness to graymid, require to be of the same hemisphere
        # as input
        source = None
        in_base = Path(self.inputs.in_file).name
        mt = self._associated_file(in_base, "midthickness")
        gm = self._associated_file(in_base, "graymid")

        for surf in self.inputs.graymid:
            if Path(surf).name == mt:
                source = surf
                break
            if Path(surf).name == gm:
                source = surf

        if source is None:
            return cmd

        return "cp {} {}".format(source, self._list_outputs()["out_file"])
Beispiel #59
0
    def _transform(self, array):
        design = np.loadtxt(self.inputs.design_file, dtype=np.float64, ndmin=2)

        filter_all = self.inputs.filter_all

        if filter_all is not True:
            filter_columns = self.inputs.filter_columns

        else:
            filter_columns = list(range(1, design.shape[1] + 1))

        calculate_mask = isdefined(
            self.inputs.mask) and self.inputs.mask is True

        np.nan_to_num(array,
                      copy=False)  # nans create problems further down the line

        array2 = regfilt(array,
                         design,
                         filter_columns,
                         calculate_mask=calculate_mask,
                         aggressive=self.inputs.aggressive)

        return array2
Beispiel #60
0
    def _run_interface(self, runtime):
        if not isdefined(self.inputs.out_file):
            self.inputs.out_file = self._gen_output(self.inputs.in_file)

        beast = mincbeastCommand()
        beast.inputs.out_file = "/tmp/" + str(np.random.randint(
            0, 9999999)) + "_" + os.path.basename(self.inputs.out_file)
        beast.inputs.in_file = self.inputs.in_file
        beast.inputs.library_dir = self.inputs.library_dir
        beast.inputs.voxel_size = self.inputs.voxel_size
        beast.inputs.same_resolution = self.inputs.same_resolution
        beast.inputs.median = self.inputs.median
        beast.inputs.fill = self.inputs.fill
        beast.inputs.configuration = self.inputs.configuration
        beast.run()

        resample = FixCosinesCommand()
        resample.inputs.two = True
        resample.inputs.in_file = beast.inputs.out_file
        resample.inputs.out_file = self.inputs.out_file

        print(resample.cmdline)
        resample.run()
        return runtime