def _list_outputs(self): outputs = self.output_spec().get() outputs["out_reg_file"] = self.inputs.out_reg_file if not isdefined(self.inputs.out_reg_file) and self.inputs.source_file: outputs["out_reg_file"] = fname_presuffix(self.inputs.source_file, suffix="_robustreg.lta", use_ext=False) prefices = dict(src=self.inputs.source_file, trg=self.inputs.target_file) suffices = dict( registered_file=("src", "_robustreg", True), weights_file=("src", "_robustweights", True), half_source=("src", "_halfway", True), half_targ=("trg", "_halfway", True), half_weights=("src", "_halfweights", True), half_source_xfm=("src", "_robustxfm.lta", False), half_targ_xfm=("trg", "_robustxfm.lta", False), ) for name, sufftup in suffices.items(): value = getattr(self.inputs, name) if isdefined(value): if isinstance(value, bool): outputs[name] = fname_presuffix( prefices[sufftup[0]], suffix=sufftup[1], newpath=os.getcwd(), use_ext=sufftup[2] ) else: outputs[name] = value return outputs
def _list_outputs(self):
    outputs = self.output_spec().get()
    _in = self.inputs
    if isdefined(_in.out_reg_file):
        outputs["out_reg_file"] = op.abspath(_in.out_reg_file)
    elif _in.source_file:
        suffix = "_bbreg_%s.dat" % _in.subject_id
        outputs["out_reg_file"] = fname_presuffix(_in.source_file,
                                                  suffix=suffix,
                                                  use_ext=False)
    if isdefined(_in.registered_file):
        if isinstance(_in.registered_file, bool):
            outputs["registered_file"] = fname_presuffix(_in.source_file,
                                                         suffix="_bbreg")
        else:
            outputs["registered_file"] = op.abspath(_in.registered_file)
    if isdefined(_in.out_fsl_file):
        if isinstance(_in.out_fsl_file, bool):
            suffix = "_bbreg_%s.mat" % _in.subject_id
            out_fsl_file = fname_presuffix(_in.source_file,
                                           suffix=suffix,
                                           use_ext=False)
            outputs["out_fsl_file"] = out_fsl_file
        else:
            outputs["out_fsl_file"] = op.abspath(_in.out_fsl_file)
    outputs["min_cost_file"] = outputs["out_reg_file"] + ".mincost"
    return outputs
def _list_outputs(self):
    outputs = self._outputs().get()
    if isdefined(self.inputs.in_files):
        outputs['realignment_parameters'] = []
        for imgf in self.inputs.in_files:
            if isinstance(imgf, list):
                tmp_imgf = imgf[0]
            else:
                tmp_imgf = imgf
            outputs['realignment_parameters'].append(
                fname_presuffix(tmp_imgf, prefix='rp_', suffix='.txt', use_ext=False))
            if not isinstance(imgf, list) and func_is_3d(imgf):
                break
    #if self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite":
    if isinstance(self.inputs.in_files[0], list):
        first_image = self.inputs.in_files[0][0]
    else:
        first_image = self.inputs.in_files[0]
    outputs['mean_image'] = fname_presuffix(first_image, prefix='meanu')
    outputs['realigned_files'] = []
    # get prefix for new files, or default 'u'
    file_prefix = self.inputs.write_prefix or 'u'
    for imgf in filename_to_list(self.inputs.in_files):
        realigned_run = []
        if isinstance(imgf, list):
            for inner_imgf in filename_to_list(imgf):
                realigned_run.append(fname_presuffix(inner_imgf, prefix=file_prefix))
        else:
            realigned_run = fname_presuffix(imgf, prefix=file_prefix)
        outputs['realigned_files'].append(realigned_run)
    return outputs
def _list_outputs(self): outputs = self._outputs().get() jobtype = self.inputs.jobtype if jobtype.startswith("est"): outputs["normalization_parameters"] = [] for imgf in filename_to_list(self.inputs.source): outputs["normalization_parameters"].append(fname_presuffix(imgf, suffix="_sn.mat", use_ext=False)) outputs["normalization_parameters"] = list_to_filename(outputs["normalization_parameters"]) if self.inputs.jobtype == "estimate": if isdefined(self.inputs.apply_to_files): outputs["normalized_files"] = self.inputs.apply_to_files outputs["normalized_source"] = self.inputs.source elif "write" in self.inputs.jobtype: outputs["normalized_files"] = [] if isdefined(self.inputs.apply_to_files): for imgf in filename_to_list(self.inputs.apply_to_files): outputs["normalized_files"].append(fname_presuffix(imgf, prefix="w")) if isdefined(self.inputs.source): outputs["normalized_source"] = [] for imgf in filename_to_list(self.inputs.source): outputs["normalized_source"].append(fname_presuffix(imgf, prefix="w")) return outputs
def _list_outputs(self):
    outputs = self._outputs().get()
    jobtype = self.inputs.jobtype
    if jobtype.startswith('est'):
        outputs['normalization_parameters'] = []
        for imgf in filename_to_list(self.inputs.source):
            outputs['normalization_parameters'].append(
                fname_presuffix(imgf, suffix='_sn.mat', use_ext=False))
        outputs['normalization_parameters'] = list_to_filename(
            outputs['normalization_parameters'])
    if self.inputs.jobtype == "estimate":
        if isdefined(self.inputs.apply_to_files):
            outputs['normalized_files'] = self.inputs.apply_to_files
        outputs['normalized_source'] = self.inputs.source
    elif 'write' in self.inputs.jobtype:
        outputs['normalized_files'] = []
        if isdefined(self.inputs.apply_to_files):
            filelist = filename_to_list(self.inputs.apply_to_files)
            for f in filelist:
                if isinstance(f, list):
                    run = [fname_presuffix(in_f, prefix=self.inputs.out_prefix)
                           for in_f in f]
                else:
                    run = [fname_presuffix(f, prefix=self.inputs.out_prefix)]
                outputs['normalized_files'].extend(run)
        if isdefined(self.inputs.source):
            outputs['normalized_source'] = fname_presuffix(
                self.inputs.source, prefix=self.inputs.out_prefix)
    return outputs
def _list_outputs(self):
    outputs = self.output_spec().get()
    #if isdefined(self.inputs.output_csv_file):
    #    write to a csv file and assign a value to self.coherence_file (a
    #    file name + path)
    # Always defined (the arrays):
    outputs['coherence_array'] = self.coherence
    outputs['timedelay_array'] = self.delay
    # Conditional
    if isdefined(self.inputs.output_csv_file) and hasattr(self, 'coherence'):
        # we need to make a function that we call here that writes the
        # coherence values to this file "coherence_csv" and makes the
        # time_delay csv file??
        self._make_output_files()
        outputs['coherence_csv'] = fname_presuffix(self.inputs.output_csv_file,
                                                   suffix='_coherence')
        outputs['timedelay_csv'] = fname_presuffix(self.inputs.output_csv_file,
                                                   suffix='_delay')
    if isdefined(self.inputs.output_figure_file) and hasattr(self, 'coherence'):
        self._make_output_figures()
        outputs['coherence_fig'] = fname_presuffix(self.inputs.output_figure_file,
                                                   suffix='_coherence')
        outputs['timedelay_fig'] = fname_presuffix(self.inputs.output_figure_file,
                                                   suffix='_delay')
    return outputs
def fieldmap_prepare_files(converted_files, rwv_rescale_intercept=0, rwv_rescale_slope=1):
    if len(converted_files) == 0:
        return "", ""
    import nibabel as nb, numpy as np, os, re
    from nipype.utils.filemanip import fname_presuffix
    in_pattern = "(?P<data>\d{8})_(?P<time>\d{6})(?P<site>\d{3})S(?P<subj>\d{4})"
    m = re.search(in_pattern, converted_files[0])
    out_file = "./%(site)s_S_%(subj)s_%(data)s_%(time)s.nii.gz" % m.groupdict()
    niis = [nb.load(f) for f in converted_files]
    # apply rescaling only when intercept/slope differ from the identity (0, 1)
    if not (rwv_rescale_intercept == 0 and rwv_rescale_slope == 1):
        datas = [nii.get_data() * rwv_rescale_slope + rwv_rescale_intercept
                 for nii in niis]
    else:
        datas = [nii.get_data() for nii in niis]
    if len(datas) == 2 and datas[0].ndim == 4 and datas[0].shape[3] == 2:
        # pair of complex data
        cplx1 = datas[0][..., 0] + 1j * datas[0][..., 1]
        cplx2 = datas[1][..., 0] + 1j * datas[1][..., 1]
    else:
        return
    phase_diff = np.mod(np.angle(cplx2) - np.angle(cplx1) + np.pi * 2, np.pi * 2)
    # append a zero image for FUGUE
    phase_diff = np.concatenate((phase_diff[..., np.newaxis],
                                 np.zeros(phase_diff.shape + (1,))), 3)
    phase_diff = phase_diff.astype(np.float32)
    mag1 = np.abs(cplx1).astype(np.float32)
    phasediff_name = fname_presuffix(out_file, suffix="_phasediff", newpath=os.getcwd())
    mag_name = fname_presuffix(out_file, suffix="_mag", newpath=os.getcwd())
    nb.save(nb.Nifti1Image(phase_diff, niis[0].get_affine()), phasediff_name)
    nb.save(nb.Nifti1Image(mag1, niis[0].get_affine()), mag_name)
    return phasediff_name, mag_name
def _make_output_figures(self):
    """
    Generate the desired figure and save the files according to
    self.inputs.output_figure_file
    """
    if self.inputs.figure_type == 'matrix':
        fig_coh = viz.drawmatrix_channels(self.coherence,
                                          channel_names=self.ROIs,
                                          color_anchor=0)
        fig_coh.savefig(fname_presuffix(self.inputs.output_figure_file,
                                        suffix='_coherence'))
        fig_dt = viz.drawmatrix_channels(self.delay,
                                         channel_names=self.ROIs,
                                         color_anchor=0)
        fig_dt.savefig(fname_presuffix(self.inputs.output_figure_file,
                                       suffix='_delay'))
    else:
        fig_coh = viz.drawgraph_channels(self.coherence, channel_names=self.ROIs)
        fig_coh.savefig(fname_presuffix(self.inputs.output_figure_file,
                                        suffix='_coherence'))
        fig_dt = viz.drawgraph_channels(self.delay, channel_names=self.ROIs)
        fig_dt.savefig(fname_presuffix(self.inputs.output_figure_file,
                                       suffix='_delay'))
def _list_outputs(self):
    outputs = self.output_spec().get()
    outfile = self.inputs.binary_file
    if not isdefined(outfile):
        if isdefined(self.inputs.out_type):
            outfile = fname_presuffix(self.inputs.in_file,
                                      newpath=os.getcwd(),
                                      suffix='.'.join(('_thresh', self.inputs.out_type)),
                                      use_ext=False)
        else:
            outfile = fname_presuffix(self.inputs.in_file,
                                      newpath=os.getcwd(),
                                      suffix='_thresh')
    outputs['binary_file'] = outfile
    value = self.inputs.count_file
    if isdefined(value):
        if isinstance(value, bool):
            if value:
                outputs['count_file'] = fname_presuffix(self.inputs.in_file,
                                                        suffix='_count.txt',
                                                        newpath=os.getcwd(),
                                                        use_ext=False)
        else:
            outputs['count_file'] = value
    return outputs
def _list_outputs(self):
    outputs = self._outputs().get()
    jobtype = self.inputs.jobtype
    if jobtype.startswith('est'):
        outputs['normalization_parameters'] = []
        for imgf in filename_to_list(self.inputs.source):
            outputs['normalization_parameters'].append(
                fname_presuffix(imgf, suffix='_sn.mat', use_ext=False))
        outputs['normalization_parameters'] = list_to_filename(
            outputs['normalization_parameters'])
    if self.inputs.jobtype == "estimate":
        if isdefined(self.inputs.apply_to_files):
            outputs['normalized_files'] = self.inputs.apply_to_files
        outputs['normalized_source'] = self.inputs.source
    elif 'write' in self.inputs.jobtype:
        outputs['normalized_files'] = []
        if isdefined(self.inputs.apply_to_files):
            for imgf in filename_to_list(self.inputs.apply_to_files):
                outputs['normalized_files'].append(fname_presuffix(imgf, prefix='w'))
        if isdefined(self.inputs.source):
            outputs['normalized_source'] = []
            for imgf in filename_to_list(self.inputs.source):
                outputs['normalized_source'].append(fname_presuffix(imgf, prefix='w'))
    return outputs
def _list_outputs(self): outputs = self._outputs().get() if isdefined(self.inputs.in_files): outputs["realignment_parameters"] = [] for imgf in self.inputs.in_files: if isinstance(imgf, list): tmp_imgf = imgf[0] else: tmp_imgf = imgf outputs["realignment_parameters"].append( fname_presuffix(tmp_imgf, prefix="rp_", suffix=".txt", use_ext=False) ) if not isinstance(imgf, list) and func_is_3d(imgf): break if self.inputs.jobtype == "write" or self.inputs.jobtype == "estwrite": if isinstance(self.inputs.in_files[0], list): first_image = self.inputs.in_files[0][0] else: first_image = self.inputs.in_files[0] outputs["mean_image"] = fname_presuffix(first_image, prefix="mean") outputs["realigned_files"] = [] for imgf in filename_to_list(self.inputs.in_files): realigned_run = [] if isinstance(imgf, list): for inner_imgf in filename_to_list(imgf): realigned_run.append(fname_presuffix(inner_imgf, prefix="r")) else: realigned_run = fname_presuffix(imgf, prefix="r") outputs["realigned_files"].append(realigned_run) return outputs
def export_graph(graph_in, base_dir=None, show=False, use_execgraph=False,
                 show_connectinfo=False, dotfilename='graph.dot', format='png',
                 simple_form=True):
    """Displays the graph layout of the pipeline

    This function requires that pygraphviz and matplotlib are available on
    the system.

    Parameters
    ----------
    show : boolean
        Indicate whether to generate pygraphviz output from networkx.
        default [False]
    use_execgraph : boolean
        Indicates whether to use the specification graph or the execution
        graph. default [False]
    show_connectinfo : boolean
        Indicates whether to show the edge data on the graph. This makes the
        graph rather cluttered. default [False]
    """
    graph = deepcopy(graph_in)
    if use_execgraph:
        graph = generate_expanded_graph(graph)
        logger.debug('using execgraph')
    else:
        logger.debug('using input graph')
    if base_dir is None:
        base_dir = os.getcwd()
    if not os.path.exists(base_dir):
        os.makedirs(base_dir)
    outfname = fname_presuffix(dotfilename, suffix='_detailed.dot',
                               use_ext=False, newpath=base_dir)
    logger.info('Creating detailed dot file: %s' % outfname)
    _write_detailed_dot(graph, outfname)
    cmd = 'dot -T%s -O %s' % (format, outfname)
    res = CommandLine(cmd).run()
    if res.runtime.returncode:
        logger.warn('dot2png: %s', res.runtime.stderr)
    pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form)
    outfname = fname_presuffix(dotfilename, suffix='.dot',
                               use_ext=False, newpath=base_dir)
    nx.write_dot(pklgraph, outfname)
    logger.info('Creating dot file: %s' % outfname)
    cmd = 'dot -T%s -O %s' % (format, outfname)
    res = CommandLine(cmd).run()
    if res.runtime.returncode:
        logger.warn('dot2png: %s', res.runtime.stderr)
    if show:
        pos = nx.graphviz_layout(pklgraph, prog='dot')
        nx.draw(pklgraph, pos)
        if show_connectinfo:
            nx.draw_networkx_edge_labels(pklgraph, pos)
def test_fname_presuffix():
    fname = 'foo.nii'
    pth = fname_presuffix(fname, 'pre_', '_post', '/tmp')
    yield assert_equal, pth, '/tmp/pre_foo_post.nii'
    fname += '.gz'
    pth = fname_presuffix(fname, 'pre_', '_post', '/tmp')
    yield assert_equal, pth, '/tmp/pre_foo_post.nii.gz'
    pth = fname_presuffix(fname, 'pre_', '_post', '/tmp', use_ext=False)
    yield assert_equal, pth, '/tmp/pre_foo_post'
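# A minimal sketch (not the nipype implementation) of the behavior the test above
# exercises: fname_presuffix(fname, prefix='', suffix='', newpath=None, use_ext=True)
# strips the extension (including compound ones such as .nii.gz), wraps the stem in
# prefix/suffix, optionally re-attaches the extension, and optionally relocates the
# result to newpath. The compound-extension list below is an assumption for the sketch.
import os


def fname_presuffix_sketch(fname, prefix='', suffix='', newpath=None, use_ext=True):
    pth, name = os.path.split(fname)
    ext = ''
    for known in ('.nii.gz', '.tar.gz'):  # assumed compound extensions
        if name.endswith(known):
            name, ext = name[:-len(known)], known
            break
    else:
        name, ext = os.path.splitext(name)
    if newpath is not None:
        pth = newpath
    if not use_ext:
        ext = ''
    return os.path.join(pth, prefix + name + suffix + ext)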
def _run_interface(self, runtime):
    in_files = self.inputs.in_files
    if not isinstance(in_files, list):
        in_files = [self.inputs.in_files]

    # Generate output average name early
    self._results['out_avg'] = fname_presuffix(self.inputs.in_files[0],
                                               suffix='_avg', newpath=runtime.cwd)

    if self.inputs.to_ras:
        in_files = [reorient(inf, newpath=runtime.cwd) for inf in in_files]

    if len(in_files) == 1:
        filenii = nb.load(in_files[0])
        filedata = filenii.get_data()

        # magnitude files can have an extra dimension empty
        if filedata.ndim == 5:
            sqdata = np.squeeze(filedata)
            if sqdata.ndim == 5:
                raise RuntimeError('Input image (%s) is 5D' % in_files[0])
            else:
                in_files = [fname_presuffix(in_files[0], suffix='_squeezed',
                                            newpath=runtime.cwd)]
                nb.Nifti1Image(sqdata, filenii.get_affine(),
                               filenii.get_header()).to_filename(in_files[0])

        if np.squeeze(nb.load(in_files[0]).get_data()).ndim < 4:
            self._results['out_file'] = in_files[0]
            self._results['out_avg'] = in_files[0]
            # TODO: generate identity out_mats and zero-filled out_movpar
            return runtime
        in_files = in_files[0]
    else:
        magmrg = fsl.Merge(dimension='t', in_files=self.inputs.in_files)
        in_files = magmrg.run().outputs.merged_file
    mcflirt = fsl.MCFLIRT(cost='normcorr', save_mats=True, save_plots=True,
                          ref_vol=0, in_file=in_files)
    mcres = mcflirt.run()
    self._results['out_mats'] = mcres.outputs.mat_file
    self._results['out_movpar'] = mcres.outputs.par_file
    self._results['out_file'] = mcres.outputs.out_file

    hmcnii = nb.load(mcres.outputs.out_file)
    hmcdat = hmcnii.get_data().mean(axis=3)
    if self.inputs.zero_based_avg:
        hmcdat -= hmcdat.min()

    nb.Nifti1Image(hmcdat, hmcnii.get_affine(),
                   hmcnii.get_header()).to_filename(self._results['out_avg'])
    return runtime
def _list_outputs(self):
    outputs = self.output_spec().get()
    outputs['out_reg_file'] = self.inputs.out_reg_file
    if not isdefined(self.inputs.out_reg_file) and self.inputs.source_file:
        outputs['out_reg_file'] = fname_presuffix(
            self.inputs.source_file,
            suffix='_bbreg_%s.dat' % self.inputs.subject_id,
            use_ext=False)
    if isdefined(self.inputs.registered_file):
        outputs['registered_file'] = self.inputs.registered_file
        if isinstance(self.inputs.registered_file, bool):
            outputs['registered_file'] = fname_presuffix(self.inputs.source_file,
                                                         suffix='_bbreg')
    return outputs
def _tpm2roi(in_tpm, in_mask, mask_erosion_mm=None, erosion_mm=None,
             mask_erosion_prop=None, erosion_prop=None, pthres=0.95,
             newpath=None):
    """
    Generate a mask from a tissue probability map
    """
    tpm_img = nb.load(in_tpm)
    roi_mask = (tpm_img.get_data() >= pthres).astype(np.uint8)

    eroded_mask_file = None
    erode_in = (mask_erosion_mm is not None and mask_erosion_mm > 0 or
                mask_erosion_prop is not None and mask_erosion_prop < 1)
    if erode_in:
        eroded_mask_file = fname_presuffix(in_mask, suffix='_eroded',
                                           newpath=newpath)
        mask_img = nb.load(in_mask)
        mask_data = mask_img.get_data().astype(np.uint8)
        if mask_erosion_mm:
            iter_n = max(int(mask_erosion_mm / max(mask_img.header.get_zooms())), 1)
            mask_data = nd.binary_erosion(mask_data, iterations=iter_n)
        else:
            orig_vol = np.sum(mask_data > 0)
            while np.sum(mask_data > 0) / orig_vol > mask_erosion_prop:
                mask_data = nd.binary_erosion(mask_data, iterations=1)

        # Store mask
        eroded = nb.Nifti1Image(mask_data, mask_img.affine, mask_img.header)
        eroded.set_data_dtype(np.uint8)
        eroded.to_filename(eroded_mask_file)

        # Mask TPM data (no effect if not eroded)
        roi_mask[~mask_data] = 0

    # shrinking
    erode_out = (erosion_mm is not None and erosion_mm > 0 or
                 erosion_prop is not None and erosion_prop < 1)
    if erode_out:
        if erosion_mm:
            # keep at least one erosion iteration
            iter_n = max(int(erosion_mm / max(tpm_img.header.get_zooms())), 1)
            roi_mask = nd.binary_erosion(roi_mask, iterations=iter_n)
        else:
            orig_vol = np.sum(roi_mask > 0)
            while np.sum(roi_mask > 0) / orig_vol > erosion_prop:
                roi_mask = nd.binary_erosion(roi_mask, iterations=1)

    # Create image to resample
    roi_fname = fname_presuffix(in_tpm, suffix='_roi', newpath=newpath)
    roi_img = nb.Nifti1Image(roi_mask, tpm_img.affine, tpm_img.header)
    roi_img.set_data_dtype(np.uint8)
    roi_img.to_filename(roi_fname)
    return roi_fname, eroded_mask_file or in_mask
def _list_outputs(self):
    out_prefix = self.inputs.out_prefix
    output_type = self.inputs.output_type

    outputs = self.output_spec().get()
    outputs['B0'] = os.path.abspath(
        fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type))
    outputs['DWI'] = os.path.abspath(
        fname_presuffix("", prefix=out_prefix, suffix='_dwi.' + output_type))
    outputs['max'] = os.path.abspath(
        fname_presuffix("", prefix=out_prefix, suffix='_max.' + output_type))
    outputs['ODF'] = os.path.abspath(
        fname_presuffix("", prefix=out_prefix, suffix='_odf.' + output_type))
    if isdefined(self.inputs.output_entropy):
        outputs['entropy'] = os.path.abspath(
            fname_presuffix("", prefix=out_prefix, suffix='_entropy.' + output_type))
    return outputs
def _list_outputs(self): outputs = self._outputs().get() outputs["timecorrected_files"] = [] filelist = filename_to_list(self.inputs.in_files) for f in filelist: run = [] if isinstance(f, list): for inner_f in filename_to_list(f): run.append(fname_presuffix(inner_f, prefix="a")) else: realigned_run = fname_presuffix(f, prefix="a") outputs["timecorrected_files"].append(realigned_run) return outputs
def _list_outputs(self): out_prefix = self.inputs.out_prefix output_type = self.inputs.output_type outputs = self.output_spec().get() outputs["B0"] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix="_b0." + output_type)) outputs["DWI"] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix="_dwi." + output_type)) outputs["max"] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix="_max." + output_type)) outputs["ODF"] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix="_odf." + output_type)) if isdefined(self.inputs.output_entropy): outputs["entropy"] = os.path.abspath( fname_presuffix("", prefix=out_prefix, suffix="_entropy." + output_type) ) return outputs
def make_4d_nibabel(infiles, outdir=None):
    shape = ni.load(infiles[0]).get_shape()
    finalshape = tuple([x for x in shape] + [len(infiles)])
    dat4d = np.empty(finalshape)
    for val, f in enumerate(infiles):
        tmpdat = ni.load(f).get_data()
        tmpdat[np.isnan(tmpdat)] = 0
        dat4d[:, :, :, val] = tmpdat
    newimg = ni.Nifti1Image(dat4d, ni.load(infiles[0]).get_affine())
    if outdir is None:
        outf = fname_presuffix(infiles[0], prefix='data4d_')
    else:
        outf = fname_presuffix(infiles[0], prefix='data4d_', newpath=outdir)
    newimg.to_filename(outf)
    return outf
def main(config):
    """Runs resting state QA workflow

    Parameters
    ----------
    config : String
        Filename of .json configuration file
    """
    QA_config = load_config(config, create_config)
    c = load_config(QA_config.preproc_config, prep_config)
    a = resting_QA(QA_config, c)
    a.base_dir = QA_config.working_dir
    if QA_config.test_mode:
        a.write_graph()
    a.config = {'execution': {'crashdump_dir': QA_config.crash_dir}}
    if not os.environ['SUBJECTS_DIR'] == c.surf_dir:
        print "Your SUBJECTS_DIR is incorrect!"
        print "export SUBJECTS_DIR=%s" % c.surf_dir
    else:
        from nipype.utils.filemanip import fname_presuffix
        a.export(fname_presuffix(config, '', '_script_').replace('.json', ''))
        if c.save_script_only:
            return 0
        if QA_config.run_using_plugin:
            a.run(plugin=QA_config.plugin, plugin_args=QA_config.plugin_args)
        else:
            a.run()
def _run_interface(self, runtime):
    in_file = nb.load(self.inputs.in_file)
    wm_mask = nb.load(self.inputs.wm_mask).get_data()
    wm_mask[wm_mask < 0.9] = 0
    wm_mask[wm_mask > 0] = 1
    wm_mask = wm_mask.astype(np.uint8)

    if self.inputs.erodemsk:
        # Create a structural element to be used in an opening operation.
        struc = nd.generate_binary_structure(3, 2)
        # Perform an opening operation on the background data.
        wm_mask = nd.binary_erosion(wm_mask, structure=struc).astype(np.uint8)

    data = in_file.get_data()
    data *= 1000.0 / np.median(data[wm_mask > 0])

    out_file = fname_presuffix(self.inputs.in_file, suffix='_harmonized', newpath='.')
    in_file.__class__(data, in_file.affine, in_file.header).to_filename(out_file)

    self._results['out_file'] = out_file
    return runtime
def _gen_fname(self, basename, cwd=None, prefix=None):
    """Generate a filename based on the given parameters.

    The filename will take the form: cwd/<prefix>basename.

    Parameters
    ----------
    basename : str
        Filename to base the new filename on.
    cwd : str
        Path to prefix to the new filename. (default is os.getcwd())
    prefix : str
        Prefix to add to the `basename`. (default is '')

    Returns
    -------
    fname : str
        New filename based on given parameters.

    """
    if basename == '':
        msg = 'Unable to generate filename for command %s. ' % self.cmd
        msg += 'basename is not set!'
        raise ValueError(msg)
    if cwd is None:
        cwd = os.getcwd()
    if prefix is None:
        prefix = ''
    fname = fname_presuffix(basename, prefix=prefix, use_ext=False, newpath=cwd)
    return fname
def _gen_fname(self, basename, fname=None, cwd=None, suffix='_fs', use_ext=True):
    '''Define a generic mapping for a single outfile

    The filename is potentially autogenerated by suffixing inputs.infile

    Parameters
    ----------
    basename : string (required)
        filename to base the new filename on
    fname : string
        if not None, just use this fname
    cwd : string
        prefix paths with cwd, otherwise os.getcwd()
    suffix : string
        default suffix
    '''
    if basename == '':
        msg = 'Unable to generate filename for command %s. ' % self.cmd
        msg += 'basename is not set!'
        raise ValueError(msg)
    if cwd is None:
        cwd = os.getcwd()
    fname = fname_presuffix(basename, suffix=suffix, use_ext=use_ext, newpath=cwd)
    return fname
def _gen_output_filename(self):
    if not isdefined(self.inputs.output_file):
        output = fname_presuffix(fname=self.inputs.atlas,
                                 suffix="_mask",
                                 newpath=os.getcwd(),
                                 use_ext=True)
    else:
        output = os.path.realpath(self.inputs.output_file)
    return output
def main(config_file):
    c = load_config(config_file, create_config)
    prep_c = load_config(c.preproc_config, prep_config)
    first_level = combine_wkflw(c, prep_c)
    first_level.config = {'execution': {'crashdump_dir': c.crash_dir,
                                        "job_finished_timeout": c.timeout}}
    first_level.base_dir = c.working_dir
    if c.use_advanced_options:
        exec c.advanced_options
    if c.test_mode:
        first_level.write_graph()
    from nipype.utils.filemanip import fname_presuffix
    first_level.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    if c.save_script_only:
        return 0
    if c.run_using_plugin:
        first_level.run(plugin=c.plugin, plugin_args=c.plugin_args)
    else:
        first_level.run()
def _gen_fname(self, prefix, suffix=None, ext=".nii.gz", cwd=None): """Generate a filename based on the given parameters. The filename will take the form: preffix<suffix><ext>. Parameters ---------- prefix : str Filename to base the new filename on. suffix : str Suffix to add to the `basename`. (defaults is '' ) ext : str Desired extension (default is nii.gz) Returns ------- fname : str New filename based on given parameters. """ if (prefix == "") or (prefix is None): prefix = "./" if suffix is None: suffix = "" if cwd is None: cwd = os.getcwd() suffix = "".join((suffix, ext)) fname = fname_presuffix(prefix, suffix=suffix, use_ext=False, newpath=cwd) return fname
def main(config_file):
    c = load_config(config_file, config)
    if c.heuristic_file and c.use_heuristic:
        path, fname = os.path.split(os.path.realpath(c.heuristic_file))
        sys.path.append(path)
        mod = __import__(fname.split('.')[0])
        heuristic_func = mod.infotodict
        print "USING HEURISTIC: ", heuristic_func
    else:
        heuristic_func = None
    if c.info_only:
        try:
            get_dicom_info(c)
        except:
            pass
    if not c.info_only:
        wk = convert_wkflw(c, heuristic_func)
        wk.config = {"execution": {"crashdump_dir": c.crash_dir,
                                   "job_finished_timeout": c.timeout}}
        from nipype.utils.filemanip import fname_presuffix
        wk.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
        if c.save_script_only:
            return 0
        if c.run_using_plugin:
            wk.run(plugin=c.plugin, plugin_args=c.plugin_args)
        else:
            wk.run()
    return 1
def get_mean_timeseries(infile, roi, mask):
    import os
    import nibabel as nib
    from nipype.utils.filemanip import fname_presuffix, split_filename
    import numpy as np

    img = nib.load(infile)
    data, aff = img.get_data(), img.get_affine()
    roi_img = nib.load(roi)
    roi_data, roi_affine = roi_img.get_data(), roi_img.get_affine()
    if len(roi_data.shape) > 3:
        roi_data = roi_data[:, :, :, 0]
    mask = nib.load(mask).get_data()
    roi_data = (roi_data > 0).astype(int) + (mask > 0).astype(int)
    _, roiname, _ = split_filename(roi)
    outfile = fname_presuffix(infile, "%s_" % roiname, '.txt',
                              newpath=os.path.abspath('.'), use_ext=False)
    out_data = np.mean(data[roi_data > 1, :], axis=0)
    print out_data.shape
    np.savetxt(outfile, out_data)
    return outfile, roiname
def main(config_file):
    c = load_config(config_file, create_config)
    from first_level import create_config as first_config
    first_c = load_config(c.first_level_config, first_config)
    fixedfxflow = create_fixedfx(c, first_c)
    fixedfxflow.base_dir = c.working_dir
    fixedfxflow.config = {"execution": {"crashdump_dir": c.crash_dir,
                                        "job_finished_timeout": c.timeout}}
    if c.test_mode:
        fixedfxflow.write_graph()
    from nipype.utils.filemanip import fname_presuffix
    try:
        fixedfxflow.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    except:
        print "Sorry! Workflow couldn't export TODO: fix this"
    if c.save_script_only:
        return 0
    if c.run_using_plugin:
        fixedfxflow.run(plugin=c.plugin, plugin_args=c.plugin_args)
    else:
        fixedfxflow.run()
def read_nifti_sidecar(json_file):
    if not json_file.endswith(".json"):
        json_file = fname_presuffix(json_file, suffix='.json', use_ext=False)
        if not op.exists(json_file):
            raise Exception("No corresponding json file found")

    with open(json_file, "r") as f:
        metadata = json.load(f)
    pe_dir = metadata['PhaseEncodingDirection']
    slice_times = metadata.get("SliceTiming")
    trt = metadata.get("TotalReadoutTime")
    if trt is None:
        pass
    return {"PhaseEncodingDirection": pe_dir,
            "SliceTiming": slice_times,
            "TotalReadoutTime": trt}
def _run_interface(self, runtime):
    ext = '.nii.gz' if self.inputs.compress else '.nii'
    self._results['out_file'] = fname_presuffix(self.inputs.in_files[0],
                                                suffix='_merged' + ext,
                                                newpath=runtime.cwd,
                                                use_ext=False)
    new_nii = concat_imgs(self.inputs.in_files, dtype=self.inputs.dtype)

    if isdefined(self.inputs.header_source):
        src_hdr = nb.load(self.inputs.header_source).header
        new_nii.header.set_xyzt_units(t=src_hdr.get_xyzt_units()[-1])
        new_nii.header.set_zooms(list(new_nii.header.get_zooms()[:3]) +
                                 [src_hdr.get_zooms()[3]])

    new_nii.to_filename(self._results['out_file'])
    return runtime
def _run_interface(self, runtime):
    nii = nb.load(self.inputs.in_file)

    phaseEncDim = {'i': 0, 'j': 1, 'k': 2}[self.inputs.pe_dir[0]]

    if len(self.inputs.pe_dir) == 2:
        phaseEncSign = 1.0
    else:
        phaseEncSign = -1.0

    # Fix header
    hdr = nii.header.copy()
    hdr.set_data_dtype(np.dtype('<f4'))
    hdr.set_intent('vector', (), '')

    # Get data, convert to mm
    data = nii.get_fdata()

    aff = np.diag([1.0, 1.0, -1.0])
    if np.linalg.det(aff) < 0 and phaseEncDim != 0:
        # Reverse direction since ITK is LPS
        aff *= -1.0
    aff = aff.dot(nii.affine[:3, :3])

    data *= phaseEncSign * nii.header.get_zooms()[phaseEncDim]

    # Add missing dimensions
    zeros = np.zeros_like(data)
    field = [zeros, zeros]
    field.insert(phaseEncDim, data)
    field = np.stack(field, -1)
    # Add empty axis
    field = field[:, :, :, np.newaxis, :]

    # Write out
    self._results['out_file'] = fname_presuffix(self.inputs.in_file,
                                                suffix='_antswarp',
                                                newpath=runtime.cwd)
    nb.Nifti1Image(field.astype(np.dtype('<f4')), nii.affine,
                   hdr).to_filename(self._results['out_file'])
    return runtime
def _run_interface(self, runtime):
    in_files = self.inputs.in_files
    if self.inputs.enhance_t2:
        in_files = [_enhance_t2_contrast(f, newpath=runtime.cwd) for f in in_files]

    masknii = compute_epi_mask(in_files,
                               lower_cutoff=self.inputs.lower_cutoff,
                               upper_cutoff=self.inputs.upper_cutoff,
                               connected=self.inputs.connected,
                               opening=self.inputs.opening,
                               exclude_zeros=self.inputs.exclude_zeros,
                               ensure_finite=self.inputs.ensure_finite,
                               target_affine=self.inputs.target_affine,
                               target_shape=self.inputs.target_shape)

    if self.inputs.closing:
        closed = sim.binary_closing(masknii.get_data().astype(np.uint8),
                                    sim.ball(1)).astype(np.uint8)
        masknii = masknii.__class__(closed, masknii.affine, masknii.header)

    if self.inputs.fill_holes:
        filled = binary_fill_holes(masknii.get_data().astype(np.uint8),
                                   sim.ball(6)).astype(np.uint8)
        masknii = masknii.__class__(filled, masknii.affine, masknii.header)

    if self.inputs.no_sanitize:
        in_file = self.inputs.in_files
        if isinstance(in_file, list):
            in_file = in_file[0]
        nii = nb.load(in_file)
        qform, code = nii.get_qform(coded=True)
        masknii.set_qform(qform, int(code))
        sform, code = nii.get_sform(coded=True)
        masknii.set_sform(sform, int(code))

    self._results['out_mask'] = fname_presuffix(self.inputs.in_files[0],
                                                suffix='_mask',
                                                newpath=runtime.cwd)
    masknii.to_filename(self._results['out_mask'])
    return runtime
def run_dura_edit(entry_finder, in_file, meteor_port):
    import nibabel as nib
    import numpy as np
    from scipy.ndimage import label
    import pandas as pd

    print("meteor port is", meteor_port)
    coll, cli = get_collection(meteor_port + 1)
    entry = coll.find_one(entry_finder)
    if entry is not None:
        brain_mask = join(cc["output_directory"], entry["check_masks"][1])
        imgb = nib.load(brain_mask)
        datab, affb = imgb.get_data(), imgb.get_affine()
        maskimg = nib.load(in_file)
        mdata = maskimg.get_data()
        null_coords = np.nonzero(datab[mdata == 1])
        print("removing", null_coords[0].shape[0],
              "voxels in the first pass, out of", mdata.sum())
        print("now removing floating chunks")
        ndatab = datab.copy()
        ndatab[mdata == 1] = 0
        labelimg, nlabels = label(ndatab)
        sizes = np.bincount(labelimg.ravel())[1:]
        ndatab[labelimg != 1] = 0
        print("removing", len(sizes) - 1, "chunks")
        print("original brain size is", datab.sum(),
              "final brain size is", ndatab.sum())
        print("removed", 100 - (float(ndatab.sum()) / datab.sum()) * 100, "%s")
        final_brain_mask = fname_presuffix(in_file, suffix="_edited")
        nib.Nifti1Image(ndatab, affb).to_filename(final_brain_mask)
        print("wrote brain mask", final_brain_mask)
        datab[ndatab == 1] = 0
        coords = np.nonzero(datab)
        df = pd.DataFrame(data=np.asarray(list(coords)).T, columns=["x", "y", "z"])
        coords_fname = abspath(in_file).replace(".nii.gz", ".csv")
        df.to_csv(coords_fname)
        print("wrote removed coordinates as", coords_fname)
    else:
        raise Exception("can't find a valid entry in the db")
def main(config_file):
    """Runs preprocessing QA workflow

    Parameters
    ----------
    config_file : String
        Filename of the .json file of configuration parameters for the workflow
    """
    QA_config = load_config(config_file, create_config)
    from fmri_preprocessing import create_config as prep_config
    c = load_config(QA_config.preproc_config, prep_config)
    a = QA_workflow(QA_config, c)
    a.base_dir = QA_config.working_dir
    if c.debug:
        a = debug_workflow(a)
    if QA_config.test_mode:
        a.write_graph()
    a.inputs.inputspec.config_params = start_config_table(c, QA_config)
    a.config = {'execution': {'crashdump_dir': QA_config.crash_dir,
                              'job_finished_timeout': 14}}
    if QA_config.use_advanced_options:
        exec QA_config.advanced_script
    from nipype.utils.filemanip import fname_presuffix
    a.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    if c.save_script_only:
        return 0
    if QA_config.run_using_plugin:
        a.run(plugin=QA_config.plugin, plugin_args=QA_config.plugin_args)
    else:
        a.run()
def time_normalizer(in_file, tr):
    '''Mean centering and variance normalizing a time series'''
    import os
    import nitime.fmri.io as io
    import nibabel as nib
    from nipype.utils.filemanip import fname_presuffix

    # previously: normalize="zscore" (percent), as Allen states that variance
    # normalisation degrades inter-subject amplitude and shapes of the components
    T = io.time_series_from_file(in_file, normalize=None, TR=tr)
    normalized_data = T.data
    img = nib.load(in_file)
    out_img = nib.Nifti1Image(normalized_data, img.get_affine(), header=img.header)
    out_file = fname_presuffix(in_file, suffix='_norm', newpath=os.getcwd())
    out_img.to_filename(out_file)
    return out_file
def main(config_file):
    c = load_config(config_file, create_config)
    workflow = normalize_workflow(c)
    workflow.base_dir = c.working_dir
    workflow.config = {'execution': {'crashdump_dir': c.crash_dir,
                                     "job_finished_timeout": c.timeout}}
    if c.use_advanced_options:
        exec c.advanced_script
    from nipype.utils.filemanip import fname_presuffix
    workflow.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    if c.save_script_only:
        return 0
    if c.run_using_plugin:
        workflow.run(plugin=c.plugin, plugin_args=c.plugin_args)
    else:
        workflow.run()
def _add_volumes(bold_file, bold_cut_file, skip_vols):
    """Prepend skip_vols from bold_file onto bold_cut_file."""
    import nibabel as nb
    import numpy as np
    from nipype.utils.filemanip import fname_presuffix

    if skip_vols == 0:
        return bold_cut_file

    bold_img = nb.load(bold_file)
    bold_cut_img = nb.load(bold_cut_file)

    bold_data = np.concatenate((bold_img.dataobj[..., :skip_vols],
                                bold_cut_img.dataobj), axis=3)

    out = fname_presuffix(bold_cut_file, suffix='_addnonsteady')
    bold_img.__class__(bold_data, bold_img.affine,
                       bold_img.header).to_filename(out)
    return out
def test_reslice():
    moving = example_data(infile='functional.nii')
    space_defining = example_data(infile='T1.nii')
    reslice = spmu.Reslice(matlab_cmd='mymatlab_version')
    assert_equal(reslice.inputs.matlab_cmd, 'mymatlab_version')
    reslice.inputs.in_file = moving
    reslice.inputs.space_defining = space_defining
    assert_equal(reslice.inputs.interp, 0)
    assert_raises(TraitError, reslice.inputs.trait_set, interp='nearest')
    assert_raises(TraitError, reslice.inputs.trait_set, interp=10)
    reslice.inputs.interp = 1
    script = reslice._make_matlab_command(None)
    outfile = fname_presuffix(moving, prefix='r')
    assert_equal(reslice.inputs.out_file, outfile)
    expected = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;'
    assert_equal(expected in script.replace(' ', ''), True)
    expected_interp = 'flags.interp = 1;\n'
    assert_equal(expected_interp in script, True)
    assert_equal('spm_reslice(invols, flags);' in script, True)
def flirt_complex(mat_file, cplx_file, ref_file):
    import numpy as np, os, nibabel as nb
    import nipy.algorithms.registration.groupwise_registration as gr
    from nipype.utils.filemanip import fname_presuffix

    mat = np.loadtxt(mat_file)
    ref = nb.load(ref_file)
    cplx = nb.load(cplx_file)
    grid = np.squeeze(np.mgrid[[slice(0, s) for s in ref.shape] + [slice(1, 2)]])

    def _x_flipper(N_i):
        flipr = np.diag([-1, 1, 1, 1])
        flipr[0, 3] = N_i - 1
        return flipr

    def flirt2aff(mat, in_img, ref_img):
        in_hdr = in_img.get_header()
        ref_hdr = ref_img.get_header()
        inspace = np.diag(in_hdr.get_zooms()[:3] + (1, ))
        refspace = np.diag(ref_hdr.get_zooms()[:3] + (1, ))
        if np.linalg.det(in_img.get_affine()) >= 0:
            inspace = np.dot(inspace, _x_flipper(in_hdr.get_data_shape()[0]))
        if np.linalg.det(ref_img.get_affine()) >= 0:
            refspace = np.dot(refspace, _x_flipper(ref_hdr.get_data_shape()[0]))
        return np.dot(np.linalg.inv(refspace), np.dot(mat, inspace))

    mat = flirt2aff(mat, cplx, ref)
    coords = np.linalg.inv(mat).dot(grid.transpose(1, 2, 0, 3))
    tmp = np.zeros(ref.shape)
    out_cplx = np.zeros(ref.shape + cplx.shape[3:], dtype=np.complex64)
    for t in range((out_cplx.shape[3:] + (1, ))[0]):
        splines = gr._cspline_transform(np.real(cplx.get_data()[..., t]))
        gr._cspline_sample3d(tmp, splines, coords[0], coords[1], coords[2])
        out_cplx[..., t] = tmp
        splines = gr._cspline_transform(np.imag(cplx.get_data()[..., t]))
        gr._cspline_sample3d(tmp, splines, coords[0], coords[1], coords[2])
        out_cplx[..., t] += 1j * tmp

    flirtname = fname_presuffix(cplx_file, suffix='_flirted', newpath=os.getcwd())
    nb.save(nb.Nifti1Image(out_cplx, ref.get_affine()), flirtname)
    return flirtname
def _run_interface(self, runtime):
    self._results['out_file'] = fname_presuffix(self.inputs.in_file,
                                                suffix='_mask',
                                                newpath=runtime.cwd)
    nii = nb.load(self.inputs.in_file)
    data = nii.get_data()
    mask = np.zeros_like(data, dtype=np.uint8)

    if isdefined(self.inputs.match) and self.inputs.match:
        for label in self.inputs.match:
            mask[data == label] = 1
    else:
        mask[data >= self.inputs.threshold] = 1

    new = nii.__class__(mask, nii.affine, nii.header)
    new.set_data_dtype(np.uint8)
    new.to_filename(self._results['out_file'])
    return runtime
def dseg_label(in_seg, label, newpath=None):
    """Extract a particular label from a discrete segmentation."""
    from pathlib import Path
    import nibabel as nb
    import numpy as np
    from nipype.utils.filemanip import fname_presuffix

    newpath = Path(newpath or ".")

    nii = nb.load(in_seg)
    data = np.int16(nii.dataobj) == label

    out_file = fname_presuffix(in_seg, suffix="_mask", newpath=str(newpath.absolute()))
    new = nii.__class__(data, nii.affine, nii.header)
    new.set_data_dtype(np.uint8)
    new.to_filename(out_file)
    return out_file
def _union(in1, in2, newpath=None):
    """Compute the union of two (binary) input masks."""
    from pathlib import Path
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    mask = nb.load(in1)
    data = (np.asanyarray(mask.dataobj) + np.asanyarray(nb.load(in2).dataobj)) > 0

    hdr = mask.header.copy()
    hdr.set_data_dtype("uint8")
    out_file = fname_presuffix(in1, suffix="_union", newpath=newpath or Path.cwd())
    mask.__class__(data.astype("uint8"), mask.affine, hdr).to_filename(out_file)
    return out_file
def _run_interface(self, runtime):
    self._results['out_file'] = fname_presuffix(self.inputs.in_anat,
                                                suffix='_rbrainmask',
                                                newpath=runtime.cwd)
    anatnii = nb.load(self.inputs.in_anat)
    msknii = nb.Nifti1Image(
        grow_mask(anatnii.get_fdata(dtype='float32'),
                  np.asanyarray(nb.load(self.inputs.in_aseg).dataobj).astype('int16'),
                  np.asanyarray(nb.load(self.inputs.in_ants).dataobj).astype('int16')),
        anatnii.affine, anatnii.header)
    msknii.set_data_dtype(np.uint8)
    msknii.to_filename(self._results['out_file'])
    return runtime
def _gen_fname(self, basename, cwd=None, prefix=None, suffix=None):
    """Generate a filename based on the given parameters.

    The filename will take the form: cwd/<prefix>basename<suffix>.

    Parameters
    ----------
    basename : str
        Filename to base the new filename on.
    cwd : str
        Path to prefix to the new filename. (default is os.getcwd())
    prefix : str
        Prefix to add to the `basename`. (default is '')
    suffix : str
        Suffix to add to the `basename`. (default is '')

    Returns
    -------
    fname : str
        New filename based on given parameters.

    """
    if basename == '':
        msg = 'Unable to generate filename for command %s. ' % self.cmd
        msg += 'basename is not set!'
        raise ValueError(msg)
    if cwd is None:
        cwd = getcwd()
    if prefix is None or not isdefined(prefix):
        prefix = ''
    if suffix is None:
        suffix = ''
    fname = fname_presuffix(basename, prefix=prefix, suffix=suffix,
                            use_ext=True, newpath=cwd)
    return fname
def _run_interface(self, runtime):
    if self.inputs.out_file is None:
        self._results["out_file"] = fname_presuffix(
            self.inputs.confounds_file,
            suffix="_confoundCorrelation.svg",
            use_ext=False,
            newpath=runtime.cwd,
        )
    else:
        self._results["out_file"] = self.inputs.out_file
    confounds_correlation_plot(
        confounds_file=self.inputs.confounds_file,
        columns=self.inputs.columns if isdefined(self.inputs.columns) else None,
        max_dim=self.inputs.max_dim,
        output_file=self._results["out_file"],
        reference=self.inputs.reference_column,
    )
    return runtime
def main(config_file):
    c = load_config(config_file, create_config)
    wf = create_sm(c)
    wf.config = {'execution': {'crashdump_dir': c.crash_dir,
                               "job_finished_timeout": c.timeout}}
    wf.base_dir = c.working_dir
    if c.test_mode:
        wf.write_graph()
    from nipype.utils.filemanip import fname_presuffix
    wf.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    if c.save_script_only:
        return 0
    if c.run_using_plugin:
        wf.run(plugin=c.plugin, plugin_args=c.plugin_args)
    else:
        wf.run()
def demean(in_file, in_mask, only_mask=False, newpath=None):
    """Demean ``in_file`` within the mask defined by ``in_mask``."""
    import os
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    out_file = fname_presuffix(in_file, suffix="_demeaned", newpath=os.getcwd())
    nii = nb.load(in_file)
    msk = np.asanyarray(nb.load(in_mask).dataobj)
    data = nii.get_fdata()
    if only_mask:
        data[msk > 0] -= np.median(data[msk > 0])
    else:
        data -= np.median(data[msk > 0])
    nb.Nifti1Image(data, nii.affine, nii.header).to_filename(out_file)
    return out_file
def _select_labels(in_segm, labels):
    from os import getcwd
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    out_files = []
    cwd = getcwd()
    nii = nb.load(in_segm)
    label_data = np.asanyarray(nii.dataobj).astype('uint8')

    for l in labels:
        newnii = nii.__class__(np.uint8(label_data == l), nii.affine, nii.header)
        newnii.set_data_dtype('uint8')
        out_file = fname_presuffix(in_segm, suffix='_class-%02d' % l, newpath=cwd)
        newnii.to_filename(out_file)
        out_files.append(out_file)
    return out_files
def fdr(in_file, mask_file, pthresh):
    import os
    import nibabel as nib
    import numpy as np
    from nipype.utils.filemanip import fname_presuffix

    qstat = os.path.abspath(os.path.split(in_file)[1])
    qrate = fname_presuffix(os.path.split(qstat)[0], 'qrate_', os.path.abspath('.'))
    p = os.popen('fdr -i %s -m %s -q %s -o %s' % (in_file, mask_file, pthresh, qrate))
    qthresh = 1 - float(p.readlines()[1])
    img = nib.load(in_file)
    data, aff = img.get_data(), img.get_affine()
    data = np.ones(data.shape) - data
    ominp = nib.Nifti1Image(data, aff)
    ominp.to_filename(qstat)
    return qstat, qthresh, qrate
def truncation(
    in_file,
    clip_max=99.9,
    dtype='int16',
    out_file=None,
    out_max=1000,
    out_min=0,
    percentiles=(0.1, 95),
):
    """Truncate and clip the input image intensities."""
    from pathlib import Path
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    try:
        info = np.iinfo(dtype)
    except ValueError:
        info = np.finfo(dtype)

    img = nb.load(in_file)
    hdr = img.header.copy()
    hdr.set_data_dtype(dtype)
    data = img.get_fdata()

    out_min = max(out_min, info.min)
    out_max = min(out_max, info.max)

    a_min = np.percentile(data.reshape(-1), percentiles[0])
    data -= a_min
    a_max = np.percentile(data.reshape(-1), percentiles[1])
    data *= out_max / a_max
    data = np.clip(data, info.min, info.max)

    if clip_max is not None:
        data = np.clip(data, 0, np.percentile(data.reshape(-1), clip_max))

    if out_file is None:
        out_file = fname_presuffix(Path(in_file).name, suffix="_trunc")

    out_file = str(Path(out_file).absolute())
    img.__class__(data.astype(dtype), img.affine, hdr).to_filename(out_file)
    return out_file
def main(config_file):
    c = load_config(config_file, config)
    workflow = create_spm_preproc(c, 'spm_preproc')
    if c.test_mode:
        workflow.write_graph()
    from nipype.utils.filemanip import fname_presuffix
    workflow.export(fname_presuffix(config_file, '', '_script_').replace('.json', ''))
    if c.save_script_only:
        return 0
    if c.run_using_plugin:
        workflow.run(plugin=c.plugin, plugin_args=c.plugin_args)
    else:
        workflow.run()
    return None
def _run_interface(self, runtime):
    in_file = self.inputs.in_file
    img = nb.load(in_file)
    extra_dims = tuple(dim for dim in img.shape[3:] if dim > 1) or (1,)
    if len(extra_dims) != 1:
        raise ValueError(f"Invalid shape {'x'.join(str(s) for s in img.shape)}")
    img = img.__class__(img.dataobj.reshape(img.shape[:3] + extra_dims),
                        img.affine, img.header)

    self._results["out_files"] = []
    for i, img_3d in enumerate(nb.four_to_three(img)):
        out_file = fname_presuffix(in_file, suffix=f"_idx-{i:03}",
                                   newpath=runtime.cwd)
        img_3d.to_filename(out_file)
        self._results["out_files"].append(out_file)
    return runtime
def bvec_to_rasb(bval_file, bvec_file, img_file, workdir):
    """Use mrinfo to convert a bvec to the RAS+ world coordinate reference frame."""
    # Make a temporary bvec file that mrtrix likes
    temp_bvec = fname_presuffix(bvec_file, suffix="_fix", newpath=workdir)
    lps_bvec = np.loadtxt(bvec_file).reshape(3, -1)
    np.savetxt(temp_bvec, lps_bvec * np.array([[-1], [1], [1]]))

    # Run mrinfo to get the RAS+ vector
    cmd = [SS3T_ROOT + '/mrinfo', '-dwgrad', '-fslgrad', temp_bvec, bval_file, img_file]
    proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    LOGGER.info(' '.join(cmd))
    if err:
        raise Exception(str(err))

    return np.fromstring(out, dtype=float, sep=' ')[:3]
def apply_lut(in_dseg, lut, newpath=None):
    """Map the input discrete segmentation to a new label set (lookup table, LUT)."""
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix

    if newpath is None:
        from os import getcwd
        newpath = getcwd()

    out_file = fname_presuffix(in_dseg, suffix='_dseg', newpath=newpath)
    lut = np.array(lut, dtype='int16')

    segm = nb.load(in_dseg)
    hdr = segm.header.copy()
    hdr.set_data_dtype('int16')
    segm.__class__(lut[np.asanyarray(segm.dataobj, dtype=int)].astype('int16'),
                   segm.affine, hdr).to_filename(out_file)
    return out_file
def restrict_to_gray(rois, mask, threshold=0.5, min_nvox=12):
    import os
    import nibabel as nb
    import numpy as np
    from nipype.utils.filemanip import fname_presuffix

    if not isinstance(rois, list):
        rois = [rois]
    roi_niis = [nb.load(r) for r in rois]
    mask = nb.load(mask).get_data() > threshold
    rois_data = [r.get_data() for r in roi_niis]
    for r in rois_data:
        r[np.isnan(r)] = 0
    new_rois = [r * mask for r in rois_data]

    nfnames = []
    for od, nd, nii, fname in zip(rois_data, new_rois, roi_niis, rois):
        for rid in np.unique(od)[1:]:
            if np.count_nonzero(nd == rid) < min_nvox:
                nd[od == rid] = rid
        nfname = fname_presuffix(fname, newpath=os.getcwd(), suffix='_gmonly')
        nb.save(nb.Nifti1Image(nd, nii.get_affine(), nii.get_header()), nfname)
        nfnames.append(nfname)
    return nfnames
def _run_interface(self, runtime):
    if isdefined(self.inputs.output_file):
        out_file = self.inputs.output_file
    else:
        out_file = fname_presuffix(
            self.inputs.confounds_file,
            suffix="_expansion.tsv",
            newpath=runtime.cwd,
            use_ext=False,
        )

    confounds_data = pd.read_csv(self.inputs.confounds_file, sep="\t")
    _, confounds_data = parse_formula(
        model_formula=self.inputs.model_formula,
        parent_data=confounds_data,
        unscramble=True,
    )
    confounds_data.to_csv(out_file, sep="\t", index=False, na_rep="n/a")
    self._results["confounds_file"] = out_file
    return runtime
def ply2gii(in_file, metadata, out_file=None):
    """Convert from ply to GIfTI."""
    from pathlib import Path
    from numpy import eye
    from nibabel.gifti import (
        GiftiMetaData,
        GiftiCoordSystem,
        GiftiImage,
        GiftiDataArray,
    )
    from pyntcloud import PyntCloud

    in_file = Path(in_file)
    surf = PyntCloud.from_file(str(in_file))

    # Update centroid metadata
    metadata.update(
        zip(('SurfaceCenterX', 'SurfaceCenterY', 'SurfaceCenterZ'),
            ['%.4f' % c for c in surf.centroid]))

    # Prepare data arrays
    da = (GiftiDataArray(data=surf.xyz.astype('float32'),
                         datatype='NIFTI_TYPE_FLOAT32',
                         intent='NIFTI_INTENT_POINTSET',
                         meta=GiftiMetaData.from_dict(metadata),
                         coordsys=GiftiCoordSystem(xform=eye(4), xformspace=3)),
          GiftiDataArray(data=surf.mesh.values,
                         datatype='NIFTI_TYPE_INT32',
                         intent='NIFTI_INTENT_TRIANGLE',
                         coordsys=None))
    surfgii = GiftiImage(darrays=da)

    if out_file is None:
        out_file = fname_presuffix(in_file.name, suffix='.gii', use_ext=False,
                                   newpath=str(Path.cwd()))

    surfgii.to_filename(str(out_file))
    return out_file
def _run_interface(self, runtime):
    """Load the atlas and make the atlas name for the output file"""
    if self.inputs.atlas_name == 'shen':
        atlas = load_shen_268(resolution=self.inputs.resolution)
        atlas_name = self.inputs.atlas_name
    elif self.inputs.atlas_name == 'craddock':
        atlas = load_craddock_2011(
            number_of_clusters=self.inputs.number_of_clusters,
            algorithm=self.inputs.algorithm,
            similarity_measure=self.inputs.similarity_measure)
        atlas_name = "%s_%d" % (self.inputs.atlas_name,
                                self.inputs.number_of_clusters)
    else:
        raise RuntimeError("Atlas name %s not recognized" % self.inputs.atlas_name)

    if type(self.inputs.nifti) == list:
        self.inputs.nifti = self.inputs.nifti[0]
    source_img = nibabel.load(self.inputs.nifti)
    source_dimensions = len(source_img.shape)  # 4D or 3D
    masker = nilearn.input_data.NiftiLabelsMasker(atlas)
    roi_data = masker.fit_transform(source_img)

    suffix = "_%s.csv" % atlas_name
    if source_dimensions == 4:
        # 4D images get the ts suffix for time-series
        suffix = suffix.replace('.csv', '_ts.csv')

    out_file = fname_presuffix(self.inputs.nifti, suffix=suffix,
                               use_ext=False).replace(
                                   Path(self.inputs.bids_dir).stem,
                                   __name__.split('.')[0])
    os.makedirs(Path(out_file).parent, exist_ok=True)
    numpy.savetxt(out_file, roi_data, delimiter=',')
    self._results['transformed'] = out_file
    return runtime