def run_rand(cope_file, design_file, contrast_file, group_file, mask_file,
             cluster_threshold=2.3, n=10000):
    import os
    from glob import glob
    from nipype.interfaces.base import CommandLine

    cmd = ("randomise -i {cope_file} -m {mask_file} -d {design_file} "
           "-t {contrast_file} -e {group_file} -T "
           "-c {cluster_threshold} -x -n {n}")
    cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file,
                                design_file=design_file,
                                contrast_file=contrast_file,
                                group_file=group_file,
                                cluster_threshold=cluster_threshold, n=n))
    results = cl.run(terminal_output='file')
    return [os.path.join(os.getcwd(), val) for val in sorted(glob('rand*'))]
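# Example usage (a minimal sketch, not part of the original snippet; the file
# names below are hypothetical and FSL's `randomise` is assumed to be on the PATH):
#
# rand_files = run_rand('cope1.nii.gz', 'design.mat', 'design.con',
#                       'design.grp', 'mask.nii.gz',
#                       cluster_threshold=2.3, n=5000)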
def area(command, surface_file):
    """
    Measure area of each vertex in a surface mesh.
    (Calls Joachim Giard's C++ code)

    Parameters
    ----------
    command : string
        Voronoi-based surface area C++ executable command
    surface_file : string
        vtk file with surface mesh

    Returns
    -------
    area_file : string
        vtk file with surface area per vertex of mesh

    """
    import os
    from nipype.interfaces.base import CommandLine

    area_file = os.path.join(os.getcwd(),
                             os.path.splitext(os.path.basename(surface_file))[0] +
                             '.area.vtk')
    cli = CommandLine(command=command)
    cli.inputs.args = ' '.join([surface_file, area_file])
    cli.cmdline
    cli.run()

    if not os.path.exists(area_file):
        raise IOError(area_file + " not found")

    return area_file
def elastix(input_file, target_file, mask_file, output_prefix,
            output_sub_prefix, param_rigid, param_BSpline):
    from os.path import abspath as opap
    from nipype.interfaces.base import CommandLine
    from nipype.utils.filemanip import split_filename
    import shutil
    import glob
    import os

    # NOTE: experiment_dir is assumed to be defined at module level.
    out_dir = experiment_dir + os.sep + output_prefix + os.sep + output_sub_prefix
    # Re-create the output directory if it already exists
    if os.path.exists(out_dir):
        print("rmtree: " + out_dir)
        shutil.rmtree(out_dir)
    print("creating: " + out_dir)
    os.makedirs(out_dir)

    cmd = CommandLine(
        ("/Users/eija/Documents/SW/Elastix/elastix_sources_v4.7/bin/bin/elastix "
         "-f %s -m %s -out %s -p %s -p %s -threads 8")
        % (target_file, input_file, out_dir, param_rigid, param_BSpline))
    print("elastix: " + cmd.cmd)
    cmd.run()
    resultfiles = glob.glob(out_dir + os.sep + "result.*.tiff")
    return resultfiles
def DICOM2animatedGIF_sidebyside(dcmdir_l, dcmdir_r, outputpath, slice_i, suffix):
    import os
    import PIL.Image
    from nipype.interfaces.base import CommandLine
    import DicomIO  # local DICOM I/O helper module, assumed to be importable

    dcmio = DicomIO.DicomIO()
    dwidcm_l = dcmio.ReadDICOM_frames(dcmdir_l)
    dwidcm_r = dcmio.ReadDICOM_frames(dcmdir_r)
    # Write all frames of the slice into a set of side-by-side png files
    for frame_i in range(len(dwidcm_l)):
        slice_l = dwidcm_l[frame_i][slice_i].pixel_array.T
        slice_r = dwidcm_r[frame_i][slice_i].pixel_array.T
        dimx = slice_l.shape[0]
        dimy = slice_l.shape[1]
        newImg1 = PIL.Image.new('L', (dimx * 2, dimy))
        pixels1 = newImg1.load()
        for i in range(0, dimx):
            for j in range(0, dimy):
                pixels1[i, j] = float(slice_l[i, j])
                pixels1[dimx + i, j] = float(slice_r[i, j])
                # pixels1[i, j] = float(slice[i, j]) * dwidcm[frame_i][slice_i].RescaleSlope + dwidcm[frame_i][slice_i].RescaleIntercept
        newImg1.save((outputpath + '_' + ('%02d' % frame_i) + '.png'), 'PNG')
    cmd = CommandLine('convert -delay 25 -loop 0 %s_*.png %s_%s.gif'
                      % (outputpath, outputpath, suffix))
    cmd.run()
    for frame_i in range(len(dwidcm_l)):
        os.remove((outputpath + '_' + ('%02d' % frame_i) + '.png'))
    print("convert (ImageMagick):" + cmd.cmd)
    # Return the gif that ImageMagick actually wrote
    return '%s_%s.gif' % (outputpath, suffix)
def _submit_batchtask(self, scriptfile, node):
    cmd = CommandLine('qsub', environ=dict(os.environ),
                      terminal_output='allatonce')
    path = os.path.dirname(scriptfile)
    qsubargs = ''
    if self._qsub_args:
        qsubargs = self._qsub_args
    if 'qsub_args' in node.plugin_args:
        if 'overwrite' in node.plugin_args and \
                node.plugin_args['overwrite']:
            qsubargs = node.plugin_args['qsub_args']
        else:
            qsubargs += (" " + node.plugin_args['qsub_args'])
    if '-o' not in qsubargs:
        qsubargs = '%s -o %s' % (qsubargs, path)
    if '-e' not in qsubargs:
        qsubargs = '%s -e %s' % (qsubargs, path)
    if node._hierarchy:
        jobname = '.'.join((dict(os.environ)['LOGNAME'], node._hierarchy,
                            node._id))
    else:
        jobname = '.'.join((dict(os.environ)['LOGNAME'], node._id))
    jobnameitems = jobname.split('.')
    jobnameitems.reverse()
    jobname = '.'.join(jobnameitems)
    jobname = qsub_sanitize_job_name(jobname)
    cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile)
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    tries = 0
    result = list()
    while True:
        try:
            result = cmd.run()
        except Exception as e:
            if tries < self._max_tries:
                tries += 1
                time.sleep(self._retry_timeout)  # sleep 2 seconds and try again
            else:
                iflogger.setLevel(oldlevel)
                raise RuntimeError('\n'.join(
                    (('Could not submit sge task for node %s') % node._id,
                     str(e))))
        else:
            break
    iflogger.setLevel(oldlevel)
    # retrieve sge taskid
    lines = [line for line in result.runtime.stdout.split('\n') if line]
    taskid = int(re.match("Your job ([0-9]*) .* has been submitted",
                          lines[-1]).groups()[0])
    self._pending[taskid] = node.output_dir()
    self._refQstatSubstitute.add_startup_job(taskid, cmd.cmdline)
    logger.debug('submitted sge task: %d for node %s with %s'
                 % (taskid, node._id, cmd.cmdline))
    return taskid
def _submit_batchtask(self, scriptfile, node):
    cmd = CommandLine('qsub', environ=os.environ.data)
    qsubargs = ''
    if self._qsub_args:
        qsubargs = self._qsub_args
    if node._hierarchy:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._hierarchy,
                            node._id))
    else:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._id))
    cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile)
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    tries = 0
    while True:
        try:
            result = cmd.run()
        except Exception as e:
            if tries < self._max_tries:
                tries += 1
                sleep(self._retry_timeout)  # sleep 2 seconds and try again
            else:
                iflogger.setLevel(oldlevel)
                raise RuntimeError('\n'.join(
                    (('Could not submit pbs task for node %s') % node._id,
                     str(e))))
        else:
            break
def dicom2nrrd(dicomdir, out_prefix, out_suffix):
    import os
    from nipype.interfaces.base import CommandLine

    # NOTE: experiment_dir is assumed to be defined at module level.
    cmd = CommandLine('DWIConvert --inputDicomDirectory %s --outputVolume %s/%s/%s%s.nrrd'
                      % (dicomdir, experiment_dir, out_prefix, out_prefix, out_suffix))
    print("DICOM->NRRD:" + cmd.cmd)
    cmd.run()
    return os.path.abspath('%s/%s/%s%s.nrrd'
                           % (experiment_dir, out_prefix, out_prefix, out_suffix))
def _submit_batchtask(self, scriptfile, node):
    cmd = CommandLine('bsub', environ=os.environ.data,
                      terminal_output='allatonce')
    path = os.path.dirname(scriptfile)
    bsubargs = ''
    if self._bsub_args:
        bsubargs = self._bsub_args
    if 'bsub_args' in node.plugin_args:
        if 'overwrite' in node.plugin_args and \
                node.plugin_args['overwrite']:
            bsubargs = node.plugin_args['bsub_args']
        else:
            bsubargs += (" " + node.plugin_args['bsub_args'])
    if '-o' not in bsubargs:  # -o outfile
        bsubargs = '%s -o %s' % (bsubargs, scriptfile + ".log")
    if '-e' not in bsubargs:  # -e error file
        bsubargs = '%s -e %s' % (bsubargs, scriptfile + ".log")
    if node._hierarchy:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._hierarchy,
                            node._id))
    else:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._id))
    jobnameitems = jobname.split('.')
    jobnameitems.reverse()
    jobname = '.'.join(jobnameitems)
    cmd.inputs.args = '%s -J %s sh %s' % (bsubargs, jobname,
                                          scriptfile)  # -J job_name_spec
    logger.debug('bsub ' + cmd.inputs.args)
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    tries = 0
    while True:
        try:
            result = cmd.run()
        except Exception as e:
            if tries < self._max_tries:
                tries += 1
                sleep(self._retry_timeout)  # sleep 2 seconds and try again
            else:
                iflogger.setLevel(oldlevel)
                raise RuntimeError('\n'.join(
                    (('Could not submit lsf task for node %s') % node._id,
                     str(e))))
        else:
            break
    iflogger.setLevel(oldlevel)
    # retrieve lsf taskid
    match = re.search(r'<(\d*)>', result.runtime.stdout)
    if match:
        taskid = int(match.groups()[0])
    else:
        raise ScriptError("Can't parse submission job output id: %s"
                          % result.runtime.stdout)
    self._pending[taskid] = node.output_dir()
    logger.debug('submitted lsf task: %d for node %s' % (taskid, node._id))
    return taskid
def geodesic_depth(command, surface_file):
    """
    Measure "travel depth" of each vertex in a surface mesh.
    (Calls Joachim Giard's C++ code)

    Parameters
    ----------
    command : travel depth C++ executable command
    surface_file : ``vtk file``

    Returns
    -------
    depth_file : string
        vtk file with geodesic depth per vertex of mesh

    """
    import os
    from nipype.interfaces.base import CommandLine

    depth_file = os.path.join(os.getcwd(),
                              os.path.splitext(os.path.basename(surface_file))[0] +
                              '.geodesic_depth.vtk')
    cli = CommandLine(command=command)
    cli.inputs.args = ' '.join([surface_file, depth_file])
    cli.cmdline
    cli.run()

    if not os.path.exists(depth_file):
        raise IOError(depth_file + " not found")

    return depth_file
def _grab_xml(self, module):
    cmd = CommandLine(command="Slicer3", args="--launch %s --xml" % module)
    ret = cmd.run()
    if ret.runtime.returncode == 0:
        return xml.dom.minidom.parseString(ret.runtime.stdout)
    else:
        raise Exception(cmd.cmdline + " failed:\n%s" % ret.runtime.stderr)
def extract_stats_fsl(data, mask, gmmask, threshold=0.3):
    """Uses fsl tools to extract data values in mask;
    masks 'mask' with gmmask thresholded at 'threshold' (default 0.3);
    returns mean, std, nvoxels.

    NOTE: generates some tmp files in tempdir, but also removes them"""
    import os
    import tempfile
    from shutil import rmtree
    from nipype.interfaces.base import CommandLine
    from nipype.utils.filemanip import fname_presuffix

    tmpdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tmpdir)
    # first mask 'mask' with the thresholded gmmask
    pth, nme = os.path.split(mask)
    outfile = fname_presuffix(mask, prefix='gmask_', newpath=tmpdir)
    c1 = CommandLine('fslmaths %s -thr %2.2f -nan -mul %s %s'
                     % (gmmask, threshold, mask, outfile)).run()
    if not c1.runtime.returncode == 0:
        print('gm masking of mask failed for %s' % (mask))
        print('tmp dir', tmpdir)
        print(c1.runtime.stderr)
        return None
    # then mask the data
    cmd = 'fslmaths %s -nan -mas %s masked_data' % (data, outfile)
    mask_out = CommandLine(cmd).run()
    if not mask_out.runtime.returncode == 0:
        print('masking failed for %s' % (data))
        return None, None, None
    # find_single_file is a helper assumed to be defined elsewhere in this module
    masked = find_single_file('masked*')
    # get stats
    mean_out = CommandLine('fslstats %s -M' % (masked)).run()
    mean = mean_out.runtime.stdout.strip('\n').strip()
    std_out = CommandLine('fslstats %s -S' % (masked)).run()
    std = std_out.runtime.stdout.strip('\n').strip()
    vox_out = CommandLine('fslstats %s -V' % (masked)).run()
    vox = vox_out.runtime.stdout.split()[0]
    os.chdir(startdir)
    rmtree(tmpdir)
    return mean, std, vox
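# Example usage (a minimal sketch, not part of the original snippet; the file
# names are hypothetical and FSL's fslmaths/fslstats are assumed to be installed):
#
# mean, std, nvox = extract_stats_fsl('pet.nii.gz', 'roi_mask.nii.gz',
#                                     'gm_prob.nii.gz', threshold=0.3)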
def dicom2mhd(dicomdir, experiment_dir, out_prefix):
    from nipype.utils.filemanip import split_filename
    from nipype.interfaces.base import CommandLine
    import os

    _, name, _ = split_filename(dicomdir)
    outfile_mhd = experiment_dir + '/' + out_prefix + '/' + name + '_tmp/' + 'output' + '.mhd'
    outfile_raw = experiment_dir + '/' + out_prefix + '/' + name + '_tmp/' + 'output' + '.raw'
    outfile_txt = experiment_dir + '/' + out_prefix + '/' + name + '_tmp/' + 'output' + '_info.txt'
    outdir = experiment_dir + '/' + out_prefix + '/' + name + '_tmp'
    # NOTE: mcverter_basedir is assumed to be defined at module level.
    cmd = CommandLine((mcverter_basedir + 'mcverter %s -r -f meta -o %s '
                       '-F-PatientName-SeriesDate-SeriesDescription-StudyId-SeriesNumber'
                       % (dicomdir, outdir)))
    print("DICOM->MHD:" + cmd.cmd)
    cmd.run()
    # Move to results folder
    # (move_to_results, rename_basename_to and replace_inplace are helpers
    # assumed to be defined elsewhere in this module.)
    outfile_mhd = move_to_results(outfile_mhd, experiment_dir, out_prefix)
    outfile_raw = move_to_results(outfile_raw, experiment_dir, out_prefix)
    outfile_txt = move_to_results(outfile_txt, experiment_dir, out_prefix)
    os.rmdir(outdir)
    # Rename basename, and its reference in the mhd header
    outfile_mhd = rename_basename_to(outfile_mhd, name)
    outfile_raw = rename_basename_to(outfile_raw, name)
    outfile_txt = rename_basename_to(outfile_txt, name)
    replace_inplace(outfile_mhd, ('output.raw'), (name + '.raw'))
    return outfile_mhd, outfile_raw, outfile_txt
def run_palm(cope_file, design_file, contrast_file, group_file, mask_file,
             cluster_threshold=3.09):
    import os
    from glob import glob
    from nipype.interfaces.base import CommandLine

    # cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} -eb {group_file} -T "
    #        "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -logp -zstat")
    # cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file,
    #                             design_file=design_file,
    #                             contrast_file=contrast_file,
    #                             group_file=group_file,
    #                             cluster_threshold=cluster_threshold))
    # XXX: ideally we should make it more fancy, but since we're only doing
    # 1-sample t-tests we can omit the design, contrast, and group files,
    # as per PALM's FAQ
    cmd = ("palm -i {cope_file} -m {mask_file} -T "
           "-C {cluster_threshold} -Cstat extent -fdr -noniiclass -twotail -logp -zstat")
    cl = CommandLine(cmd.format(cope_file=cope_file, mask_file=mask_file,
                                cluster_threshold=cluster_threshold))
    results = cl.run(terminal_output='file')
    return [os.path.join(os.getcwd(), val) for val in sorted(glob('palm*'))]
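# Example usage (a minimal sketch, not part of the original snippet; the inputs
# are hypothetical and PALM must be on the PATH; the design/contrast/group
# arguments are accepted but ignored by the current command):
#
# palm_files = run_palm('cope1.nii.gz', 'design.mat', 'design.con',
#                       'design.grp', 'mask.nii.gz', cluster_threshold=3.09)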
def export_graph(graph_in, base_dir=None, show=False, use_execgraph=False,
                 show_connectinfo=False, dotfilename='graph.dot', format='png',
                 simple_form=True):
    """ Displays the graph layout of the pipeline

    This function requires that pygraphviz and matplotlib are available on
    the system.

    Parameters
    ----------
    show : boolean
        Indicate whether to generate pygraphviz output from networkx. default
        [False]
    use_execgraph : boolean
        Indicates whether to use the specification graph or the execution
        graph. default [False]
    show_connectinfo : boolean
        Indicates whether to show the edge data on the graph. This makes the
        graph rather cluttered. default [False]
    """
    graph = deepcopy(graph_in)
    if use_execgraph:
        graph = generate_expanded_graph(graph)
        logger.debug('using execgraph')
    else:
        logger.debug('using input graph')
    if base_dir is None:
        base_dir = os.getcwd()
    if not os.path.exists(base_dir):
        os.makedirs(base_dir)
    outfname = fname_presuffix(dotfilename, suffix='_detailed.dot',
                               use_ext=False, newpath=base_dir)
    logger.info('Creating detailed dot file: %s' % outfname)
    _write_detailed_dot(graph, outfname)
    cmd = 'dot -T%s -O %s' % (format, outfname)
    res = CommandLine(cmd).run()
    if res.runtime.returncode:
        logger.warn('dot2png: %s', res.runtime.stderr)
    pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form)
    outfname = fname_presuffix(dotfilename, suffix='.dot', use_ext=False,
                               newpath=base_dir)
    nx.write_dot(pklgraph, outfname)
    logger.info('Creating dot file: %s' % outfname)
    cmd = 'dot -T%s -O %s' % (format, outfname)
    res = CommandLine(cmd).run()
    if res.runtime.returncode:
        logger.warn('dot2png: %s', res.runtime.stderr)
    if show:
        pos = nx.graphviz_layout(pklgraph, prog='dot')
        nx.draw(pklgraph, pos)
        if show_connectinfo:
            nx.draw_networkx_edge_labels(pklgraph, pos)
def area(command, surface_file, verbose=False):
    """
    Measure area of each vertex in a surface mesh.
    (Calls Joachim Giard's C++ code)

    Parameters
    ----------
    command : string
        Voronoi-based surface area C++ executable command
    surface_file : string
        vtk file with surface mesh
    verbose : bool
        print statements?

    Returns
    -------
    area_file : string
        vtk file with surface area per vertex of mesh

    Examples
    --------
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.shapes.surface_shapes import area
    >>> from mindboggle.mio.vtks import read_scalars
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> surface_file = fetch_data(urls['left_pial'], '', '.vtk')
    >>> verbose = False
    >>> ccode_path = os.environ['vtk_cpp_tools']
    >>> command = os.path.join(ccode_path, 'area', 'PointAreaMain')
    >>> area_file = area(command, surface_file, verbose)
    >>> scalars, name = read_scalars(area_file)
    >>> np.allclose(scalars[0:8],
    ...     [0.48270401731, 0.39661528543, 0.57813454792, 0.70574099571,
    ...      0.84318527207, 0.57642554119, 0.66942016035, 0.70629953593])
    True

    """
    import os
    from nipype.interfaces.base import CommandLine

    basename = os.path.splitext(os.path.basename(surface_file))[0]
    area_file = os.path.join(os.getcwd(), basename + '.area.vtk')
    args = ' '.join([surface_file, area_file])
    if verbose:
        print("{0} {1}".format(command, args))

    cli = CommandLine(command=command)
    cli.inputs.args = args
    cli.terminal_output = 'file'
    cli.run()

    if not os.path.exists(area_file):
        raise IOError(area_file + " not found")

    return area_file
def _is_pending(self, taskid):
    cmd = CommandLine('qstat')
    cmd.inputs.args = '-j %d' % taskid
    # check sge task
    result = cmd.run(ignore_exception=True)
    if result.runtime.stdout.startswith('='):
        return True
    return False
def dicom2nrrd(dicomdir, out_prefix):
    import os
    from nipype.interfaces.base import CommandLine

    cmd = CommandLine('DWIConvert --inputDicomDirectory %s --outputVolume %s.nrrd'
                      % (dicomdir, out_prefix))
    cmd.run()
    return os.path.abspath('%s.nrrd' % out_prefix)
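# Example usage (a minimal sketch, not part of the original snippet; the DICOM
# directory is hypothetical and DWIConvert is assumed to be on the PATH):
#
# nrrd_file = dicom2nrrd('/data/subject01/dwi_dicom', 'subject01_dwi')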
def _is_pending(self, taskid):
    cmd = CommandLine("qstat")
    cmd.inputs.args = "%s" % taskid
    # check pbs task
    result = cmd.run(ignore_exception=True)
    if "Unknown Job Id" in result.runtime.stderr:
        return False
    return True
def _submit_batchtask(self, scriptfile, node):
    """
    This is more or less the _submit_batchtask from sge.py with flipped
    variable names, different command line switches, and different output
    formatting/processing
    """
    cmd = CommandLine('sbatch', environ=os.environ.data,
                      terminal_output='allatonce')
    path = os.path.dirname(scriptfile)
    sbatch_args = ''
    if self._sbatch_args:
        sbatch_args = self._sbatch_args
    if 'sbatch_args' in node.plugin_args:
        if 'overwrite' in node.plugin_args and \
                node.plugin_args['overwrite']:
            sbatch_args = node.plugin_args['sbatch_args']
        else:
            sbatch_args += (" " + node.plugin_args['sbatch_args'])
    if '-o' not in sbatch_args:
        sbatch_args = '%s -o %s' % (sbatch_args,
                                    os.path.join(path, 'slurm-%j.out'))
    if '-e' not in sbatch_args:
        sbatch_args = '%s -e %s' % (sbatch_args,
                                    os.path.join(path, 'slurm-%j.out'))
    if '-p' not in sbatch_args:
        sbatch_args = '%s -p normal' % (sbatch_args)
    if '-n' not in sbatch_args:
        sbatch_args = '%s -n 16' % (sbatch_args)
    if '-t' not in sbatch_args:
        sbatch_args = '%s -t 1:00:00' % (sbatch_args)
    if node._hierarchy:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._hierarchy,
                            node._id))
    else:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._id))
    jobnameitems = jobname.split('.')
    jobnameitems.reverse()
    jobname = '.'.join(jobnameitems)
    cmd.inputs.args = '%s -J %s %s' % (sbatch_args, jobname, scriptfile)
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    tries = 0
    while True:
        try:
            result = cmd.run()
        except Exception as e:
            if tries < self._max_tries:
                tries += 1
                sleep(self._retry_timeout)  # sleep 2 seconds and try again
            else:
                iflogger.setLevel(oldlevel)
                raise RuntimeError('\n'.join(
                    (('Could not submit sbatch task for node %s') % node._id,
                     str(e))))
        else:
            break
def convert_audio_file(old_file, new_file, command='ffmpeg',
                       input_args='-i', output_args='-ac 2'):
    """
    Convert audio file to new format.

    Parameters
    ----------
    old_file : string
        full path to the input file
    new_file : string
        full path to the output file
    command : string
        executable command without arguments
    input_args : string
        arguments preceding input file name in command
    output_args : string
        arguments preceding output file name in command

    Returns
    -------
    new_file : string
        full path to the output file

    Examples
    --------
    >>> from mhealthx.xio import convert_audio_file
    >>> old_file = '/Users/arno/mhealthx_cache/mhealthx/feature_files/test.m4a'
    >>> new_file = 'test.wav'
    >>> command = 'ffmpeg'
    >>> input_args = '-y -i'
    >>> output_args = '-ac 2'
    >>> new_file = convert_audio_file(old_file, new_file, command,
    ...                               input_args, output_args)

    """
    import os
    from nipype.interfaces.base import CommandLine

    if not os.path.isfile(old_file):
        raise IOError("{0} does not exist.".format(old_file))
        new_file = None
    else:
        input_args = ' '.join([input_args, old_file, output_args, new_file])
        try:
            # Nipype command line wrapper:
            cli = CommandLine(command=command)
            cli.inputs.args = input_args
            cli.cmdline
            cli.run()
        except:
            import traceback
            traceback.print_exc()
            print("'{0} {1}' unsuccessful".format(command, input_args))
            new_file = None

    return new_file
def tar_cmd(infile):
    """ given a gzipped tar archive, untars"""
    import os
    from nipype.interfaces.base import CommandLine

    cwd = os.getcwd()
    pth, nme = os.path.split(infile)
    os.chdir(pth)
    cl = CommandLine('tar xfvz %s' % (infile))
    cout = cl.run()
    os.chdir(cwd)
    return pth
def unzip(infile):
    import os
    from nipype.interfaces.base import CommandLine

    gunzipfile, gz = os.path.splitext(infile)
    if not 'gz' in gz:
        # file is not gzipped; return it unchanged
        return infile
    else:
        c3 = CommandLine('gunzip %s' % (infile))
        c3.run()
        return gunzipfile
def _submit_batchtask(self, scriptfile, node):
    cmd = CommandLine('condor_qsub', environ=os.environ.data,
                      terminal_output='allatonce')
    path = os.path.dirname(scriptfile)
    qsubargs = ''
    if self._qsub_args:
        qsubargs = self._qsub_args
    if 'qsub_args' in node.plugin_args:
        if 'overwrite' in node.plugin_args and \
                node.plugin_args['overwrite']:
            qsubargs = node.plugin_args['qsub_args']
        else:
            qsubargs += (" " + node.plugin_args['qsub_args'])
    if self._qsub_args:
        qsubargs = self._qsub_args
    if '-o' not in qsubargs:
        qsubargs = '%s -o %s' % (qsubargs, path)
    if '-e' not in qsubargs:
        qsubargs = '%s -e %s' % (qsubargs, path)
    if node._hierarchy:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._hierarchy,
                            node._id))
    else:
        jobname = '.'.join((os.environ.data['LOGNAME'], node._id))
    jobnameitems = jobname.split('.')
    jobnameitems.reverse()
    jobname = '.'.join(jobnameitems)
    cmd.inputs.args = '%s -N %s %s' % (qsubargs, jobname, scriptfile)
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    tries = 0
    while True:
        try:
            result = cmd.run()
        except Exception as e:
            if tries < self._max_tries:
                tries += 1
                sleep(self._retry_timeout)  # sleep 2 seconds and try again
            else:
                iflogger.setLevel(oldlevel)
                raise RuntimeError('\n'.join(
                    (('Could not submit condor cluster for node %s')
                     % node._id, str(e))))
        else:
            break
    iflogger.setLevel(oldlevel)
    # retrieve condor clusterid
    taskid = int(result.runtime.stdout.split(' ')[2])
    self._pending[taskid] = node.output_dir()
    logger.debug('submitted condor cluster: %d for node %s'
                 % (taskid, node._id))
    return taskid
def fs_extract_label_rois(subdir, pet, dat, labels):
    """
    Uses freesurfer tools to extract label ROI stats

    Parameters
    -----------
    subdir : subjects freesurfer directory
    pet : filename of subjects PET volume coreg'd to mri space
    dat : filename of dat generated by tkregister mapping pet to mri
    labels : filename of subjects aparc+aseg.mgz

    Returns
    -------
    stats_file : file that contains roi stats
    label_file : file of volume with label rois in pet space

    you can check dat with ...
    'tkmedit %s T1.mgz -overlay %s -overlay-reg %s -fthresh 0.5 -fmid 1' % (subject, pet, dat)
    """
    import os
    from nipype.interfaces.base import CommandLine
    from nipype.utils.filemanip import split_filename

    pth, nme, ext = split_filename(pet)
    pth_lbl, nme_lbl, ext_lbl = split_filename(labels)
    stats_file = os.path.join(pth, '%s_%s_stats' % (nme, nme_lbl))
    label_file = os.path.join(pth, '%s_%s_.nii.gz' % (nme, nme_lbl))
    # Gen label file
    cmd = ['mri_label2vol',
           '--seg %s/mri/%s' % (subdir, labels),
           '--temp %s' % (pet),
           '--reg %s' % (dat),
           '--o %s' % (label_file)]
    cmd = ' '.join(cmd)
    cout = CommandLine(cmd).run()
    if not cout.runtime.returncode == 0:
        print('mri_label2vol failed for %s' % (pet))
        return None, None
    # Get stats
    cmd = ['mri_segstats',
           '--seg %s' % (label_file),
           '--sum %s' % (stats_file),
           '--in %s' % (pet),
           '--nonempty --ctab',
           '/usr/local/freesurfer_x86_64-4.5.0/FreeSurferColorLUT.txt']
    cmd = ' '.join(cmd)
    cout = CommandLine(cmd).run()
    if not cout.runtime.returncode == 0:
        print('mri_segstats failed for %s' % (pet))
        return None, None
    return stats_file, label_file
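# Example usage (a minimal sketch, not part of the original snippet; the paths
# are hypothetical and FreeSurfer's mri_label2vol / mri_segstats are assumed to
# be on the PATH):
#
# stats_file, label_file = fs_extract_label_rois(
#     '/data/freesurfer/subject01',
#     '/data/pet/subject01_pet_coreg.nii.gz',
#     '/data/pet/subject01_pet2mri.dat',
#     'aparc+aseg.mgz')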
def travel_depth(command, surface_file, verbose=False):
    """
    Measure "travel depth" of each vertex in a surface mesh.
    (Calls Joachim Giard's C++ code)

    Parameters
    ----------
    command : string
        travel depth C++ executable command
    surface_file : string
        vtk file
    verbose : bool
        print statements?

    Returns
    -------
    depth_file : string
        vtk file with travel depth per vertex of mesh

    Examples
    --------
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.shapes.surface_shapes import travel_depth
    >>> from mindboggle.mio.vtks import read_scalars
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> surface_file = fetch_data(urls['left_pial'], '', '.vtk')
    >>> verbose = False
    >>> ccode_path = os.environ['vtk_cpp_tools']
    >>> command = os.path.join(ccode_path, 'travel_depth', 'TravelDepthMain')
    >>> depth_file = travel_depth(command, surface_file, verbose)
    >>> scalars, name = read_scalars(depth_file)
    >>> np.allclose(scalars[0:8],
    ...     [0.020259869839, 0.06009166489, 0.12858575442, 0.045639221313,
    ...      0.007742772964, 0.052839111255, 0.053538904296, 0.013158746337])
    True

    """
    import os
    from nipype.interfaces.base import CommandLine

    basename = os.path.splitext(os.path.basename(surface_file))[0]
    depth_file = os.path.join(os.getcwd(), basename + '.travel_depth.vtk')
    args = ' '.join([surface_file, depth_file])
    if verbose:
        print("{0} {1}".format(command, args))

    cli = CommandLine(command=command)
    cli.inputs.args = args
    cli.terminal_output = 'file'
    cli.run()

    if not os.path.exists(depth_file):
        raise IOError(depth_file + " not found")

    return depth_file
def dicom2nii(dicomdir, experiment_dir, out_prefix, out_suffix):
    import os
    import shutil
    from nipype.interfaces.base import CommandLine

    # Remove leftover converted files from any previous run
    dirnames = os.listdir(dicomdir)
    for d_i in range(len(dirnames)):
        fileName, fileExtension = os.path.splitext(dirnames[d_i])
        if fileExtension == '.gz':
            os.remove(os.path.join(dicomdir, dirnames[d_i]))
        if fileExtension == '.bval':
            os.remove(os.path.join(dicomdir, dirnames[d_i]))
        if fileExtension == '.bvec':
            os.remove(os.path.join(dicomdir, dirnames[d_i]))

    basename = experiment_dir + '/' + out_prefix + '/' + out_prefix + out_suffix
    cmd = CommandLine('/Users/eija/Documents/osx/dcm2nii -a Y -d N -e N -i N -p N -o %s %s'
                      % (basename, dicomdir))
    print("DICOM->NII:" + cmd.cmd)
    cmd.run()

    dirnames = os.listdir(dicomdir)
    filename_nii = ''
    filename_bvec = ''
    filename_bval = ''
    for d_i in range(len(dirnames)):
        fileName, fileExtension = os.path.splitext(dirnames[d_i])
        if fileExtension == '.gz':
            if len(filename_nii) > 0:
                raise Exception("multiple copies of .nii.gz were found")
            filename_nii = fileName
        if fileExtension == '.bval':
            if len(filename_bval) > 0:
                raise Exception("multiple copies of .bval were found")
            filename_bval = fileName
        if fileExtension == '.bvec':
            if len(filename_bvec) > 0:
                raise Exception("multiple copies of .bvec were found")
            filename_bvec = fileName

    # move_to_results is a helper assumed to be defined elsewhere in this module
    outfile = move_to_results((dicomdir + '/' + filename_nii + '.gz'),
                              experiment_dir, out_prefix)
    outfile_bval = ''
    outfile_bvec = ''
    if len(filename_bval) > 0:
        outfile_bval = move_to_results((dicomdir + '/' + filename_bval + '.bval'),
                                       experiment_dir, out_prefix)
    if len(filename_bvec) > 0:
        outfile_bvec = move_to_results((dicomdir + '/' + filename_bvec + '.bvec'),
                                       experiment_dir, out_prefix)
    return outfile, outfile_bval, outfile_bvec
def _submit_batchtask(self, scriptfile, node):
    cmd = CommandLine("qsub", environ=os.environ.data)
    qsubargs = ""
    if self._qsub_args:
        qsubargs = self._qsub_args
    cmd.inputs.args = "%s -N %s %s" % (qsubargs,
                                       ".".join((os.environ.data["LOGNAME"],
                                                 node._id)),
                                       scriptfile)
    try:
        result = cmd.run()
    except Exception as e:
        raise RuntimeError("\n".join(("Could not submit pbs task for node %s"
                                      % node._id, str(e))))
def gunzip(file):
    import os
    from nipype.interfaces.base import CommandLine
    from nipype.utils.filemanip import split_filename

    os.mkdir("./out")
    c = CommandLine(command="tar zxvf %s" % file)
    c.run()
    _, base, _ = split_filename(file)
    return os.path.abspath(base)
def gznii2nii(in_file):
    import os
    import shutil
    from nipype.interfaces.base import CommandLine

    fileName, fileExtension = os.path.splitext(in_file)
    cmd = CommandLine('gunzip -f -k %s.nii.gz' % (fileName))
    print("gunzip NII.GZ:" + cmd.cmd)
    cmd.run()
    return os.path.abspath('%s.nii' % (fileName))
def nii2nrrd(filename_nii, filename_bval, filename_bvec, out_prefix, out_suffix):
    import os
    import shutil
    from nipype.interfaces.base import CommandLine

    # NOTE: experiment_dir is assumed to be defined at module level.
    basename = experiment_dir + os.sep + out_prefix + os.sep + out_prefix + out_suffix
    cmd = CommandLine('./DWIConvert --inputVolume %s --outputVolume %s.nrrd '
                      '--conversionMode FSLToNrrd --inputBValues %s --inputBVectors %s'
                      % (filename_nii, basename, filename_bval, filename_bvec))
    print("NII->NRRD:" + cmd.cmd)
    cmd.run()
    return os.path.abspath('%s.nrrd' % (basename))
def opensmile(wav_file, config_file, file_append='.csv', command='SMILExtract'):
    """
    Run openSMILE's SMILExtract on an input file to extract audio features.

    SMILExtract -C config/my_configfile.conf -I input.wav -O output.csv

    Parameters
    ----------
    wav_file : string
        full path to the input file
    config_file : string
        path to openSMILE configuration file
    file_append : string
        append to each file name to indicate output file format (e.g., '.csv')
    command : string
        executable command without arguments

    Returns
    -------
    feature_file : string
        output table of features (full path)

    Examples
    --------
    >>> from mhealthx.features import opensmile
    >>> wav_file = ['/home/arno/mhealthx_working/mHealthX/phonation_files/test.wav']
    >>> config_file = '/home/arno/software/audio/openSMILE/config/IS13_ComParE.conf'
    >>> file_append = '.csv'
    >>> command = '/home/arno/software/audio/openSMILE/SMILExtract'
    >>> feature_file = opensmile(wav_file, config_file, file_append, command)

    """
    import os
    from nipype.interfaces.base import CommandLine

    if not os.path.exists(wav_file):
        raise IOError(wav_file + " not found")
    else:
        feature_file = wav_file + file_append

        # Nipype command line wrapper over openSMILE:
        cli = CommandLine(command=command)
        cli.inputs.args = ' '.join(['-C', config_file,
                                    '-I', wav_file,
                                    '-O', feature_file])
        cli.cmdline
        cli.run()

    if not os.path.exists(feature_file):
        raise IOError(feature_file + " not found")

    return feature_file
def _is_pending(self, taskid):
    cmd = CommandLine('qstat')
    cmd.inputs.args = '%s' % taskid
    # check pbs task
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    result = cmd.run(ignore_exception=True)
    iflogger.setLevel(oldlevel)
    if 'Unknown Job Id' in result.runtime.stderr:
        return False
    return True
def Voxel2Image(bfloat_file, nii_file_header):
    import os
    import shutil
    from nipype.interfaces.base import CommandLine

    fileName, fileExtension = os.path.splitext(bfloat_file)
    head, root = os.path.split(fileName)
    cmd = CommandLine('cat %s | fa -inputmodel dt | voxel2image -outputroot %s -header %s -components %s'
                      % (bfloat_file, root, nii_file_header, 1))
    print("voxel2image:" + cmd.cmd)
    cmd.run()
    return os.path.abspath('%s.nii' % (root))
def dt2Image(bfloat_file, nii_file_header):
    import os
    import shutil
    from nipype.interfaces.base import CommandLine

    fileName, fileExtension = os.path.splitext(bfloat_file)
    head, root = os.path.split(fileName)
    cmd = CommandLine('dt2nii -inputfile %s -inputdatatype float -header %s -outputroot camino_'
                      % (bfloat_file, nii_file_header))
    print("dt2nii:" + cmd.cmd)
    cmd.run()
    return os.path.abspath('%s.nii' % (root))
def fslmath_op_custom(input_file, operation_str, output_prefix):
    import os
    from nipype.interfaces.base import CommandLine
    from nipype.utils.filemanip import split_filename

    path, name, ext = split_filename(input_file)
    outfile = path + '/' + name + '_op' + ext
    cmd = CommandLine('fslmaths %s %s %s' % (input_file, operation_str, outfile))
    print("custom fslmaths command:" + cmd.cmd)
    cmd.run()
    return outfile
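# Example usage (a minimal sketch, not part of the original snippet; the input
# file is hypothetical and FSL's fslmaths is assumed to be installed; note that
# output_prefix is accepted but not used by the function above):
#
# smoothed = fslmath_op_custom('dwi.nii.gz', '-s 2.0', 'smoothed')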
def geodesic_depth(command, surface_file, verbose=False):
    """
    Estimate geodesic depth of each vertex in a surface mesh.
    (Calls Joachim Giard's C++ code)

    Parameters
    ----------
    command : string
        geodesic depth C++ executable command
    surface_file : string
        vtk file
    verbose : bool
        print statements?

    Returns
    -------
    depth_file : string
        vtk file with geodesic depth per vertex of mesh

    Examples
    --------
    >>> import os
    >>> import numpy as np
    >>> from mindboggle.shapes.surface_shapes import geodesic_depth
    >>> from mindboggle.mio.vtks import read_scalars
    >>> from mindboggle.mio.fetch_data import prep_tests
    >>> urls, fetch_data = prep_tests()
    >>> surface_file = fetch_data(urls['left_pial'], '', '.vtk')
    >>> verbose = False
    >>> ccode_path = os.environ['vtk_cpp_tools']
    >>> command = os.path.join(ccode_path, 'geodesic_depth', 'GeodesicDepthMain')
    >>> depth_file = geodesic_depth(command, surface_file, verbose)
    >>> scalars, name = read_scalars(depth_file)
    >>> [np.float("{0:.{1}f}".format(x, 5)) for x in scalars[0:8]]
    [0.02026, 0.06009, 0.12859, 0.04564, 0.00774, 0.05284, 0.05354, 0.01316]

    """
    import os
    from nipype.interfaces.base import CommandLine

    basename = os.path.splitext(os.path.basename(surface_file))[0]
    depth_file = os.path.join(os.getcwd(), basename + '.geodesic_depth.vtk')
    args = ' '.join([surface_file, depth_file])
    if verbose:
        print("{0} {1}".format(command, args))

    cli = CommandLine(command=command)
    cli.inputs.args = args
    cli.cmdline
    cli.run()

    if not os.path.exists(depth_file):
        raise IOError(depth_file + " not found")

    return depth_file
def _is_pending(self, taskid):
    cmd = CommandLine('qstat')
    cmd.inputs.args = '-j %d' % taskid
    # check sge task
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    result = cmd.run(ignore_exception=True)
    iflogger.setLevel(oldlevel)
    if result.runtime.stdout.startswith('='):
        return True
    return False
def _is_pending(self, taskid):
    cmd = CommandLine('condor_q')
    cmd.inputs.args = '%d' % taskid
    # check condor cluster
    oldlevel = iflogger.level
    iflogger.setLevel(logging.getLevelName('CRITICAL'))
    result = cmd.run(ignore_exception=True)
    iflogger.setLevel(oldlevel)
    if result.runtime.stdout.count('\n%d' % taskid):
        return True
    return False
def evaluate_surface_overlaps_cpp(command, labels_file1, labels_file2,
                                  output_file):
    """
    Measure surface overlap using Joachim Giard's code.

    Note: Fails if the two files have different numbers of vertices.

    Parameters
    ----------
    command : string
        surface overlap C++ executable command
    labels_file1 : string
        ``vtk file`` with index labels for scalar values
    labels_file2 : string
        ``vtk file`` with index labels for scalar values
    output_file : string (optional)
        output file name

    Returns
    -------
    output_file : string
        name of output text file with overlap results

    Examples
    --------
    >>> import os
    >>> from mindboggle.evaluate.evaluate_labels import evaluate_surface_overlaps_cpp
    >>> from mindboggle.mindboggle import hashes_url
    >>> from mindboggle.mio.fetch_data import fetch_check_data
    >>> hashes, url, cache_env, cache = hashes_url()
    >>> ccode_path = os.environ['MINDBOGGLE_TOOLS']
    >>> command = os.path.join(ccode_path, 'surface_overlap', 'SurfaceOverlapMain')
    >>> label_file1 = 'lh.labels.DKT25.manual.vtk'
    >>> label_file2 = 'lh.labels.DKT31.manual.vtk'
    >>> file1 = fetch_check_data(label_file1, url, hashes, cache_env, cache)
    >>> file2 = fetch_check_data(label_file2, url, hashes, cache_env, cache)
    >>> output_file = ''
    >>> evaluate_surface_overlaps_cpp(command, file1, file2, output_file)

    """
    import os
    from nipype.interfaces.base import CommandLine

    if not output_file:
        output_file = os.path.basename(labels_file1) + '_and_' + \
                      os.path.basename(labels_file2) + '.txt'
    output_file = os.path.join(os.getcwd(), output_file)
    cli = CommandLine(command=command)
    cli.inputs.args = ' '.join([labels_file1, labels_file2, output_file])
    cli.cmdline
    cli.run()

    return output_file
def run_resample(in_file, like):
    import os
    from glob import glob
    from nipype.interfaces.base import CommandLine

    # cmd = ("palm -i {cope_file} -m {mask_file} -d {design_file} -t {contrast_file} -eb {group_file} -T "
    #        "-fdr -noniiclass -twotail -logp -zstat")
    cmd = ("rg_realign {in_file} {like} resampled trf.mat")
    cl = CommandLine(cmd.format(in_file=in_file, like=like))
    results = cl.run(terminal_output='file')
    return os.path.join(os.getcwd(), 'trf.mat')
def zip_files(files):
    import os
    import logging
    from nipype.interfaces.base import CommandLine

    # wrap a single path in a list (strings are iterable, so check explicitly)
    if isinstance(files, str):
        files = [files]
    for f in files:
        base, ext = os.path.splitext(f)
        if 'gz' in ext:
            # file already gzipped
            continue
        cmd = CommandLine('gzip %s' % f)
        cout = cmd.run()
        if not cout.runtime.returncode == 0:
            logging.error('Failed to zip %s' % (f))
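# Example usage (a minimal sketch, not part of the original snippet; the file
# names are hypothetical and gzip is assumed to be on the PATH):
#
# zip_files(['stats.nii', 'mask.nii'])   # gzips each file in place
# zip_files('single_volume.nii')         # a single path also works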