def _list_outputs(self):
    """Execute this module.

    Copies every file/directory registered in ``self.inputs._outputs``
    into ``base_directory`` and returns the resulting paths.

    Returns
    -------
    outputs : dict
        Output spec dict with ``out_file`` set to the list of sunk paths.
    """
    # Init variables
    outputs = self.output_spec().get()
    out_files = []
    # Use hardlink when the config allows it (avoids duplicating data)
    use_hardlink = str2bool(
        config.get('execution', 'try_hard_link_datasink'))
    outdir = os.path.abspath(self.inputs.base_directory)

    # Iterate through outputs attributes {key : path(s)}
    for key, files in list(self.inputs._outputs.items()):
        if not isdefined(files):
            continue
        files = ensure_list(files)

        # flattening list -- guard against an empty (but defined) list:
        # the original `files[0]` lookup raised IndexError for []
        if files and isinstance(files[0], list):
            files = [item for sublist in files for item in sublist]

        # Iterate through passed-in source files
        for src in ensure_list(files):
            # Format src and dst files; directories get a trailing separator
            src = os.path.abspath(src)
            if not os.path.isfile(src):
                src = os.path.join(src, '')
            dst = self._get_dst(src)
            dst = os.path.join(outdir, dst)

            # If src is a file, copy it to dst
            if os.path.isfile(src):
                copyfile(src, dst, copy=True, hashmethod='content',
                         use_hardlink=use_hardlink)
                out_files.append(dst)
            # If src is a directory, copy entire contents to dst dir
            elif os.path.isdir(src):
                if os.path.exists(dst) and self.inputs.remove_dest_dir:
                    shutil.rmtree(dst)
                copytree(src, dst)
                out_files.append(dst)

    # Return outputs dictionary
    outputs['out_file'] = out_files
    return outputs
def _run_and_test(opts, output_base):
    """Build a FAST interface from *opts* and assert that every listed
    output path resolves to a location under *output_base*."""
    base = os.path.realpath(output_base)
    listed = fsl.FAST(**opts)._list_outputs()
    for value in listed.values():
        if not value:
            continue
        for path in ensure_list(value):
            assert os.path.realpath(path).startswith(base)
def checkT1s(T1_files, cw256=False):
    """Verifying size of inputs and setting workflow parameters

    Parameters
    ----------
    T1_files : str or list of str
        Input T1 volume(s); all must share the same shape.
    cw256 : bool
        Whether MRI Convert should crop images to a 256 FOV; forced to
        True when any dimension exceeds 256.

    Returns
    -------
    (T1_files, cw256, resample_type, origvol_names)

    Note: imports are local because this function is executed in an
    isolated namespace (as a nipype Function node).
    """
    import sys
    import nibabel as nb
    from nipype.utils.filemanip import ensure_list

    T1_files = ensure_list(T1_files)
    if len(T1_files) == 0:
        print("ERROR: No T1's Given")
        sys.exit(-1)

    shape = nb.load(T1_files[0]).shape
    for t1 in T1_files[1:]:
        # FIX: was nb.load(t1, mmap=NUMPY_MMAP) -- NUMPY_MMAP is never
        # imported in this isolated namespace, so any multi-T1 input
        # raised NameError.  A plain load matches the first call above.
        if nb.load(t1).shape != shape:
            print("ERROR: T1s not the same size. Cannot process {0} and {1} "
                  "together".format(T1_files[0], t1))
            sys.exit(-1)

    # FreeSurfer-style zero-padded original-volume names (001.mgz, ...)
    origvol_names = ["{0:03d}.mgz".format(i + 1)
                     for i in range(len(T1_files))]

    # check if cw256 is set to crop the images if size is larger than 256
    if not cw256 and any(dim > 256 for dim in shape):
        print("Setting MRI Convert to crop images to 256 FOV")
        cw256 = True

    resample_type = 'cubic' if len(T1_files) > 1 else 'interpolate'
    return T1_files, cw256, resample_type, origvol_names
def analyze_pair_image_files(outdir, filelist, shape):
    """Write a random-valued ANALYZE image pair into *outdir* for each
    filename in *filelist*, all with the given data *shape*."""
    for fname in ensure_list(filelist):
        header = nb.Nifti1Header()
        header.set_data_shape(shape)
        voxels = np.random.random(shape)
        image = nb.AnalyzeImage(voxels, np.eye(4), header)
        image.to_filename(os.path.join(outdir, fname))
def cartesian_product(fwhms, in_files, usans, btthresh):
    """Expand each per-file input against every FWHM value.

    Returns four parallel lists: every input file, usan and brightness
    threshold is repeated once per smoothing kernel.
    """
    from nipype.utils.filemanip import ensure_list

    # normalize the inputs that may arrive as scalars
    in_files = ensure_list(in_files)
    if isinstance(fwhms, (int, float)):
        fwhms = [fwhms]

    # pair every element with every fwhm (underscore = repetition only)
    cart_in_file = [in_f for in_f in in_files for _ in fwhms]
    cart_fwhm = [width for _ in in_files for width in fwhms]
    cart_usans = [usan for usan in usans for _ in fwhms]
    cart_btthresh = [thresh for thresh in btthresh for _ in fwhms]

    return cart_in_file, cart_fwhm, cart_usans, cart_btthresh
def cartesian_product(fwhms, in_files, usans, btthresh):
    """Return parallel lists forming the cartesian product of the
    per-file inputs (files, usans, brightness thresholds) with the
    list of smoothing FWHMs."""
    from nipype.utils.filemanip import ensure_list

    # coerce possibly-scalar inputs into lists
    in_files = ensure_list(in_files)
    fwhm_list = [fwhms] if isinstance(fwhms, (int, float)) else fwhms

    repeat = len(fwhm_list)
    # each file/usan/threshold appears once per kernel width ...
    cart_in_file = [item for item in in_files for _ in range(repeat)]
    cart_usans = [item for item in usans for _ in range(repeat)]
    cart_btthresh = [item for item in btthresh for _ in range(repeat)]
    # ... while the kernel sequence cycles once per input file
    cart_fwhm = list(fwhm_list) * len(in_files)

    return cart_in_file, cart_fwhm, cart_usans, cart_btthresh
def _list_outputs(self):
    """Execute this module.

    Sinks every entry of ``self.inputs._outputs`` either to an S3
    bucket (when ``base_directory`` names one), to a local directory,
    or to both (when ``local_copy`` is also set).  Returns the output
    spec dict with ``out_file`` holding all destination paths.
    """
    # Init variables
    outputs = self.output_spec().get()
    out_files = []
    # Use hardlink when the config allows it
    use_hardlink = str2bool(
        config.get('execution', 'try_hard_link_datasink'))

    # Set local output directory if specified
    if isdefined(self.inputs.local_copy):
        outdir = self.inputs.local_copy
    else:
        outdir = self.inputs.base_directory
        # If base directory isn't given, assume current directory
        if not isdefined(outdir):
            outdir = '.'

    # Check if base directory reflects S3 bucket upload
    s3_flag, bucket_name = self._check_s3_base_dir()
    if s3_flag:
        s3dir = self.inputs.base_directory
        # If user overrides bucket object, use that
        if self.inputs.bucket:
            bucket = self.inputs.bucket
        # Otherwise fetch bucket object using name
        else:
            try:
                bucket = self._fetch_bucket(bucket_name)
            # If encountering an exception during bucket access, set output
            # base directory to a local folder
            except Exception as exc:
                s3dir = '<N/A>'
                if not isdefined(self.inputs.local_copy):
                    # fall back to ~/s3_datasink_<bucket> on the local disk
                    local_out_exception = os.path.join(
                        os.path.expanduser('~'),
                        's3_datasink_' + bucket_name)
                    outdir = local_out_exception
                # Log local copying directory
                iflogger.info(
                    'Access to S3 failed! Storing outputs locally at: '
                    '%s\nError: %s', outdir, exc)
    else:
        s3dir = '<N/A>'

    # If container input is given, append that to outdir
    if isdefined(self.inputs.container):
        outdir = os.path.join(outdir, self.inputs.container)
        s3dir = os.path.join(s3dir, self.inputs.container)

    # If sinking to local folder
    if outdir != s3dir:
        outdir = os.path.abspath(outdir)
        # Create the directory if it doesn't exist
        if not os.path.exists(outdir):
            try:
                os.makedirs(outdir)
            except OSError as inst:
                # benign race: another process created it first
                if 'File exists' in inst.strerror:
                    pass
                else:
                    raise (inst)

    # Iterate through outputs attributes {key : path(s)}
    for key, files in list(self.inputs._outputs.items()):
        if not isdefined(files):
            continue
        iflogger.debug("key: %s files: %s", key, str(files))
        files = ensure_list(files if files else [])
        tempoutdir = outdir
        if s3_flag:
            s3tempoutdir = s3dir
        # dotted key segments become nested subfolders; '@'-prefixed
        # segments are name-only and do not create a folder level
        for d in key.split('.'):
            if d[0] == '@':
                continue
            tempoutdir = os.path.join(tempoutdir, d)
            if s3_flag:
                s3tempoutdir = os.path.join(s3tempoutdir, d)

        # flattening list
        if files and isinstance(files, list):
            if isinstance(files[0], list):
                files = [item for sublist in files for item in sublist]

        # Iterate through passed-in source files
        for src in ensure_list(files):
            # Format src and dst files; directories get a trailing sep
            src = os.path.abspath(src)
            if not os.path.isfile(src):
                src = os.path.join(src, '')
            dst = self._get_dst(src)
            if s3_flag:
                s3dst = os.path.join(s3tempoutdir, dst)
                s3dst = self._substitute(s3dst)
            dst = os.path.join(tempoutdir, dst)
            dst = self._substitute(dst)
            path, _ = os.path.split(dst)

            # If we're uploading to S3
            if s3_flag:
                self._upload_to_s3(bucket, src, s3dst)
                out_files.append(s3dst)
            # Otherwise, copy locally src -> dst
            if not s3_flag or isdefined(self.inputs.local_copy):
                # Create output directory if it doesn't exist
                if not os.path.exists(path):
                    try:
                        os.makedirs(path)
                    except OSError as inst:
                        # benign race: directory already created
                        if 'File exists' in inst.strerror:
                            pass
                        else:
                            raise (inst)
                # If src == dst, it's already home
                if (not os.path.exists(dst)) or \
                        (os.stat(src) != os.stat(dst)):
                    # If src is a file, copy it to dst
                    if os.path.isfile(src):
                        iflogger.debug(f'copyfile: {src} {dst}')
                        copyfile(src, dst, copy=True,
                                 hashmethod='content',
                                 use_hardlink=use_hardlink)
                    # If src is a directory, copy
                    # entire contents to dst dir
                    elif os.path.isdir(src):
                        if (os.path.exists(dst)
                                and self.inputs.remove_dest_dir):
                            iflogger.debug('removing: %s', dst)
                            shutil.rmtree(dst)
                        iflogger.debug('copydir: %s %s', src, dst)
                        copytree(src, dst)
                out_files.append(dst)

    # Return outputs dictionary
    outputs['out_file'] = out_files
    return outputs
def __init__(self,
             input_names=None,
             output_names='out',
             function=None,
             imports=None,
             as_module=False,
             **inputs):
    """
    Parameters
    ----------
    input_names: single str or list or None
        names corresponding to function inputs
        if ``None``, derive input names from function argument names
    output_names: single str or list
        names corresponding to function outputs (default: 'out').
        if list of length > 1, has to match the number of outputs
    function : callable
        callable python object. must be able to execute in an
        isolated namespace (possibly in concert with the ``imports``
        parameter)
    imports : list of strings
        list of import statements that allow the function to execute
        in an otherwise empty namespace
    """
    super(Function, self).__init__(**inputs)
    if function:
        if as_module:
            # store a dotted import path instead of the source text
            module = inspect.getmodule(function).__name__
            full_name = "%s.%s" % (module, function.__name__)
            self.inputs.function_str = full_name
            # NOTE(review): in this branch ``fninfo`` is never bound, so
            # reaching the ``input_names is None`` lookup below would
            # raise NameError -- presumably callers always pass
            # input_names together with as_module; confirm.
        elif hasattr(function, '__call__'):
            # capture the function's source so it can be re-executed
            # in an isolated namespace
            try:
                self.inputs.function_str = getsource(function)
            except IOError:
                raise Exception('Interface Function does not accept '
                                'function objects defined interactively '
                                'in a python session')
            else:
                if input_names is None:
                    fninfo = function.__code__
        elif isinstance(function, (str, bytes)):
            # function given as source text; compile it only if we need
            # to introspect the argument names
            self.inputs.function_str = function
            if input_names is None:
                fninfo = create_function_from_source(function,
                                                     imports).__code__
        else:
            raise Exception('Unknown type of function')
        if input_names is None:
            # derive input names from the function's positional args
            input_names = fninfo.co_varnames[:fninfo.co_argcount]
    self.as_module = as_module
    # keep the stored source in sync if the trait is changed later
    self.inputs.on_trait_change(self._set_function_string, 'function_str')
    self._input_names = ensure_list(input_names)
    self._output_names = ensure_list(output_names)
    # expose one input trait per function argument
    add_traits(self.inputs, [name for name in self._input_names])
    self.imports = imports
    self._out = {}
    for name in self._output_names:
        self._out[name] = None
def nifti_image_files(outdir, filelist, shape):
    """Write a random-valued NIfTI volume of *shape* into *outdir*
    for every filename in *filelist*."""
    for fname in ensure_list(filelist):
        data = np.random.random(shape)
        image = nb.Nifti1Image(data, np.eye(4), None)
        image.to_filename(os.path.join(outdir, fname))
def nifti_image_files(outdir, filelist, shape):
    """Populate *outdir* with random NIfTI images, one per entry of
    *filelist*, each with data of the given *shape*."""
    affine = np.eye(4)
    for name in ensure_list(filelist):
        target = os.path.join(outdir, name)
        nb.Nifti1Image(np.random.random(shape), affine, None).to_filename(
            target)