def __init__(self, name):
    """Load a JSON sidecar file and derive path/name fields from it.

    name : str
        Path to the .json file.  Exits the program (status 1) if name is
        not a str or if no file checking is done here beyond type.

    Sets: name, rel_path, full_path_name, full_path, full_path_prefix,
    file_name, prefix, ext, dict, fields, te_sec, te (ms), tr,
    slice_list, slice_str, and nii (afni_name of a matching NIfTI file,
    or afni_name("None") when no .nii/.nii.gz sibling exists).
    """
    # refuse non-string input early (isinstance is the idiomatic check)
    if not isinstance(name, str):
        print("\nError: " + str(name) + " must be of type 'str'!!\n")
        sys.exit(1)

    ## namey things
    self.name = name
    self.rel_path = os.path.dirname(name)
    self.full_path_name = os.path.abspath(name)
    self.full_path = os.path.dirname(self.full_path_name)
    self.full_path_prefix = os.path.splitext(self.full_path_name)[0]
    self.file_name = os.path.basename(name)
    # split once instead of calling splitext twice
    self.prefix, self.ext = os.path.splitext(self.file_name)

    ## read in and get list of fields
    self.dict = json_import(name)
    self.fields = self.dict.keys()

    ## some useful fields
    if "EchoTime" in self.fields:
        self.te_sec = self.dict["EchoTime"]
        self.te = self.te_sec * 1000    # convert seconds to milliseconds
    else:
        self.te_sec = None
        self.te = None

    # NOTE(review): missing TR defaults to "" while missing TE defaults to
    # None -- kept as-is since callers may rely on the empty string
    if "RepetitionTime" in self.fields:
        self.tr = self.dict["RepetitionTime"]
    else:
        self.tr = ""

    if "SliceTiming" in self.fields:
        self.slice_list = self.dict["SliceTiming"]
        self.slice_str = ' '.join(map(str, self.slice_list))
    else:
        self.slice_list = None
        self.slice_str = None

    ## find matching nifti and return afni_name object
    if os.path.isfile(self.full_path_prefix + ".nii.gz"):
        self.nii = afni_base.afni_name(self.full_path_prefix + ".nii.gz")
    elif os.path.isfile(self.full_path_prefix + ".nii"):
        self.nii = afni_base.afni_name(self.full_path_prefix + ".nii")
    else:
        self.nii = afni_base.afni_name("None")
# Resolve the tedana.py script to run: default to the copy bundled with
# the AFNI binaries, and abort if that path does not exist on disk.
if ted_bin is None:
    ted_bin = afni_dir + "/meica.libs/tedana.py"
if not os.path.exists(ted_bin):
    print("\nERROR: tedana.py version: " + ted_bin + " not found!!\n")
    sys.exit(1)
else:
    print("\nUsing: " + ted_bin + " for tedana.py.\n")

########################################################################
## proc loop

## place holder
# NOTE(review): p50 is presumably filled in later in this function
# (outside this view) -- confirm before removing
p50 = ""

## create the mask object here
mask_dset = afni_base.afni_name(mask)

# build per-echo intermediate dataset names under OutFolder
for e in range(0, len(echos)):
    ## make names for the intermediate datasets
    # cur_echo: the input echo dataset; mask_echo: same dataset renamed
    # to the "<OutName>_echo_<k>_masked" output (1-based echo number)
    cur_echo = afni_base.afni_name(echos[e])
    mask_echo = afni_base.afni_name(echos[e])
    mask_echo.new_prefix(OutName + "_echo_" + str(e + 1) + "_masked")
    mask_echo.new_path(OutFolder)

    # mask_scaled_echo = afni_base.afni_name(echos[e])
    # mask_scaled_echo.new_prefix(OutName+"_echo_"+str(e+1)+"_masked_scaled")
    # mask_scaled_echo.new_path(OutFolder)
    # mean_echo = afni_base.afni_name(echos[e])
' mask: mask volume dataset\n' \
' out.script: name for output script to write\n' \
'\n' \
'R Reynolds July, 2010\n' % (prog, prog)

# ---- parse the 4 required positional arguments:
#      dataset  brick_index  mask  out.script
narg = 0
if len(sys.argv) != 5:
    # -help (or no args) shows the full help; anything else is a usage error
    if len(sys.argv) == 1 or '-help' in sys.argv:
        print(helpstr)
        sys.exit(0)
    else:
        print('\n usage: %s dataset brick_index mask out.script\n' % prog)
        sys.exit(1)

# argv[1]: input dataset, must exist on disk
narg += 1
dset = B.afni_name(sys.argv[narg])
if not dset.exist():
    print('** dataset %s not found' % dset.rpv())
    sys.exit(1)

# argv[2]: sub-brick index, must parse as an int
# NOTE(review): bare except -- catches more than ValueError, but the
# message below is only about an invalid index
narg += 1
try:
    sub = int(sys.argv[narg])
except:
    print('** invalid sub-brick index %s' % sys.argv[narg])
    sys.exit(1)

# argv[3]: mask dataset, must exist on disk
narg += 1
mask = B.afni_name(sys.argv[narg])
if not mask.exist():
    print('** dataset %s not found' % mask.rpv())
def process_opts(self):
    """Apply each user option.

    Reads self.user_opts, setting: verb, input, prefix, script,
    kernel, kfile, maskset, tr, aname, reps.
    Returns 1 on any error, None on success.
    """

    # ----------------------------------------
    # set verb first, so later messages can honor it
    self.verb, err = self.user_opts.get_type_opt(int, '-verb')
    if err: return 1
    if self.verb is None: self.verb = gDEF_VERB

    # ----------------------------------------
    # required args
    self.input, err = self.user_opts.get_string_opt('-input')
    if self.input is None or err: return 1

    self.prefix, err = self.user_opts.get_string_opt('-prefix')
    if self.prefix is None or err: return 1

    self.script, err = self.user_opts.get_string_opt('-script')
    if self.script is None or err: return 1

    if self.verb > 1:
        print("-- have input '%s', script '%s', prefix '%s'" %
              (self.input, self.script, self.prefix))

    # ----------------------------------------
    # optional arguments
    val, err = self.user_opts.get_string_opt('-kernel')
    if err: return 1
    if val is not None: self.kernel = val

    val, err = self.user_opts.get_string_opt('-kernel_file')
    if err: return 1
    if val is not None: self.kfile = val

    val, err = self.user_opts.get_string_opt('-mask_dset')
    if err: return 1
    if val is not None: self.maskset = val

    val, err = self.user_opts.get_type_opt(float, '-tr')
    if err: return 1
    if val is not None: self.tr = val

    # ----------------------------------------
    # check over the inputs

    # check over -input as an AFNI dataset
    self.aname = BASE.afni_name(self.input)
    if self.aname is None: return 1
    if self.verb > 2: self.aname.show()

    if self.aname.type == '1D':
        if self.tr is None:
            print('** -tr is required if the input is in 1D format')
            # fix: previously fell through and continued with tr == None
            return 1
        self.reps = UTIL.max_dim_1D(self.input)
    else:
        # warn on unexpected type, but still try to read reps/tr from it
        if self.aname.type != 'BRIK':
            print("** unknown 'type' for -input '%s'" % self.input)
        err, self.reps, self.tr = \
            UTIL.get_dset_reps_tr(self.aname.pv(), self.verb)
        if err: return 1

    if self.verb > 1:
        print('-- using kernel %s, kfile %s, tr = %s, reps = %s' %
              (self.kernel, self.kfile, self.tr, self.reps))

    return None
def script_set_vars(self):
    """use variables for inputs (anat, epi, epi_base) and for
       options (cost_main, cost_list, align_opts)

       Build and return the "set processing variables" section of the
       generated (t)csh script as one string.  Also fills in several
       self.LV fields (svol, svset, vmask, vmset, spec, smask, smset,
       time_str) used by later script sections.
    """

    U = self.uvars    # for convenience

    # init with a section comment
    cmd = SUBJ.comment_section_string('set processing variables') + '\n'

    # maybe init with top_dir
    if not self.LV.is_trivial_dir('top_dir'):
        cmd += '# top data directory\n' \
               'set top_dir = %s\n\n' % self.LV.top_dir

    # surf_vol and vol_mask might use top_dir
    if self.LV.is_trivial_dir('top_dir'):
        # no top_dir: use the user-given paths directly
        # if self.cvars.val('on_surface') != 'yes': self.LV.svol = U.surf_vol
        # # rcr - fix this, surf_vol should not be needed if only on surface
        # (for getting the node count, avoid SurfMeasures or any other
        #  program that uses -sv)
        self.LV.svol = U.surf_vol
        self.LV.svset = BASE.afni_name(self.LV.svol)
        self.LV.vmask = U.vol_mask
        self.LV.vmset = BASE.afni_name(U.vol_mask)
        self.LV.spec = U.spec_file
        if self.uvars.surf_mask != '':
            self.LV.smset = BASE.afni_name(self.uvars.surf_mask)
            self.LV.smask = self.LV.smset.real_input()
    else:
        # have top_dir: reference inputs via the $top_dir script variable,
        # using the short names computed elsewhere
        self.LV.svol = '$top_dir/%s' % self.LV.short_names[0][0]
        self.LV.svset = BASE.afni_name(self.LV.svol)
        self.LV.spec = '$top_dir/%s' % self.LV.short_names[0][1]
        # slot [0][2] holds either the surface mask or the volume mask,
        # depending on whether processing is on the surface
        if self.cvars.val('on_surface') == 'yes':
            if self.uvars.surf_mask != '':
                self.LV.smask = '$top_dir/%s' % self.LV.short_names[0][2]
                self.LV.smset = BASE.afni_name(self.LV.smask)
        else:
            self.LV.vmask = '$top_dir/%s' % self.LV.short_names[0][2]
            self.LV.vmset = BASE.afni_name(self.LV.vmask)

    cmd += '# input datasets and surface specification file\n' \
           '# (absolute paths are used since inputs are not copied)\n' \
           'set surf_vol = %s\n' \
           'set spec_file = %s\n' % (self.LV.svol, self.LV.spec)

    if self.cvars.val('on_surface') != 'yes':
        cmd += 'set vol_mask = %s\n' % self.LV.vmask

    if self.LV.val('smask'):
        cmd += 'set surf_mask = %s\n' % self.LV.smask

    # as a list, these might come in as strings or floats, be generic
    plist = ['%s' % p for p in U.pthr_list]
    cmd += '\n' \
           '# iterations and blur/clust parameters\n' \
           'set niter = %d\n' \
           'set itersize = %d\n' \
           'set pthr_list = ( %s )\n\n' \
           'set blur = %g\n' \
           'set rmm = %g\n\n' \
           % (U.niter, U.itersize, ' '.join(plist), U.blur, U.rmm)

    cmd += '# surface mapping parameters\n' \
           'set surfA = %s\n' \
           'set surfB = %s\n' \
           'set map_func = %s\n' \
           'set nsteps = %d\n\n' \
           % (U.surfA, U.surfB, U.map_func, self.cvars.nsteps)

    if self.cvars.keepblocks > 0:
        cmd += '# note how many blocks to keep output datasets for\n' \
               'set keepblocks = %d\n\n' % self.cvars.keepblocks

    if self.cvars.time_process:
        cmd += "# prepare to possibly time programs (/usr/bin/time or '')\n" \
               "set time_str = /usr/bin/time \n\n"
        self.LV.time_str = '$time_str \\\n'
    else:
        self.LV.time_str = ''

    return cmd
def process_opts(self):
    """Apply each user option.

    Reads self.user_opts, setting: verb, infiles, kernel, kfile,
    maskset, stype, outdir, prefix, script, tr, tr_nup, aname, reps.
    Returns 1 on any error, None on success.
    """

    # ----------------------------------------
    # set verb first, so later messages can honor it
    self.verb, err = self.user_opts.get_type_opt(int, '-verb')
    if err: return 1
    if self.verb is None: self.verb = gDEF_VERB

    # ----------------------------------------
    # required args
    self.infiles, err = self.user_opts.get_string_list('-infiles')
    if self.infiles is None or err: return 1

    # ----------------------------------------
    # optional arguments
    val, err = self.user_opts.get_string_opt('-kernel')
    if err: return 1
    if val is not None: self.kernel = val

    val, err = self.user_opts.get_string_opt('-kernel_file')
    if err: return 1
    if val is not None: self.kfile = val

    val, err = self.user_opts.get_string_opt('-mask_dset')
    if err: return 1
    if val is not None: self.maskset = val

    # allow use of old method
    if self.user_opts.find_opt('-old'): self.stype = 'old'

    val, err = self.user_opts.get_string_opt('-outdir')
    if err: return 1
    if val is not None: self.outdir = val

    val, err = self.user_opts.get_string_opt('-prefix')
    if err: return 1
    if val is not None: self.prefix = val

    self.script, err = self.user_opts.get_string_opt('-script')
    if self.script is None or err: return 1

    val, err = self.user_opts.get_type_opt(float, '-tr')
    if err: return 1
    if val is not None: self.tr = val

    val, err = self.user_opts.get_type_opt(int, '-tr_nup')
    if err: return 1
    if val is not None: self.tr_nup = val

    # ----------------------------------------
    # check over the inputs
    if len(self.infiles) < 1:
        print('** missing option -infiles')
        return 1

    # check over -input as an AFNI dataset (use the first input file)
    self.aname = BASE.afni_name(self.infiles[0])
    if self.aname is None: return 1
    if self.verb > 2: self.aname.show()

    if self.aname.type == '1D':
        if self.tr is None:
            print('** -tr is required if the input is in 1D format')
            # fix: previously fell through and continued with tr == None
            return 1
        self.reps = UTIL.max_dim_1D(self.infiles[0])
    else:
        # warn on unexpected type, but still try to read reps/tr from it
        if self.aname.type != 'BRIK':
            print("** unknown 'type' for -input '%s'" % self.infiles[0])
        err, self.reps, self.tr = \
            UTIL.get_dset_reps_tr(self.aname.pv(), self.verb)
        if err: return 1

    if self.verb > 1:
        print('-- using kernel %s, kfile %s, tr = %s, reps = %s' %
              (self.kernel, self.kfile, self.tr, self.reps))

    return None
def process_opts(self):
    """Apply each user option for the EPI review script generator.

    Reads self.user_opts, setting: verb, dsets, adsets, script,
    windows, im_size, im_xoff, im_yoff, gr_size, gr_xoff, gr_yoff.
    Returns 1 on any error.
    """

    # ----------------------------------------
    # set verb first
    self.verb, err = self.user_opts.get_type_opt(int, '-verb')
    if err: return 1
    if self.verb is None: self.verb = gDEF_VERB

    # ----------------------------------------
    # required args
    opt = self.user_opts.find_opt('-dsets')
    if opt and opt.parlist:
        self.dsets = opt.parlist
        self.adsets = [BASE.afni_name(s) for s in opt.parlist]
    # NOTE(review): if -dsets was not given, self.dsets is presumably
    # initialized elsewhere (e.g. in __init__) -- confirm
    if len(self.dsets) < 1:
        print('** missing input datasets')   # fix: typo 'dataets'
        return 1

    # ----------------------------------------
    # optional arguments
    self.script, err = self.user_opts.get_string_opt('-script')
    if err: return 1
    if self.script is None: self.script = '@review_epi_data'
    # refuse to clobber an existing script file
    if os.path.isfile(self.script):
        print("** script file '%s' already exists, failing..." % self.script)
        return 1

    opt = self.user_opts.find_opt('-windows')
    if opt and opt.parlist: self.windows = opt.parlist
    if len(self.windows) < 1:
        print('** missing window list')
        return 1

    # ----------------------------------------
    # image coordinates (fall back to module defaults when unset)
    self.im_size, err = self.user_opts.get_type_list(int, '-im_size', 2,
                                                     'two', verb=self.verb)
    if err: return 1
    if not self.im_size: self.im_size = gDEF_IM_SIZE

    self.im_xoff, err = self.user_opts.get_type_opt(int, '-im_xoff')
    if err: return 1
    if not self.im_xoff: self.im_xoff = gDEF_IM_XOFF

    self.im_yoff, err = self.user_opts.get_type_opt(int, '-im_yoff')
    if err: return 1
    if not self.im_yoff: self.im_yoff = gDEF_IM_YOFF

    # ----------------------------------------
    # graph coordinates (fall back to module defaults when unset)
    self.gr_size, err = self.user_opts.get_type_list(int, '-gr_size', 2,
                                                     'two', verb=self.verb)
    if err: return 1
    if not self.gr_size: self.gr_size = gDEF_GR_SIZE

    self.gr_xoff, err = self.user_opts.get_type_opt(int, '-gr_xoff')
    if err: return 1
    if not self.gr_xoff: self.gr_xoff = gDEF_GR_XOFF

    self.gr_yoff, err = self.user_opts.get_type_opt(int, '-gr_yoff')
    if err: return 1
    if not self.gr_yoff: self.gr_yoff = gDEF_GR_YOFF
# if output prefixes were given, there must be exactly one per dataset
if new_prefix is not None:
    if len(new_prefix) != len(dset_list):
        print("\nError: " + str(len(new_prefix)) +
              " prefix(es) do(es) not match " + str(len(dset_list)) +
              " dataset(s)!!\n")
        sys.exit(1)

########################################################################
## go for all
for i in range(0, len(dset_list)):

    ####################################################################
    ## check input dataset

    ## name the dset
    dset = afni_base.afni_name(dset_list[i])

    ## get some info
    # query 3dinfo for existence, grid dimensions and TR in one call
    afni_cmd = ("3dinfo -exists -ni -nj -nk -TR " + dset.rppv())
    check_info = subprocess.check_output(afni_cmd, shell=True).split()

    ## is the input dataset there?
    # NOTE(review): under Python 3, check_output returns bytes, so
    # check_info[0] == "0" would never be true -- confirm this script
    # runs under Python 2 or that output is decoded upstream
    if check_info[0] == "0":
        print("\nError: " + dset.pv() +
              " does not exist or is not loadable!!\n")
        sys.exit(1)

    ####################################################################
    ## check for the json file and read it in

    ## automatically find matching .json and check it