def FileCat(self, src=None, dest=None, text=None):
    """Append the content of 'src' (or the string 'text') to 'dest'.

    'dest' may be a local filename or an already opened file object.
    Warning : Only local filenames !

    Fixes vs the original :
      - the cygwin path normalization is only applied when 'dest' is a
        filename (it crashed with AttributeError on file objects) ;
      - the source file handle is closed instead of being leaked ;
      - 'f2' is closed before the early "file not found" return.
    """
    assert dest != None and (src != None or text != None)
    opened_here = type(dest) != file
    if opened_here:
        #peter.zhang, for cygwin
        dest = dest.replace('\\', '/')
        dest = dest.replace('/cygdrive/', '')
        if dest.find(':/') < 0:
            dest = dest.replace('/', ':/', 1)
        if not os.path.isdir(dest) or os.access(dest, os.W_OK):
            f2 = open(dest, 'a')
        else:
            self._mess(ufmt(_(u'No write access to %s'), dest), '<F>_ERROR')
            return
    else:
        f2 = dest
    if text is None:
        if os.path.exists(src):
            if os.path.isfile(src):
                # read and close the source (the original leaked the handle)
                fsrc = open(src, 'r')
                text = fsrc.read()
                fsrc.close()
        else:
            if opened_here:
                f2.close()
            self._mess(ufmt(_(u'file not found : %s'), src),
                       '<F>_FILE_NOT_FOUND')
            return
    if text:
        f2.write(text)
    if opened_here:
        f2.close()
def Delete(self, rep, remove_dirs=True, verbose=None):
    """Delete a file or a directory tree (rm -rf ...).
    Set 'remove_dirs' to False to be sure to delete 'rep' only if it's a file.

    Runs the shell 'rm' command locally or on the remote host depending
    on 'rep'. Always returns None (the shell exit code is discarded).
    """
    if verbose == None:
        verbose = self.verbose
    iret = 0
    dico = self.filename2dict(rep)
    # preventing to delete first level directories (as /, /home, /usr...)
    # an absolute path with at most two components is refused
    if dico['name'][0] == '/' and len(dico['name'][:-1].split(os.sep)) <= 2:
        self._mess(ufmt(_(u'deleting this directory seems too dangerous. ' \
            '%s has not been removed'), dico['name']), '<A>_ALARM')
        return
    # choose the command template : recursive or file-only
    if remove_dirs:
        cmd = command['rm_dirs'] % {'args': dico['name']}
    else:
        cmd = command['rm_file'] % {'args': dico['name']}
    # short path shown in the progress message (last two components)
    tail = '.../' + '/'.join(rep.split('/')[-2:])
    comment = ufmt(_(u'deleting %s'), tail)
    if self.IsRemote(rep):
        iret, out = self.Shell(cmd, dico['mach'], dico['user'],
                               alt_comment=comment, verbose=verbose,
                               stack_id=4)
    else:
        iret, out = self.Shell(cmd, alt_comment=comment, verbose=verbose,
                               stack_id=4)
    return
def signal_job(self, jobid, signal):
    """Ask the batch scheduler to send 'signal' to the job 'jobid'.

    Returns the exit code of the scheduler command, or 4 when the
    scheduler does not support sending arbitrary signals.
    """
    if signal != 'KILL' and not self.supports_signal():
        self.run.Mess(
            _(u'Job scheduler does not know how to send a signal.'),
            '<A>_ALARM')
        return 4
    # build the substitution dictionary for the command template
    info = JobInfo()
    info.jobid = jobid
    params = self.config_dict()
    params.update(info.dict_values())
    params['signal'] = signal
    # optionally source the batch initialization file first
    prefix = '. %(batch_ini)s ; ' if params['batch_ini'] != '' else ''
    template = self.cmdkil if signal == 'KILL' else self.cmdsig
    cmd = (prefix + template) % params
    code, output = self.run.Shell(cmd)
    self.run.DBG(u'Output of job kill :', output, all=True)
    if code != 0:
        self.run.Mess(
            ufmt(_(u'Failure during killing a job. Error message :\n%s'),
                 output), '<A>_ALARM')
        self.run.Mess(ufmt(_(u'Command line: %s'), cmd))
    return code
def AddToEnv(self, profile):
    """Read 'profile' file (with sh/bash/ksh syntax) and add updated
    variables to os.environ.

    The profile is not parsed : the environment is dumped before and
    after sourcing it in a shell, and the difference is applied.
    """
    if not os.path.isfile(profile):
        self._mess(ufmt(_(u'file not found : %s'), profile),
                   '<A>_FILE_NOT_FOUND')
        return
    # read initial environment
    iret, out = self.Shell('%s env' % shell_cmd)
    self._dbg("env_init", out, all=True)
    env_init = env2dict(out)
    if iret != 0:
        self._mess(_(u'error getting environment'), '<E>_ABNORMAL_ABORT')
        return
    # read profile and dump modified environment
    iret, out = self.Shell('%s ". %s ; env"' % (shell_cmd, profile))
    self._dbg("env_prof", out, all=True)
    env_prof = env2dict(out)
    if iret != 0:
        self._mess(ufmt(_(u'error reading profile : %s'), profile),
                   '<E>_ABNORMAL_ABORT')
        return
    # "diff" : set every variable that appeared or changed
    for k, v in env_prof.items():
        if env_init.get(k, None) != v:
            self._dbg('AddToEnv set : %s=%s' % (k, v))
            os.environ[k] = convert(v)
    # unset every variable removed by the profile
    for k in [k for k in env_init.keys() if env_prof.get(k) is None]:
        self._dbg('unset %s ' % k, DBG=True)
        try:
            del os.environ[k]
        except:
            # best-effort : the variable may already be gone
            pass
def copyfileD(run, df, icomm, ncomm):
    """Copy datas from `df` into current directory.
    Raise only <E> if an error occurs run CheckOK() after.

    df : dict describing one data file (keys used here : 'ul', 'type',
         'path', 'compr').
    icomm/ncomm : index and total number of command files, used to
         suffix multiple command files.
    Returns the (possibly incremented) icomm.
    """
    dest = None
    # 1. ----- if logical unit is set : fort.*
    if df['ul'] != 0 or df['type'] in ('nom',):
        dest = 'fort.%d' % df['ul']
        # 'nom' files keep their original basename
        if df['type'] == 'nom':
            dest = osp.basename(df['path'])
        # exception for multiple command files (adding _N)
        if df['ul'] == 1:
            icomm += 1
            # zero-padded suffix wide enough for ncomm files
            format = '%%0%dd' % (int(log10(max(1, ncomm))) + 1)
            dest = dest + '.' + format % icomm
        # warning if file already exists
        if run.Exists(dest):
            run.Mess(ufmt(_(u"'%s' overwrites '%s'"), df['path'], dest),
                     '<A>_COPY_DATA')
        if df['compr']:
            dest = dest + '.gz'
    # 2. ----- bases and directories (ul=0)
    else:
        # base
        if df['type'] in ('base', 'bhdf'):
            dest = osp.basename(df['path'])
        # ensi
        elif df['type'] == 'ensi':
            dest = 'DONNEES_ENSIGHT'
        # repe
        elif df['type'] == 'repe':
            dest = 'REPE_IN'
    if dest is not None:
        # 3. --- copy
        kret = run.Copy(dest, df['path'], niverr='<E>_COPY_ERROR',
                        verbose=True)
        # 4. --- decompression
        if kret == 0 and df['compr']:
            kret, dest = run.Gunzip(dest, niverr='<E>_DECOMPRESSION',
                                    verbose=True)
        # 5. --- move the bases in main directory
        if df['type'] in ('base', 'bhdf'):
            for f in glob(osp.join(dest, '*')):
                run.Rename(f, osp.basename(f))
        # force the file to be writable
        make_writable(dest)
        # clean text files if necessary (CR/LF -> LF)
        if df['ul'] != 0 and run.IsTextFileWithCR(dest):
            file_cleanCR(dest)
            print3(ufmt(' ` ' + _(u'line terminators have been removed from %s'),
                        dest))
    return icomm
def _read_rc(self, ficrc, destdict, optional=False, mcsimp=None):
    """Read a resource file and store its variables into 'destdict'.

    A missing file is fatal unless 'optional' is True.
    """
    if not osp.isfile(ficrc):
        if not optional:
            print3(ufmt(_(u'file not found : %s'), ficrc))
            self.Sortie(4)
        return
    read_rcfile(ficrc, destdict, mcsimp=mcsimp)
def Func_tail(run, njob, nomjob, mode, nbline, expression=None): """Return the output the last part of fort.6 file or filter lines matching a pattern. """ # retrieve the job status etat, diag, node, tcpu, wrk, queue = Func_actu(run, njob, nomjob, mode) run.DBG(u"actu returns : etat/diag/node/tcpu/wrk/queue", (etat, diag, node, tcpu, wrk, queue)) # fill output file s_out = '' if mode == 'batch' and run['batch_nom'] == 'SunGE': s_out = _(u"Sorry I don't know how to ask Sun Grid Engine batch " \ "scheduler the running node.") if etat == 'RUN': cmd = "cat {fich}" if expression is None or expression.strip() == "": cmd += " | tail -{nbline}" else: cmd += " | egrep -- '{expression}'" # file to parse fich = osp.join(wrk, 'fort.6') run.DBG(ufmt(u'path to fort.6 : %s', fich)) if node != '_': mach = node fich = '%s:%s' % (node, fich) else: mach = '' # execute command if run.Exists(fich): fich = run.PathOnly(fich) jret, s_out = run.Shell(cmd.format(**locals()), mach=mach) return etat, diag, s_out
def apply_special_service(prof, run, on_client_side=False): """Return the profile modified for the "special" service.""" # allow customization of the modifier if run.get('schema_profile_modifier'): schem = get_plugin(run['schema_profile_modifier']) run.DBG("calling plugin : %s" % run['schema_profile_modifier']) serv, prof = schem(prof, on_client_side) serv = prof['special'][0].split(SEPAR)[0] if serv == "": if prof['distrib'][0] in YES_VALUES: serv = 'distribution' elif prof['exectool'][0] != '': serv = 'exectool' elif prof['multiple'][0] in YES_VALUES: serv = 'multiple' else: return serv, prof modifier = ModifierFactory(serv, prof, run, on_client_side) if modifier is None: run.Mess(ufmt(_(u"unknown service name : %s"), serv), '<F>_ERROR') modifier.modify() new_prof = modifier.return_profile() return serv, new_prof
def __init__(self, config_file, run=None, version_path=None):
    """config_file : filename of the 'config.txt' file to read
    run : AsterRun object (optional)
    version_path : directory of the version (if it is not the dirname
    of 'config_file').
    """
    # ----- initialisations
    self.config = {}
    self.filename = config_file
    self.dirn = version_path or get_absolute_dirname(config_file)
    # ----- reference to AsterRun object which manages the execution
    self.run = run
    if run is not None:
        self.verbose = run['verbose']
        self.debug = run['debug']
    else:
        self.verbose = False
        self.debug = False
    # ----- set optional/defaults values (ALWAYS AS LIST !)
    self.config = DEFAULTS.copy()
    # ----- read config file
    if not osp.isfile(config_file):
        self._mess(ufmt(_(u'file not found : %s'), config_file),
                   '<F>_FILE_NOT_FOUND')
    fobj = open(config_file, 'r')
    txt = fobj.read()
    fobj.close()
    self.config.update(self._parse(txt))
    if self.debug:
        print3('<DBG> <init> AsterConfig :')
        print3(self)
def Mess(self, msg, cod='', store=False): """Print a message sur stdout, 'cod' is an error code (format "<.>_...") <E> : continue, <F> : stop, <A> : alarm, continue, '' or INFO : for an info, SILENT : like an info without <INFO> string, TITLE : add a separator. If cod='<F>' (without a description), if exists last <E> error is transformed into <F>, else <F>_ABNORMAL_ABORT. If store is True, 'msg' is stored in print_on_exit dictionnary.""" # ----- gravite g0 = self.GetGrav(self.diag) g1 = self.GetGrav(cod) coderr = cod if cod == '<F>': if g0 < self.GetGrav('<E>'): coderr = '<F>_ABNORMAL_ABORT' else: coderr = self.diag.replace('<E>', '<F>') if g1 >= self.GetGrav('<A>'): self.DBG(u'Warning or error raised :', '%s %s' % (cod, msg), print_traceback=True) # ----- message if cod == '' or cod == 'INFO': fmt = self.fmt['msg_info'] coderr = '' elif cod == 'SILENT': fmt = self.fmt['silent'] coderr = '' elif cod == 'TITLE': fmt = self.fmt['title'] coderr = '' else: fmt = self.fmt['msg+cod'] # unknown flag if g1 == -9: coderr = '<I> '+coderr print3(ufmt(fmt, coderr, msg)) magic.get_stdout().flush() # ----- store in print_on_exit if store or (not self.ExitOnFatalError and g1 >= self.GetGrav('<S>')): k = '?' msg2 = msg mat = re.search('<(.)>', cod) if mat != None and mat.group(1) in ('A', 'S', 'E', 'F'): k = mat.group(1) msg2 = self.fmt['msg+cod'] % (coderr, msg) elif cod in ('OK', 'NOOK'): k = cod self.print_on_exit[k] = self.print_on_exit.get(k, []) + [msg2, ] # ----- diagnostic le plus défavorable if g1 > g0: self.diag = coderr if g1 == self.GetGrav('<F>'): self.Sortie(4)
def pre_exec(self, **kwargs):
    """Called at the beginning of 'execute' ; abort the whole task when
    the maximum number of errors has been reached."""
    total = sum(self.nbnook)
    if total < self.nbmaxnook:
        return
    per_thread = ', '.join([str(n) for n in self.nbnook])
    reason = ufmt(_(u'Maximum number of errors reached : %d (%d errors, per thread : %s)'),
                  self.nbmaxnook, total, per_thread)
    raise TaskAbort(reason, kwargs['result'])
def get_exec_command(self, cmd_in, add_tee=False, env=None):
    """Return command to run Code_Aster.

    cmd_in : base command line
    add_tee : duplicate output into fort.6 and propagate the exit code
    env : optional list of environment files to source first
    When running for real, wraps the command into an mpirun script.
    """
    run = magic.run
    # add source of environments files
    if env is not None and self.really():
        envstr = [". %s" % f for f in env]
        envstr.append(cmd_in)
        cmd_in = " ; ".join(envstr)
    dict_val = {
        'cmd_in' : cmd_in,
        'var' : 'EXECUTION_CODE_ASTER_EXIT_%s' % run['num_job'],
    }
    if add_tee:
        # keep the exit code visible in fort.6 through the pipe
        cmd_in = """( %(cmd_in)s ; echo %(var)s=$? ) | tee fort.6""" % dict_val
    if not self.really():
        if add_tee:
            # recover the real exit code stored in fort.6
            cmd_in += """ ; exit `grep -a %(var)s fort.6 | head -1 """ \
                """| sed -e 's/%(var)s=//'`""" % dict_val
        return cmd_in
    # ----- real (MPI) execution : generate mpi_script.sh from a template
    mpi_script = osp.join(self.global_reptrav, 'mpi_script.sh')
    # choose the copy command used by the script
    if run.get('use_parallel_cp') in YES_VALUES:
        cp_cmd = '%s --with-as_run %s %s' \
            % (osp.join(aster_root, 'bin', 'parallel_cp'),
               " ".join(run.get_remote_args()),
               self.global_reptrav)
    elif self.nbnode() > 1:
        cp_cmd = 'scp -r'
    else:
        cp_cmd = 'cp -r'
    dict_mpi_args = {
        'cmd_to_run' : cmd_in,
        'program' : mpi_script,
        'cp_cmd' : cp_cmd,
    }
    dict_mpi_args.update(self.build_dict_mpi_args())
    template = osp.join(datadir, 'mpirun_template')
    if not run.Exists(template):
        run.Mess(ufmt(_(u'file not found : %s'), template),
                 '<F>_FILE_NOT_FOUND')
    content = open(template, 'r').read() % dict_mpi_args
    run.DBG(content, all=True)
    open(mpi_script, 'w').write(convert(content))
    os.chmod(mpi_script, 0755)
    # add comment because cpu/system times are not counted by the timer
    self._add_timer_comment()
    # mpirun/mpiexec
    command = dict_mpi_args['mpirun_cmd'] % dict_mpi_args
    # need to initialize MPI session ?
    if dict_mpi_args['mpi_ini']:
        command = dict_mpi_args['mpi_ini'] % dict_mpi_args + " ; " + command
    # need to close MPI session ?
    if dict_mpi_args['mpi_end']:
        command = command + " ; " + dict_mpi_args['mpi_end'] % dict_mpi_args
    return command
def Symlink(self, src, link_name, verbose=True): """Create a symbolic link.""" if on_windows(): return self.Copy(link_name, src) self.VerbStart(ufmt(_(u'adding a symbolic link %s to %s...'), link_name, src), verbose=verbose) iret = 0 output = '' try: if osp.exists(link_name): self.Delete(link_name) os.symlink(src, link_name) except OSError, output: iret = 4 self._mess(ufmt(_(u'error occurs during creating a symbolic link' \ ' from %s to %s'), src, link_name), '<E>_SYMLINK')
def add_ct(self, l_file):
    """Add testcases to the list.

    l_file : list of testcase names ; when empty and 'alltest' is set,
    every '*.export' file found in the testcase directories is used.
    """
    if len(l_file) == 0 and self.alltest:
        if self.verbose:
            print3(
                ufmt(_(u"Searching '*.export' from %s..."),
                     ','.join(self.astest_dir)))
        l_file = '*'  # without extension
    found = set()
    for ct in l_file:
        matches = self.filename(ct, 'export', first_only=False)
        found.update([osp.basename(osp.splitext(t)[0]) for t in matches])
    # merge with testcases already registered, keep a sorted list
    found.update(self.liste_ct)
    self.liste_ct = sorted(found)
    if self.verbose:
        print3(ufmt(_(u'%6d testcases in the list.'), len(self.liste_ct)))
def build_config_of_version(run, label, filename=None, error=True):
    """Build an AsterConfig object of the version named 'label'.

    Returns None when the config file is missing and 'error' is False ;
    raises a fatal message otherwise.
    """
    vers_path = run.get_version_path(label)
    ficconf = os.path.join(vers_path, filename or 'config.txt')
    if not osp.exists(ficconf):
        if not error:
            return None
        run.Mess(ufmt(_(u'file not found : %s'), ficconf),
                 '<F>_FILE_NOT_FOUND')
    return AsterConfig(ficconf, run, vers_path)
def __init__(self, astest_dir, all=False, verbose=True):
    """Set up the testcase list manager.

    astest_dir : directory (or list of directories) of the testcases
    all : consider every testcase when no name is provided
    verbose : print informational messages
    """
    self.astest_dir = force_list(astest_dir)
    self.alltest = all
    self.verbose = verbose
    # selection lists, filled later
    self.liste_ct = []
    self.filter = []
    self.liste_crit = []
    self.list_para_test = list_para_test
    if self.verbose:
        print3(ufmt(_(u'Directory of testcases : %s'), self.astest_dir))
def WriteConfigTo(self, fich): """Dump the content of config file into 'filename'. """ try: #peter.zhang, for cygwin fich = fich.replace('\\', '/') fich = fich.replace('/cygdrive/', '') if fich.find(':/') < 0: fich = fich.replace('/', ':/', 1) open(fich, 'w').write(self._content) except IOError, msg: self._mess(ufmt(_(u'No write access to %s'), fich), '<F>_ERROR')
def call_plugin(action, prof, *args):
    """Import the plugin attached to 'action' and execute it.

    Returns whatever the plugin returns.
    """
    # the schema can be forced using the --schema option.
    schema_name = magic.run['schema'] \
        or ACTIONS[action]['default_schema']
    try:
        plugin = get_plugin(schema_name)
    except ImportError:
        magic.run.Mess(ufmt(_(u'can not import : %s'), schema_name),
                       '<F>_FILE_NOT_FOUND')
    return plugin(prof, args)
def MkDir(self, rep, niverr=None, verbose=None, chmod=0755):
    """Create the directory 'rep' (mkdir -p ...)

    Works remotely through a shell command, or locally with os.makedirs.
    NOTE(review): 'niverr' is defaulted here but never used in this
    body — confirm whether it should be passed to an error handler.
    """
    if niverr == None:
        niverr = '<F>_MKDIR_ERROR'
    if verbose == None:
        verbose = self.verbose
    iret = 0
    dico = self.filename2dict(rep)
    if self.IsRemote(rep):
        # remote : create and chmod in a single shell call
        cmd = 'mkdir -p %(dir)s ; chmod %(chmod)o %(dir)s' % {
            'dir': dico['name'],
            'chmod': chmod,
        }
        iret, out = self.Shell(cmd, dico['mach'], dico['user'],
                               verbose=verbose)
    else:
        dico['name'] = osp.expandvars(dico['name'])
        self.VerbStart(ufmt(_(u'creating directory %s'), dico['name']),
                       verbose)
        # ----- it's not tested in remote
        s = ''
        try:
            #peter.zhang, for cygwin
            dico['name'] = dico['name'].replace('\\', '/')
            dico['name'] = dico['name'].replace('/cygdrive/', '')
            if dico['name'].find(':/') < 0:
                dico['name'] = dico['name'].replace('/', ':/', 1)
            os.makedirs(dico['name'])
        except OSError, s:
            # maybe simultaneously created by another process
            if not os.path.isdir(dico['name']):
                iret = 4
        try:
            os.chmod(dico['name'], chmod)
        except OSError, s:
            self._mess(ufmt(_(u'can not change permissions on %s'), rep),
                       '<A>_ALARM')
def get_jobstate(self, jobid, jobname):
    """Return infos about this job : its state (PEND, RUN, SUSPENDED,
    ENDED), the node on and the queue in it is running, the cpu time
    spend.

    Performs one scheduler query for the state, and — when the job is
    running and a cpu command is configured — a second one for cpu time.
    """
    jobinf = JobInfo()
    jobinf.jobid, jobinf.jobname = jobid, jobname
    dcfg = self.config_dict()
    dcfg.update(jobinf.dict_values())
    # optionally source the batch initialization file first
    cmd = ''
    if dcfg['batch_ini'] != '':
        cmd = '. %(batch_ini)s ; '
    cmd += self.cmdjob
    cmd = cmd % dcfg
    iret, out = self.run.Shell(cmd)
    self.run.DBG(u'Output of job status :', out, all=True)
    if iret != 0:
        self.run.Mess(
            ufmt(
                _(u'Failure during retreiving job information. Error message :\n%s'
                  ), out), '<A>_ALARM')
        self.run.Mess(ufmt(_(u'Command line: %s'), cmd))
    jobinf = self.parse_jobstate_output(out, jobinf)
    # second query : cpu time, only for a running job
    if jobinf.state == 'RUN' and self.cmdcpu:
        cmd = ''
        if dcfg['batch_ini'] != '':
            cmd = '. %(batch_ini)s ; '
        cmd += self.cmdcpu
        cmd = cmd % dcfg
        iret, out = self.run.Shell(cmd)
        self.run.DBG(u'Output of job status (cpu) :', out, all=True)
        if iret != 0:
            self.run.Mess(
                ufmt(
                    _(u'Failure during retreiving job cpu information. Error message :\n%s'
                      ), out), '<A>_ALARM')
            self.run.Mess(ufmt(_(u'Command line: %s'), cmd))
        jobinf = self.parse_jobcpu(out, jobinf)
    return jobinf.as_func_actu_result()
def result(self, *l_resu, **kwargs):
    """Store the results of 'execute' after each task.

    Called thread-safely, so results can be accumulated in attributes.
    """
    done = len(self.exec_result)
    self.exec_result.extend(l_resu)
    for job, opts, diag, tcpu, tsys, ttot, telap, output in l_resu:
        done += 1
        if self.info >= 2:
            self.run.Mess(
                ufmt(_(u'%s completed (%d/%d), diagnostic : %s'),
                     job, done, self.nbitem, diag), 'SILENT')
def result(self, *l_resu, **kwargs):
    """Store the results of 'execute' after each task.

    Called thread-safely, so results can be accumulated in attributes.
    """
    done = len(self.test_result)
    self.test_result.extend(l_resu)
    for values in l_resu:
        # only the job name and its diagnostic are reported
        job, diag = values[0], values[2]
        done += 1
        if self.info >= 2:
            self.run.Mess(ufmt(_(u'%s completed (%d/%d), diagnostic : %s'),
                               job, done, self.nbitem, diag), 'SILENT')
def ProxyToServer(run, *args): """Work as a proxy to a server to run an action. An export file is required to get the informations to connect the server. If the action has not a such argument, it will be the first for calling through the proxy. The other arguments are those of the action. This option is intended to be called on a client machine (directly by the gui for example). """ # The options must be passed explictly for each action because their # meaning are not necessarly the same on client and server sides. # Example : "num_job" of client has no sense on the server. # An options list can be added to ACTIONS definitions. magic.log.info('-' * 70) run.DBG("'--proxy' used for action '%s' and args : %s" % (run.current_action, args)) dact = ACTIONS.get(run.current_action) # check argument if dact is None: run.parser.error(_(u"these action can not be called through the proxy : '--%s'") \ % run.current_action) if not (dact['min_args'] <= len(args) <= dact['max_args']): run.parser.error(_(u"'--%s' : wrong number of arguments (min=%d, max=%d)") \ % (run.current_action, dact['min_args'], dact['max_args'])) # read export from arguments prof = None if dact['export_position'] < len(args): profname = args[dact['export_position']] fprof = run.PathOnly(profname) if fprof != profname: run.DBG( "WARNING: --proxy should be called on a local export file, not %s" % profname) fprof = get_tmpname(run, run['tmp_user'], basename='profil_astk') iret = run.Copy(fprof, profname, niverr='<F>_PROFILE_COPY') run.ToDelete(fprof) if fprof == "None": # the client knows that the schema does not need an export file fprof = None elif not osp.isfile(fprof): run.Mess(ufmt(_(u'file not found : %s'), fprof), '<F>_FILE_NOT_FOUND') prof = AsterProfil(fprof, run) if fprof is not None: run.DBG("Input export : %s" % fprof, prof) iret = call_plugin(run.current_action, prof, *args) if type(iret) in (list, tuple): iret = iret[0] run.Sortie(iret)
def refused(self, job, opts, itemid, status):
    """Handle a job refused by the resource manager.

    Returns FAILURE when the job is dropped, None when it has been
    requeued for a later attempt.
    """
    opts['refused'] = opts.get('refused', 0) + 1
    result = FAILURE
    if status == OVERLIMIT:
        self.run.Mess(ufmt(_(u"job '%s' exceeds resources limit (defined through "
                             "hostfile), it will not be submitted."), job),
                      '<A>_LIMIT_EXCEEDED')
    elif status == NORESOURCE:
        # time elapsed since the last successful submission
        dt = time.time() - self.last_submit if self.last_submit > 0. else 0.01
        if self.info >= 2:
            self.run.Mess(ufmt(_(u"'%s' no resource available (attempt #%d, " \
                "no submission for %.1f s)."), job, opts['refused'], dt))
        if 0. < dt < self.timeout:
            # requeue the job for another attempt
            self.queue_put((job, opts))
            result = None
        else:
            self._mess_timeout(dt, self.timeout, job, opts['refused'])
    return result
def summary_line(self, job, opts, res, compatibility=False):
    """Return a summary line of the execution result."""
    args = [job]
    if compatibility:
        # legacy format includes the job order number
        args.append('(%d/%d)' % (opts.get('order', 0), self.nbitem))
        fmt, expect = fmt_resu_numb, 7
    else:
        res = res[:-1]
        fmt, expect = fmt_resu, 5
    args.extend(res)
    if len(args) != expect:
        return '%s : can not write the summary line' % job
    return ufmt(fmt, *args)
def check(self):
    """Check the text of the messages.

    Substitutes dummy arguments into every message format and raises
    CataMessageError listing the invalid ones.
    """
    # dummy values accepted by any '%(iN)d' / '%(rN)f' / '%(kN)s' slot
    def_args = {'ktout': "all strings !"}
    for i in range(1, 100):
        def_args['i%d' % i] = 99999999
        # not too big to avoid python issue1742669
        def_args['r%d' % i] = 1.234e16
        def_args['k%d' % i] = 'xxxxxx'
    error = []
    for num, msg in self.cata_msg.items():
        if type(msg) is dict:
            msg = msg['message']
        try:
            msg % def_args
        except Exception:
            # narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit are no longer swallowed
            error.append(ufmt(_(u'message #%s invalid : %s'), num, msg))
    if len(error) > 0:
        raise CataMessageError(self.fcata, os.linesep.join(error))
def Tail(run, *args):
    """Output the last part of fort.6 file or filter lines matching a
    pattern.

    args : njob, nomjob, mode, fdest, nbline [, expression]
    """
    # 5 mandatory arguments, optional 6th is the filter expression
    if len(args) < 5:
        run.parser.error(_(u"'--%s' takes at least %d arguments (%d given)") % \
            (run.current_action, 5, len(args)))
    elif len(args) > 6:
        run.parser.error(_(u"'--%s' takes at most %d arguments (%d given)") % \
            (run.current_action, 6, len(args)))
    # arguments
    njob, nomjob, mode, fdest, nbline = args[:5]
    expression = None
    if len(args) > 5:
        expression = args[5]
    # allow to customize of the executed function
    worker = Func_tail
    if run.get('schema_tail_exec'):
        worker = get_plugin(run['schema_tail_exec'])
        run.DBG("calling plugin : %s" % run['schema_tail_exec'])
    etat, diag, s_out = worker(run, njob, nomjob, mode, nbline, expression)
    print_tail_result(nomjob, njob, etat, diag)
    if s_out == "":
        s_out = _(u'the output is empty (job ended ?)')
    # exit if job isn't running
    run.PrintExitCode = False
    if run.get('result_to_output') or fdest == 'None':
        # print directly instead of writing a file
        run.DBG(_(u'tail put to output'))
        print3(s_out)
    else:
        # send output file
        if run.IsRemote(fdest):
            # write locally then copy to the remote destination
            ftmp = get_tmpname(run, run['tmp_user'], basename='tail')
            open(ftmp, 'w').write(convert(s_out))
            jret = run.Copy(fdest, ftmp)
        else:
            fdest = run.PathOnly(fdest)
            open(fdest, 'w').write(convert(s_out))
            run.DBG(ufmt(u'output written to : %s', fdest))
def CheckExtensions(self):
    """Initialize the optional extensions.

    Extensions are declared in 'as_run.extensions' files (user rc
    directory, then current directory) ; each section names a module
    providing a 'SetParser' entry point.
    """
    user_extensions = osp.join(get_home_directory(), self.rcdir,
                               'as_run.extensions')
    cwd_extensions = 'as_run.extensions'
    config = SafeConfigParser()
    l_read = config.read([user_extensions, cwd_extensions])
    l_ext = config.sections()
    for extension in l_ext:
        try:
            # module name without its extension
            filename = osp.splitext(config.get(extension, 'module'))[0]
            if self['verbose']:
                print3(ufmt(_(u'Loading extension %s from %s...'),
                            extension, filename))
            module = __import__(filename, globals(), locals(), ['SetParser'])
            init_function = getattr(module, 'SetParser')
            # let the extension register its command-line options
            init_function(self)
            if self['verbose']:
                print3(_(u'Extension %s loaded') % extension)
        except (ImportError, NoOptionError), msg:
            # a broken extension is reported but never fatal
            print3(_(u'Extension %s not loaded (reason : %s)')
                   % (extension, msg))
def submit(self):
    """Submit the script.
    Returns a tuple : (exitcode, jobid, queue)
    """
    # optionally source the batch initialization file first
    prefix = ''
    if self.dict_info['batch_ini'] != '':
        prefix = '. %(batch_ini)s ; '
    full_cmd = (prefix + self.cmdsub) % self.dict_info
    iret, out = self.run.Shell(full_cmd)
    self.run.DBG(u'Output of submitting :', out, all=True)
    if iret != 0:
        self.run.Mess(
            ufmt(_(u'Failure during submitting. Error message :\n%s'), out),
            '<A>_ALARM')
        # dump the submitted script to help debugging
        if osp.isfile(self.btc_file):
            self.run.DBG('submitted script :',
                         open(self.btc_file, 'r').read(), all=True)
    jobid, queue = self.parse_output(out)
    return iret, jobid, queue
def GetInfos(run, *l_hosts):
    """Query the given hosts and print (or write) their resource
    information."""
    run.PrintExitCode = False
    if len(l_hosts) < 1:
        run.parser.error(
            _(u"'--%s' requires one or more arguments") % run.current_action)
    numthread = run.GetCpuInfo('numthread')
    # request all hosts in parallel
    host_infos = {}
    task = GetInfosTask(run=run, silent=run["silent"],
                        host_infos=host_infos)
    dispatcher = Dispatcher(l_hosts, task, numthread)
    run.DBG(dispatcher.report())
    # build ResourceManager object and print its representation
    result = ResourceManager(host_infos).hostinfo_repr()
    dest = run.get('output')
    if dest:
        open(dest, 'w').write(result)
        print3(
            ufmt(_(u'The results have been written into the file : %s'),
                 dest))
    else:
        print3(result)