def get_results(prof, args, **kwargs):
    """Download result files from the server."""
    run = magic.run
    magic.log.info(_(u"get result files from the server"))
    # read studyid
    study_prof = get_study_export(prof)
    if study_prof is None:
        return 4
    studyid = study_prof['studyid'][0]
    forig = osp.join(run['tmp_user'], "%s.orig.export" % studyid)
    # peter.zhang, for cygwin
    forig = forig.replace('\\', '/')
    # read server information
    scopy = build_server_from_profile(study_prof, TYPES.COPY_FROM, jobid=studyid)
    # get original export
    iret = scopy.copyfrom(forig)
    if iret != 0:
        magic.log.warn(_(u"the results seem already downloaded."))
        return iret
    oprof = AsterProfil(forig, run)
    magic.run.DBG("original export :\n%s" % repr(oprof), all=True)
    run_on_localhost = scopy.is_localhost()
    # copy results files
    if not run_on_localhost:
        local_resu = oprof.get_result().get_on_serv(local_full_host)
        local_nom, local_other = local_resu.get_type('nom', with_completion=True)
        iret = scopy.copyfrom(convert=unique_basename, *local_other.topath())
        jret = scopy.copyfrom(*local_nom.topath())
        iret = max(iret, jret)
        local_resu = local_resu.topath()
    else:
        local_resu = []
    remote_resu = oprof.get_result().get_on_serv(scopy.host, scopy.user).topath()
    all = set(oprof.get_result().topath())
    all.difference_update(local_resu)
    all.difference_update(remote_resu)
    if len(all) > 0:
        magic.log.warn(_(u"files on a third host should have been copied "
                         "at the end of the calculation (if possible) : %s"),
                       [e.repr() for e in all])
    # remove remote repository
    if iret == 0:
        scopy.delete_proxy_dir()
    return iret
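
# --- Illustrative usage sketch (not part of the original module) ----------
# A minimal driver for get_results(), assuming this module's globals
# (magic.run, AsterProfil) are already configured by the caller.  The export
# path and the None value passed for 'args' are hypothetical.
def _example_get_results(export_path='/tmp/study.export'):
    """Read a local export file and try to fetch the remote results."""
    prof = AsterProfil(export_path, magic.run)   # profile of the finished job
    iret = get_results(prof, None)               # 0 on success, != 0 otherwise
    return iret
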
def finalize(self, code=None):
    """End"""
    import sys
    import shutil
    from glob import glob
    sys.path.append('/opt/aster/lib/python2.7/site-packages')
    from asrun.profil import AsterProfil
    exp = glob('*.export')
    if len(exp) == 0:
        return
    assert len(exp) == 1, exp
    prof = AsterProfil(exp[0])
    comm = prof.get_type('comm')
    num = len(glob(FILENAME + '.*')) + 1
    lfn = [fname for fname in self.text.keys() if fname.startswith('fort.')]
    assert len(lfn) <= 1, lfn
    if len(lfn) == 0:
        fname = 'fort.1'
    else:
        fname = lfn[0]
    changes = self.text.get(fname, [])
    modified = '%s.%d' % (FILENAME, num)
    final = osp.basename(comm[num - 1].path)
    orig = open(fname, 'r').read().splitlines()
    new = orig[:]
    # apply the changes from the end of the file backwards so that the line
    # numbers of the remaining changes stay valid
    changes.reverse()
    for chg in changes:
        start, end, indent, txt = chg
        offset = ' ' * indent
        nlin = [offset + lin for lin in txt.splitlines()]
        new = new[:start] + nlin + new[end + 1:]
        txt = ["### filename : %s ###" % fname,
               "### lines range: %d-%d (indent: %d)" % tuple(chg[:3]),
               chg[-1], ]
    if new[-1].strip():
        new.append('')
    open(modified, 'w').write(os.linesep.join(new))
    shutil.copy(modified, osp.join(DEST, final))
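
# --- Illustrative sketch of the splice logic used above -------------------
# Each change is (start, end, indent, text): lines start..end (inclusive,
# 0-based) are replaced by 'text' indented by 'indent' spaces.  Applying the
# changes from the last one backwards keeps the indices of the remaining
# changes valid.  Stdlib only, independent of asrun.
def _apply_changes(lines, changes):
    new = lines[:]
    for start, end, indent, txt in sorted(changes, reverse=True):
        block = [' ' * indent + lin for lin in txt.splitlines()]
        new = new[:start] + block + new[end + 1:]
    return new

# _apply_changes(['a', 'b', 'c'], [(1, 1, 4, 'B1\nB2')])
# -> ['a', '    B1', '    B2', 'c']
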
def ProxyToServer(run, *args):
    """Work as a proxy to a server to run an action.
    An export file is required to get the information needed to connect to
    the server. If the action does not already take such an argument, it must
    be passed first when calling through the proxy. The other arguments are
    those of the action.
    This option is intended to be called on a client machine (directly by the
    gui for example).
    """
    # The options must be passed explicitly for each action because their
    # meaning is not necessarily the same on client and server sides.
    # Example : "num_job" of the client has no sense on the server.
    # An options list can be added to ACTIONS definitions.
    magic.log.info('-' * 70)
    run.DBG("'--proxy' used for action '%s' and args : %s"
            % (run.current_action, args))
    dact = ACTIONS.get(run.current_action)
    # check argument
    if dact is None:
        run.parser.error(_(u"these action can not be called through the proxy : '--%s'")
                         % run.current_action)
    if not (dact['min_args'] <= len(args) <= dact['max_args']):
        run.parser.error(_(u"'--%s' : wrong number of arguments (min=%d, max=%d)")
                         % (run.current_action, dact['min_args'], dact['max_args']))
    # read export from arguments
    prof = None
    if dact['export_position'] < len(args):
        profname = args[dact['export_position']]
        fprof = run.PathOnly(profname)
        if fprof != profname:
            run.DBG("WARNING: --proxy should be called on a local export file, not %s"
                    % profname)
            fprof = get_tmpname(run, run['tmp_user'], basename='profil_astk')
            iret = run.Copy(fprof, profname, niverr='<F>_PROFILE_COPY')
            run.ToDelete(fprof)
        if fprof == "None":
            # the client knows that the schema does not need an export file
            fprof = None
        elif not osp.isfile(fprof):
            run.Mess(ufmt(_(u'file not found : %s'), fprof), '<F>_FILE_NOT_FOUND')
        prof = AsterProfil(fprof, run)
    if fprof is not None:
        run.DBG("Input export : %s" % fprof, prof)
    iret = call_plugin(run.current_action, prof, *args)
    if type(iret) in (list, tuple):
        iret = iret[0]
    run.Sortie(iret)
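
# --- Illustrative sketch (hypothetical ACTIONS entry) ----------------------
# ProxyToServer() expects ACTIONS[action] to provide at least 'min_args',
# 'max_args' and 'export_position'.  The entry below is made up to show the
# keys used above; the real definitions live elsewhere in the asrun sources.
_EXAMPLE_ACTIONS = {
    'example_action': {
        'min_args': 1,
        'max_args': 2,
        'export_position': 0,   # index of the export file in the args tuple
    },
}

def _example_check_args(action, args, actions=_EXAMPLE_ACTIONS):
    """Same argument check as above, applied to the hypothetical table."""
    dact = actions.get(action)
    return dact is not None and dact['min_args'] <= len(args) <= dact['max_args']
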
def __init__(self, run, filename=None, prof=None, pid=None, differ_init=False):
    """Initializations"""
    BaseCalcul.__init__(self)
    assert filename or prof, 'none of (filename, prof) provided!'
    self.run = run
    if pid is None:
        self.pid = self.run['num_job']
    else:
        self.pid = pid
    self.studyid = self.pid
    if prof is not None:
        self.prof = prof
    else:
        # ----- profile filename
        fprof = get_tmpname(self.run, self.run['tmp_user'],
                            basename='profil_astk')
        self.run.ToDelete(fprof)
        kret = self.run.Copy(fprof, filename, niverr='<F>_PROFILE_COPY')
        self.prof = AsterProfil(fprof, self.run)
    if self.prof['nomjob'][0] == '':
        self.prof['nomjob'] = 'unnamed'
    # attributes
    self.dict_info = None
    self.as_exec_ref = self.run.get('as_exec_ref')
    self.diag = '?'
    self.__initialized = False
    if not differ_init:
        self.finalize_init()
def get_study_export(prof):
    """Return the original export."""
    run = magic.run
    # read server information
    jobid = prof['jobid'][0]
    serv = build_server_from_profile(prof, TYPES.COPY_FROM, jobid=jobid)
    dst = jobid + ".export"
    iret = serv.copyfrom(dst)
    if iret != 0:
        return None
    # read studyid
    study_prof = AsterProfil(dst, run)
    return study_prof
def read_para_ct(self, ct):
    """Read the parameters of a testcase."""
    export = self.filename(ct, 'export')
    if not export:
        para = self.filename(ct, 'para')
        assert para, 'neither .export nor .para found for %s' % ct
        return getpara(para, others=['liste_test'])
    pexp = AsterProfil(export, magic.run)
    dpara = {}
    for key in PARAMS:
        val = pexp[key][0]
        if val.isdigit():
            dpara[key] = int(val)
        else:
            try:
                dpara[key] = float(val)
            except ValueError:
                dpara[key] = val
    # compatibility
    dpara['mem_job'] = dpara['memory_limit']
    dpara['tps_job'] = dpara['time_limit']
    return 0, dpara, ''
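
# --- Illustrative sketch of the value coercion used above -----------------
# Export parameters are read as strings; read_para_ct() turns them into int,
# then float, and falls back to the raw string.  Stdlib only.
def _coerce(value):
    if value.isdigit():
        return int(value)
    try:
        return float(value)
    except ValueError:
        return value

# _coerce('8') -> 8, _coerce('0.25') -> 0.25, _coerce('batch') -> 'batch'
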
def serv_infos_prof(cfg):
    """Return an AsterProfil with the parameters to request a server."""
    dmap = MAPPING_ASTK_ASRUN
    run = magic.run
    prof = AsterProfil(run=run)
    serv = cfg.get("nom_complet") or cfg.get(dmap["nom_complet"])
    login = cfg.get("login") or cfg.get(dmap["login"])
    root = cfg.get("rep_serv") or cfg.get(dmap["rep_serv"])
    assert not (serv is None or login is None or root is None)
    prof["serveur"] = serv
    prof["username"] = login
    prof["aster_root"] = root
    if cfg.get("plate-forme"):
        prof["platform"] = cfg["plate-forme"]
    value = cfg.get("protocol_exec")
    if not value:
        value = "asrun.plugins.server.SSHServer"
        if run["remote_shell_protocol"] and \
           run["remote_shell_protocol"].find("RSH") > -1:
            value = "asrun.plugins.server.RSHServer"
    prof["protocol_exec"] = value
    value = cfg.get("protocol_copyfrom")
    if not value:
        value = "asrun.plugins.server.SCPServer"
        if run["remote_copy_protocol"] and \
           run["remote_copy_protocol"].find("RCP") > -1:
            value = "asrun.plugins.server.RCPServer"
    prof["protocol_copyfrom"] = value
    value = cfg.get("protocol_copyto")
    if not value:
        value = "asrun.plugins.server.SCPServer"
        if run["remote_copy_protocol"] and \
           run["remote_copy_protocol"].find("RCP") > -1:
            value = "asrun.plugins.server.RCPServer"
    prof["protocol_copyto"] = value
    value = cfg.get("proxy_dir", "/tmp/")
    prof["proxy_dir"] = value
    return prof
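
# --- Illustrative usage sketch (hypothetical configuration) ----------------
# serv_infos_prof() only needs a mapping that provides the astk keys used
# above (or their MAPPING_ASTK_ASRUN equivalents).  The values below are made
# up, and magic.run is assumed to be already configured.
def _example_serv_infos_prof():
    cfg = {
        'nom_complet': 'aster.example.com',     # hypothetical server name
        'login': 'someuser',
        'rep_serv': '/opt/aster',
        'protocol_exec': 'asrun.plugins.server.SSHServer',
        'proxy_dir': '/tmp/',
    }
    return serv_infos_prof(cfg)
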
def get_study_export(prof):
    """Return the original export."""
    run = magic.run
    # read server information
    serv = build_server_from_profile(prof, TYPES.COPY_FROM)
    jobid = prof['jobid'][0]
    jobname = prof['nomjob'][0]
    mode = prof['mode'][0]
    # flasheur is in the home directory
    dirname, fname = osp.split(
        flash_filename("flasheur", jobname, jobid, "export", mode))
    run.DBG("export file in %s is named %s" % (dirname, fname))
    # copy export file locally
    serv.set_proxy_dir(dirname)
    dst = osp.join(run['tmp_user'], 'flasheur_%s' % serv.host, fname)
    iret = serv.copyfrom(dst)
    if iret != 0:
        return None
    # read studyid
    study_prof = AsterProfil(dst, run)
    return study_prof
def build_export_from_files(run, lf, root="", with_default=True, with_results=False):
    """Build an export file from a list of files."""
    prof = AsterProfil(run=run)
    if with_default:
        prof.add_default_parameters()
    ddat = build_dict_file(run, prof, dict_typ_test(root), lf, ['com?', '[0-9]*'])
    dres = {}
    if with_results:
        dres = build_dict_file(run, prof, dict_typ_result(), lf)
    for dicf, dr in ((ddat, 'D'), (dres, 'R')):
        lcom_i = []
        for f, dico in dicf.items():
            if dico['type'] != 'comm' or osp.splitext(f)[-1] == '.comm':
                prof.Set(dr, dico)
            else:
                lcom_i.append([f, dico])
        lcom_i.sort()
        for f, dico in lcom_i:
            prof.Set(dr, dico)
    return prof
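
# --- Illustrative usage sketch (hypothetical file list) --------------------
# Typical input for build_export_from_files(): the data files of a testcase
# gathered from a source directory.  The file names below are made up and
# 'run' is assumed to be an already configured AsRun object.
def _example_build_export(run):
    lf = ['/tmp/sslv01a.comm', '/tmp/sslv01a.com1',
          '/tmp/sslv01a.mmed', '/tmp/sslv01a.21']
    prof = build_export_from_files(run, lf, root='sslv01a')
    return prof
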
def Creation_Fichier_Export_Esclave(self, tmp_macr_recal):
    """Create the .export file for the slave calculation."""
    # retrieve the .export file
    if self.export:
        export = self.export
    else:
        list_export = glob.glob('*.export')
        if len(list_export) == 0:
            UTMESS('F', 'RECAL0_4')
        elif len(list_export) > 1:
            UTMESS('F', 'RECAL0_5')
        export = list_export[0]

    # modify the profile
    prof = AsterProfil(export)

    # local execution
    user_mach = ''
    # user@hostname string (for distributed and batch calculations)
    try:
        username = prof.param['username'][0]
    except:
        try:
            username = os.getlogin()
        except:
            username = getpass.getuser()
    user_mach_dist = "%s@%s:" % (username, socket.gethostname())

    # look for a hostfile in order to add user@hostname
    l_fr = getattr(prof, 'data')
    l_tmp = l_fr[:]
    for dico in l_tmp:
        if dico['type'] == 'hostfile':
            user_mach = user_mach_dist
            break

    # parameters added by the CALCUL_ESCLAVE keyword
    if self.tpsjob:
        prof.param['tpsjob'] = str(self.tpsjob)
    if self.tpmax:
        prof.args['tpmax'] = str(self.tpmax)
    if self.mem_aster:
        prof.param['mem_aster'] = str(self.mem_aster)
    if self.memjob:
        prof.param['memjob'] = str(self.memjob)
    if self.memjeveux:
        prof.args['memjeveux'] = str(self.memjeveux)
    if self.mpi_nbcpu:
        prof.param['mpi_nbcpu'] = str(self.mpi_nbcpu)
    if self.mpi_nbnoeud:
        prof.param['mpi_nbnoeud'] = str(self.mpi_nbnoeud)

    # batch and distributed mode
    if self.MODE == 'BATCH':
        user_mach = user_mach_dist
        prof.param['mode'] = 'batch'
        # if self.mem_aster: prof.param['mem_aster'] = str(self.mem_aster)
        # choice of a reserved class
        if self.CLASSE:
            prof.param['classe'] = self.CLASSE

    # xterm
    if 'xterm' in prof.param:
        del prof.param['xterm']

    # files/directories
    for lab in ('data', 'resu'):
        l_fr = getattr(prof, lab)
        l_tmp = l_fr[:]
        for dico in l_tmp:
            # directories
            if dico['isrep']:
                # bases are not taken into account
                if dico['type'] in ('base', 'bhdf'):
                    l_fr.remove(dico)
                if lab == 'resu':
                    dico['path'] = user_mach + os.path.join(
                        tmp_macr_recal, os.path.basename(dico['path']))
            # files
            else:
                # name of the .mess file (to be retrieved in REPE_OUT)
                if dico['ul'] == 6:
                    self.nom_fichier_mess_fils = os.path.basename(dico['path'])
                # name of the .resu file (to be retrieved in REPE_OUT)
                if dico['ul'] == 8:
                    self.nom_fichier_resu_fils = os.path.basename(dico['path'])
                # the old .comm file is not taken into account
                # the file of logical unit UNITE_RESU (MACR_RECAL report)
                # is not taken into account
                if dico['type'] == 'comm' or (dico['ul'] == self.UNITE_RESU
                                              and lab == 'resu'):
                    l_fr.remove(dico)
                # the file of logical unit UNITE_ESCL becomes the new .comm
                elif dico['ul'] == self.UNITE_ESCL:
                    self.fichier_esclave = dico['path']
                    dico['type'] = 'comm'
                    dico['ul'] = 1
                    dico['path'] = user_mach + os.path.join(
                        os.getcwd(), 'fort.%d' % self.UNITE_ESCL)
                # all the other result files are removed
                elif lab == 'resu':
                    l_fr.remove(dico)
                # all the other data files
                elif lab == 'data':
                    if dico['type'] not in ('exec', 'ele'):
                        if dico['ul'] != 0:
                            # handle the case of overloaded Python sources.
                            # For remote/distributed runs the data files must
                            # be put in a place shared between the
                            # machines/nodes
                            if user_mach:
                                src = dico['path']
                                dst = os.path.join(
                                    tmp_macr_recal,
                                    os.path.basename(dico['path']))
                                try:
                                    shutil.copyfile(src, dst)
                                    dico['path'] = user_mach + os.path.join(
                                        tmp_macr_recal,
                                        os.path.basename(dico['path']))
                                except Exception as e:
                                    if debug:
                                        print(e)
                            else:
                                dico['path'] = user_mach + os.path.join(
                                    os.getcwd(), 'fort.%s' % dico['ul'])
                        # otherwise keep the entry unchanged
        setattr(prof, lab, l_fr)

    # write the new export file
    prof.WriteExportTo(self.new_export)
    if debug:
        os.system('cp ' + self.new_export + ' /tmp')
    line = format % locals()
    file = LockedFile(filename, mode='a+b', max_attempt=25, info=0,
                      lockdir=lockdir)
    file.write(line.strip() + os.linesep)


def log_usage_version_unfail(*args):
    """Log the version used (will never fail)."""
    try:
        return log_usage_version(*args)
    except:
        pass


if __name__ == '__main__':
    import getpass
    from asrun.profil import AsterProfil
    # filename = "/aster/log/usage_version.log"
    filename = magic.run['log_usage_version']
    prof = AsterProfil()
    prof['version'] = 'STA10'
    prof['mclient'] = 'claut682.der.edf.fr'
    prof['username'] = getpass.getuser()
    prof['nomjob'] = 'etude_avec_contact'
    prof['mode'] = 'interactif'
    log_usage_version(filename, prof)
def build_test_export(run, conf, REPREF, reptest, test, resutest=None,
                      with_default=True, d_unig=None):
    """Return a profile for a testcase."""
    lrep = [osp.join(REPREF, dt) for dt in conf['SRCTEST']]
    if reptest:
        lrep.extend(reptest)
    for rep in lrep:
        if run.IsRemote(rep):
            run.Mess(ufmt(_(u'reptest (%s) must be on exec host'), rep),
                     '<F>_INVALID_DIR')
    lrep = [run.PathOnly(rep) for rep in lrep]
    lrm = []
    if d_unig:
        d_unig = glob_unigest(d_unig, REPREF)
        lrm = set([osp.basename(f) for f in d_unig['test']])
    export = _existing_file(test + '.export', lrep, last=True)
    # new testcase with .export
    if export:
        prof = AsterProfil(run=run)
        if with_default:
            prof.add_default_parameters()
        pexp = AsterProfil(export, run)
        pexp.set_param_limits()
        for entry in pexp.get_data():
            if osp.basename(entry.path) in lrm:
                run.Mess(ufmt(_(u'deleting %s (matches unigest)'),
                              osp.basename(entry.path)))
                pexp.remove(entry)
            found = _existing_file(entry.path, lrep, last=True)
            if found is None:
                run.Mess(ufmt(_(u'file not found : %s'), entry.path),
                         '<E>_FILE_NOT_FOUND')
                pexp.remove(entry)
            else:
                entry.path = found
        pexp._compatibility()
        prof.update(pexp)
    else:
        # old version using .para
        lall = []
        for r in lrep:
            f = osp.join(r, '%s.*' % test)
            lall.extend(glob(f))
        lf = []
        for f in lall:
            if osp.basename(f) in lrm:
                run.Mess(ufmt(_(u'deleting %s (matches unigest)'),
                              osp.basename(f)))
            else:
                lf.append(f)
        if not lf:
            run.Mess(ufmt(_(u'no such file : %s.*'), test),
                     '<E>_FILE_NOT_FOUND')
        prof = build_export_from_files(run, lf, test, with_default=with_default)
    if resutest:
        ftyp = {'resu': 8, 'mess': 6, 'code': 15}
        for typ, ul in ftyp.items():
            new = ExportEntry(osp.join(resutest, '%s.%s' % (test, typ)),
                              type=typ, ul=ul, result=True)
            prof.add(new)
    return prof
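
# --- Illustrative usage sketch (hypothetical arguments) --------------------
# build_test_export() gathers the files of one testcase either from its
# .export or from its .para file.  The paths and testcase name below are
# placeholders; run, conf and REPREF come from the regular asrun setup.
def _example_build_test_export(run, conf, REPREF):
    prof = build_test_export(run, conf, REPREF,
                             reptest=['/home/user/dev/astest'],  # extra dirs
                             test='sslv01a',
                             resutest='/tmp/resutest')
    return prof
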
def Func_actu(run, *args):
    """Return state, diagnosis, node, cpu time and working directory of a job."""
    if len(args) != 3:
        run.parser.error(_(u"'--%s' takes exactly %d arguments (%d given)") %
                         (run.current_action, 3, len(args)))
    njob, nomjob, mode = args
    # defaults
    etat = '_'
    diag = '_'
    node = '_'
    tcpu = '_'
    wrk = '_'
    queue = '_'
    psout = ''
    # the real job id may differ
    jobid = str(njob)
    # astk profile
    pr_astk = osp.join(run['flasheur'], '%s.p%s' % (nomjob, njob))
    prof = None
    if osp.isfile(pr_astk):
        prof = AsterProfil(pr_astk, run)
        wrk = prof['rep_trav'][0] or '_'
    # 1. get information about the job
    # 1.1. batch mode
    if mode == "batch":
        m = 'b'
        scheduler = BatchSystemFactory(run, prof)
        etat, diag, node, tcpu, wrkb, queue = scheduler.get_jobstate(njob, nomjob)
    # 1.2. interactive mode
    elif mode == "interactif":
        m = 'i'
        # if it doesn't exist the job is ended
        etat = "ENDED"
        if prof is not None:
            node = prof['noeud'][0]
    else:
        run.Mess(_(u'unexpected mode : %s') % mode, '<F>_UNEXPECTED_VALUE')
    # 2. query the process
    if node != '_':
        if mode == "interactif" or tcpu == '_':
            jret, psout = run.Shell(run['ps_cpu'], mach=node)
            # ended ?
            if mode == "interactif" and psout.find('btc.%s' % njob) > -1:
                etat = "RUN"
    # 3.1. the job is ended
    if etat == "ENDED":
        fdiag = osp.join(run['flasheur'], '%s.%s%s' % (nomjob, m, njob))
        if osp.isfile(fdiag):
            diag = open(fdiag, 'r').read().split(os.linesep)[0] or "?"
        if diag == '?':
            diag = '<F>_SYSTEM'
            # try to find something in output
            fout = osp.join(run['flasheur'], '%s.o%s' % (nomjob, njob))
            if osp.isfile(fout):
                f = open(fout, 'r')
                for line in f:
                    if line.find('--- DIAGNOSTIC JOB :') > -1:
                        diag = line.split()[4]
                    elif line.find('Cputime limit exceeded') > -1:
                        diag = '<F>_CPU_LIMIT_SYSTEM'
                f.close()
            # copy fort.6 to '.o'
            if node != '_':
                ftcp = get_tmpname(run, run['tmp_user'], basename='actu')
                # same name as in the btc script generated by calcul.py
                wrk6 = get_nodepara(node, 'rep_trav', run['rep_trav'])
                fort6 = osp.join(wrk6, '%s.%s.fort.6.%s' % (nomjob, njob, m))
                jret = run.Copy(ftcp, '%s:%s' % (node, fort6), niverr='SILENT')
                if osp.isfile(ftcp):
                    txt = [os.linesep * 2]
                    txt.append('=' * 48)
                    txt.append('===== Pas de diagnostic, recopie du fort.6 =====')
                    txt.append('=' * 48)
                    txt.append(open(ftcp, 'r').read())
                    txt.append('=' * 48)
                    txt.append('=' * 48)
                    txt.append(os.linesep * 2)
                    f = open(fout, 'a')
                    f.write(os.linesep.join(txt))
                    f.close()
    else:
        # 3.2. job is running
        if etat in ('RUN', 'SUSPENDED'):
            # working directory
            if wrk == '_':
                wrk = get_tmpname(run, basename=mode, node=node, pid=njob)
            if etat == 'RUN' and tcpu == '_':
                # tcpu may have been retrieved upper
                l_tcpu = []
                for line in psout.split(os.linesep):
                    if re.search('\-\-num_job=%s' % njob, line) != None and \
                       re.search('\-\-mode=%s' % mode, line) != None:
                        l_tcpu.append(
                            re.sub('\..*$', '', line.split()[0]).replace('-', ':'))
                if len(l_tcpu) > 0:
                    try:
                        tcpu = dhms2s(l_tcpu)
                    except ValueError:
                        pass
    # 4. return the result
    if node == "":
        node = "_"
    return etat, diag, node, tcpu, wrk, queue
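
# --- Illustrative sketch of the cpu-time parsing done above ----------------
# The 'ps' TIME column looks like "[[dd-]hh:]mm:ss(.xx)".  Func_actu strips
# the fractional part, normalizes the separators and lets dhms2s() sum the
# fields.  The helper below reproduces that conversion with the stdlib only;
# it is not asrun's dhms2s().
import re

def _ps_time_to_seconds(field):
    """Convert '[[dd-]hh:]mm:ss(.xx)' into a number of seconds."""
    field = re.sub(r'\..*$', '', field)
    days = 0
    if '-' in field:
        days, field = field.split('-', 1)
        days = int(days)
    seconds = 0
    for part in field.split(':'):
        seconds = seconds * 60 + int(part)
    return days * 86400 + seconds

# _ps_time_to_seconds('1-02:03:04.56') -> 93784
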
def run_distrib(self, list_val):
    """Launch N+1 calculations with the asrun distributed calculations module."""
    # ----------------------------------------------------------------------------
    # Parameters
    # ----------------------------------------------------------------------------
    # Code_Aster installation
    ASTER_ROOT = self.ASTER_ROOT
    as_run = self.as_run
    # General
    resudir = self.resudir
    clean = self.clean
    info = self.info
    # Study
    export = self.export
    # MACR_RECAL inputs
    parametres = self.parametres
    calcul = self.calcul
    experience = self.experience
    parametres = self.parametres
    calcul = self.calcul
    experience = self.experience
    CalcGradient = self.CalcGradient
    NMAX_SIMULT = self.NMAX_SIMULT

    # ----------------------------------------------------------------------------
    # Import of the ASTK Python modules
    # ----------------------------------------------------------------------------
    if not ASTER_ROOT:
        try:
            ASTER_ROOT = os.environ['ASTER_ROOT']
        except:
            pass
    try:
        sys.path.append(os.path.join(ASTER_ROOT, 'ASTK', 'ASTK_SERV', 'lib'))
        sys.path.append(os.path.join(ASTER_ROOT, 'lib',
                                     'python%s.%s' % (sys.version_info[0],
                                                      sys.version_info[1]),
                                     'site-packages'))
    except:
        pass
    assert is_list_of_dict(list_val)
    nbval = len(list_val)

    # ----------------------------------------------------------------------------
    # Generation of the slave studies
    # ----------------------------------------------------------------------------
    sys.argv = ['']
    run = AsRunFactory()
    # if info<=2: run.options['debug_stderr'] = False
    if self.unity_follow and info == 2:
        run.options['debug_stderr'] = True
    else:
        # no output of the slave executions in the master output
        run.options['debug_stderr'] = False

    # Master profile
    prof = AsterProfil(run=run, filename=export)
    tmp_param = tempfile.mkdtemp()
    try:
        username = prof.param['username'][0]
    except:
        username = os.environ['LOGNAME']
    try:
        noeud = prof.param['noeud'][0]
    except:
        noeud = platform.uname()[1]
    tmp_param = "%s@%s:%s" % (username, noeud, tmp_param)
    prof.Set('R', {'type': 'repe', 'isrep': True, 'ul': 0, 'compr': False,
                   'path': tmp_param})
    if info >= 2:
        print(prof)

    # If batch is not possible, switch to interactive mode
    if not prof['mode'][0] or (prof['mode'][0] == 'batch'
                               and run.get('batch') == 'non'):
        UTMESS('I', 'RECAL0_28', valk=noeud)
        prof['mode'] = 'interactif'
    prof['version'] = ExecutionParameter().get_option("rcdir")

    # result directories
    if resudir:
        if not os.path.isdir(resudir):
            try:
                os.mkdir(resudir)
            except:
                if info >= 1:
                    UTMESS('A', 'RECAL0_82', valk=resudir)
                resudir = None
    if not resudir:
        # By default, a subdirectory of the execution directory
        pref = 'tmp_macr_recal_'
        # Look for a hostfile in order to put the files in a shared directory
        l_fr = getattr(prof, 'data')
        l_tmp = l_fr[:]
        for dico in l_tmp:
            if dico['type'] == 'hostfile':
                pref = get_shared_tmpdir('tmp_macr_recal_')
                break
        # In batch mode, put the files in a shared directory
        if prof['mode'][0] == 'batch':
            pref = get_shared_tmpdir('tmp_macr_recal1_')
        resudir = tempfile.mkdtemp(prefix=pref)
    flashdir = os.path.join(resudir, 'flash')
    if info >= 1:
        UTMESS('I', 'RECAL0_81', valk=resudir)
    prof.WriteExportTo(os.path.join(resudir, 'master.export'))

    # get hostrc object
    hostrc = get_hostrc(run, prof)
    # timeout before a job is rejected
    timeout = prof.get_timeout()

    # Add the table printing at the end of the .comm
    t = []
    reponses = calcul
    for i in range(len(reponses)):
        _ul = str(int(100 + i))
        num_ul = '99'
        # For dynamics, the table holding the MAC matrix gets a different treatment
        if self.DYNAMIQUE:
            if ('MAC' in reponses[i][2]):
                t.append(self.ajout_post_mac(reponses[i]))
                try:
                    os.remove('tmp_macr_recal' + os.sep + "REPE_TABLE"
                              + os.sep + "fort." + _ul)
                except:
                    pass
        t.append("\n# Recuperation de la table : " + str(reponses[i][0]) + "\n")
        t.append("DEFI_FICHIER(UNITE=" + num_ul + ", FICHIER='"
                 + os.path.join('.', 'REPE_OUT', 'fort.' + _ul) + "',);\n")
        t.append("IMPR_TABLE(TABLE=" + str(reponses[i][0])
                 + ", FORMAT='ASTER', UNITE=" + num_ul
                 + ", INFO=1, FORMAT_R='E30.20',);\n")
        t.append("DEFI_FICHIER(ACTION='LIBERER', UNITE=" + num_ul + ",);\n")

    # For dynamics only
    if self.DYNAMIQUE:
        if (self.DYNAMIQUE['APPARIEMENT_MANUEL'] == 'OUI' and self.graph_mac
                and (True in ['MAC' in reponses[ii][2]
                              for ii in range(len(reponses))])):
            # try to invert the list of frequencies possibly changed by the MAC window
            for ind_rep in range(len(reponses)):
                if reponses[ind_rep][2] == 'FREQ':
                    # retrieve the frequencies table
                    t.append("data1 = " + reponses[ind_rep][0]
                             + ".EXTR_TABLE().Array('" + reponses[ind_rep][1]
                             + "','FREQ')\n")
                    t.append("nume_freq=data1[:,0].tolist()\n")
                    t.append("val_freq=data1[:,1].tolist()\n")
                    # val_freq_permute holds the permuted list of frequencies
                    t.append("val_freq_permute=[]\n")
                    t.append("for ii in range(len(list_num_pour_freq)):\n")
                    t.append("    if list_num_pour_freq[ii]==list_exp_pour_freq[ii]:\n")
                    t.append("        val_freq_permute.append(val_freq[ii])\n")
                    t.append("    else:\n")
                    t.append("        ii_p= list_exp_pour_freq.index(list_num_pour_freq[ii])\n")
                    t.append("        val_freq_permute.append(val_freq[ii_p])\n")
                    t.append("DETRUIRE(CONCEPT=_F(NOM=" + str(reponses[ind_rep][0]) + "),)\n")
                    t.append(reponses[ind_rep][0] + "=CREA_TABLE(LISTE=(_F(PARA='"
                             + reponses[ind_rep][1]
                             + "',LISTE_I=nume_freq,),"
                             "_F(PARA='FREQ',LISTE_R=val_freq_permute,),),)\n")

    # number of threads to follow execution
    numthread = 1

    # ----------------------------------------------------------------------------
    # Execution of the slave studies
    # ----------------------------------------------------------------------------
    # ----- Execute calculations in parallel using a Dispatcher object
    # elementary task...
    task = DistribParametricTask(run=run, prof=prof,  # IN
                                 hostrc=hostrc,
                                 nbmaxitem=self.NMAX_SIMULT,
                                 timeout=timeout,
                                 resudir=resudir, flashdir=flashdir,
                                 keywords={'POST_CALCUL': '\n'.join(t)},
                                 info=info,
                                 nbnook=[0, ] * numthread,
                                 exec_result=[])  # OUT
    # ... and dispatch task on 'list_tests'
    etiq = 'calc_%%0%dd' % (int(log10(nbval)) + 1)
    labels = [etiq % (i + 1) for i in range(nbval)]
    couples = list(zip(labels, list_val))
    if info >= 2:
        print(couples)
    execution = Dispatcher(couples, task, numthread=numthread)

    # ----------------------------------------------------------------------------
    # List of the diagnostics
    # ----------------------------------------------------------------------------
    d_diag = {}
    for result in task.exec_result:
        label = result[0]
        diag = result[2]
        if len(result) >= 8:
            output_filename = os.path.join('~', 'flasheur', str(result[7]))
        else:
            output_filename = ''
        d_diag[label] = diag
        # Display the slave output in the master output
        if self.unity_follow:
            affiche(unity=self.unity_follow, filename=output_filename,
                    label=label, filetype='stdout')
        # Slave calculation NOOK
        if not diag[0:2] in ['OK', '<A']:
            # Display the slave output and/or error in the master output
            try:
                affiche(unity=None, filename=output_filename, label=label,
                        filetype='stdout')
                error_filename = '.'.join(output_filename.split('.')[0:-1]) \
                    + '.e' + output_filename.split('.')[-1][1:]
                affiche(unity=None, filename=error_filename, label=label,
                        filetype='stderr')
            except Exception as e:
                print(e)
            if diag in ['<F>_NOT_RUN', '<A>_NOT_SUBMITTED']:
                UTMESS('F', 'RECAL0_86', valk=(label, diag))
            else:
                UTMESS('A', 'RECAL0_83', valk=(label, output_filename))
    if not d_diag:
        UTMESS('F', 'RECAL0_84', valk=resudir)
    self.list_diag = [d_diag[label] for label in labels]

    # ----------------------------------------------------------------------------
    # Stop if all the jobs did not run correctly
    # ----------------------------------------------------------------------------
    if sum(task.nbnook) > 0:
        UTMESS('F', 'RECAL0_84', valk=resudir)

    # ----------------------------------------------------------------------------
    # Retrieve the computed tables
    # ----------------------------------------------------------------------------
    Lcalc = []
    i = 0
    for c in labels:
        tbl = get_tables(tables_calc=calcul,
                         tmp_repe_table=os.path.join(resudir, c, 'REPE_OUT'),
                         prof=prof)
        Lcalc.append(tbl)   # stored as a list of numpy arrays
        i += 1

    # ----------------------------------------------------------------------------
    # Compute the functional and the gradient
    # ----------------------------------------------------------------------------
    if debug:
        print("AA4/Lcalc=", Lcalc)
    fonctionnelle, gradient = self.calc2fonc_gradient(Lcalc)

    # ----------------------------------------------------------------------------
    # Clean result directories
    # ----------------------------------------------------------------------------
    if clean:
        shutil.rmtree(resudir, ignore_errors=True)

    # ----------------------------------------------------------------------------
    # Save all calculated responses
    # ----------------------------------------------------------------------------
    self.Lcalc = Lcalc
    return fonctionnelle, gradient
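
# --- Illustrative sketch of the label generation used above ----------------
# run_distrib() names the slave calculations calc_01, calc_02, ... with just
# enough zero padding for 'nbval' jobs.  Stdlib-only restatement of that line.
from math import log10

def _make_labels(nbval):
    fmt = 'calc_%%0%dd' % (int(log10(nbval)) + 1)
    return [fmt % (i + 1) for i in range(nbval)]

# _make_labels(12) -> ['calc_01', 'calc_02', ..., 'calc_12']
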
def macr_recal(self, UNITE_ESCL, RESU_EXP, POIDS, LIST_PARA, RESU_CALC,
               ITER_MAXI, ITER_FONC_MAXI, RESI_GLOB_RELA, UNITE_RESU,
               PARA_DIFF_FINI, GRAPHIQUE, METHODE, INFO, **args):
    ASTER_ROOT = os.environ['ASTER_ROOT']
    try:
        sys.path.append(os.path.join(ASTER_ROOT, 'ASTK', 'ASTK_SERV', 'lib'))
        sys.path.append(os.path.join(ASTER_ROOT, 'lib',
                                     'python%s.%s' % (sys.version_info[0],
                                                      sys.version_info[1]),
                                     'site-packages'))
    except:
        pass

    #_____________________________________________
    #
    # RETRIEVE THE PROFILE OF THE MASTER CALCULATION
    #_____________________________________________
    # Read the .export file in the temporary execution directory
    list_export = glob.glob('*.export')
    if len(list_export) == 0:
        UTMESS('F', 'RECAL0_4')
    elif len(list_export) > 1:
        UTMESS('F', 'RECAL0_5')
    prof = AsterProfil(list_export[0])

    #_____________________________________________
    #
    # PARAMETERS
    #_____________________________________________
    TOLE_PARA = args['TOLE_PARA']
    TOLE_FONC = args['TOLE_FONC']

    # For the slave calculations
    CALCUL_ESCLAVE = {}.fromkeys(['LANCEMENT', 'MODE', 'UNITE_SUIVI', 'CLASSE',
                                  'ACTUALISATION', 'memjeveux', 'memjob',
                                  'mem_aster', 'tpmax', 'tpsjob',
                                  'mpi_nbnoeud', 'mpi_nbcpu', 'NMAX_SIMULT', ])
    dESCLAVE = args['CALCUL_ESCLAVE'][0].cree_dict_valeurs(
        args['CALCUL_ESCLAVE'][0].mc_liste)
    for i in list(dESCLAVE.keys()):
        if dESCLAVE[i] is None:
            del dESCLAVE[i]
    CALCUL_ESCLAVE['LANCEMENT'] = dESCLAVE['LANCEMENT']
    if 'UNITE_SUIVI' in dESCLAVE:
        CALCUL_ESCLAVE['UNITE_SUIVI'] = dESCLAVE['UNITE_SUIVI']
    else:
        CALCUL_ESCLAVE['UNITE_SUIVI'] = None
    if 'MODE' in dESCLAVE:
        CALCUL_ESCLAVE['MODE'] = dESCLAVE['MODE']
    else:
        CALCUL_ESCLAVE['MODE'] = prof['mode'][0].upper()
    LANCEMENT = CALCUL_ESCLAVE['LANCEMENT']

    # Parameters of the genetic algorithm
    if 'NB_PARENTS' in args:
        NB_PARENTS = args['NB_PARENTS']
    if 'NB_FILS' in args:
        NB_FILS = args['NB_FILS']
    if 'ECART_TYPE' in args:
        ECART_TYPE = args['ECART_TYPE']
    if 'ITER_ALGO_GENE' in args:
        ITER_ALGO_GENE = args['ITER_ALGO_GENE']
    if 'RESI_ALGO_GENE' in args:
        RESI_ALGO_GENE = args['RESI_ALGO_GENE']
    if 'GRAINE' in args:
        UTMESS('A', 'RECAL0_43')
        GRAINE = args['GRAINE']
    else:
        GRAINE = None

    # Parameters for the recalibration of a dynamic model
    if 'DYNAMIQUE' in args:
        DYNAMIQUE = args['DYNAMIQUE']
    else:
        DYNAMIQUE = None

    #_____________________________________________
    #
    # PRELIMINARY CHECK ON GNUPLOT
    #_____________________________________________
    if GRAPHIQUE:
        dGRAPHIQUE = GRAPHIQUE[0].cree_dict_valeurs(GRAPHIQUE[0].mc_liste)
        if 'FORMAT' in dGRAPHIQUE and dGRAPHIQUE['FORMAT'] == 'GNUPLOT':
            # Try to import Gnuplot -> NO GRAPHICS if it is missing
            if not HAS_GNUPLOT:
                GRAPHIQUE = None
                UTMESS('A', 'RECAL0_3')

    #_____________________________________________
    #
    # PARAMETERS OF THE DISTRIBUTION MODE
    #_____________________________________________
    if LANCEMENT == 'DISTRIBUTION':
        if debug:
            print(prof.param['tpsjob'][0])
            print(prof.args['tpmax'])
            print(prof.param['mem_aster'][0])
            print(prof.args['memjeveux'])
            print(prof.param['memjob'][0])

        # For the megawords / megabytes conversion
        if on_64bits():
            facw = 8
        else:
            facw = 4

        # Retrieve the mem_aster parameter
        try:
            mem_aster = int(prof['mem_aster'][0])
        except ValueError:
            mem_aster = 100
        if mem_aster in (0, 100):
            if CALCUL_ESCLAVE['MODE'] == 'INTERACTIF':
                UTMESS('A', 'RECAL0_6')
            mem_aster = 100
        CALCUL_ESCLAVE['mem_aster'] = mem_aster

        # Use of the TEMPS keyword
        if 'TEMPS' in dESCLAVE:
            CALCUL_ESCLAVE['tpsjob'] = int(dESCLAVE['TEMPS'] / 60)
            CALCUL_ESCLAVE['tpmax'] = int(dESCLAVE['TEMPS'])
        else:
            # Retrieve from the master calculation
            CALCUL_ESCLAVE['tpsjob'] = prof.param['tpsjob'][0]
            CALCUL_ESCLAVE['tpmax'] = prof.args['tpmax']

        # Use of the MEMOIRE keyword
        if 'MEMOIRE' in dESCLAVE:
            CALCUL_ESCLAVE['memjob'] = int(dESCLAVE['MEMOIRE'] * 1024)
            # Compute the slave memjeveux parameter
            memjeveux = int(dESCLAVE['MEMOIRE'] / facw)
            try:
                if mem_aster == 100:
                    CALCUL_ESCLAVE['memjeveux'] = memjeveux
                else:
                    CALCUL_ESCLAVE['memjeveux'] = float(
                        int((float(mem_aster) / 100.) * float(memjeveux)))
            except:
                UTMESS('F', 'RECAL0_8')
        else:
            # Retrieve from the master calculation
            CALCUL_ESCLAVE['memjob'] = int(prof.param['memjob'][0])
            CALCUL_ESCLAVE['memjeveux'] = prof.args['memjeveux']

        # Use of the MPI_NBCPU keyword
        if 'MPI_NBCPU' in dESCLAVE:
            # Check that the master calculation runs MPI on 1 cpu
            mpi_nbcpu = str(prof['mpi_nbcpu'][0])
            if mpi_nbcpu != '1':
                UTMESS('A', 'RECAL0_7')
            CALCUL_ESCLAVE['mpi_nbcpu'] = int(dESCLAVE['MPI_NBCPU'])

        # Use of the MPI_NBNOEUD keyword
        if 'MPI_NBNOEUD' in dESCLAVE:
            CALCUL_ESCLAVE['mpi_nbnoeud'] = int(dESCLAVE['MPI_NBNOEUD'])

        # Batch parameters
        if CALCUL_ESCLAVE['MODE'] == 'BATCH':
            if 'CLASSE' in dESCLAVE:
                CALCUL_ESCLAVE['CLASSE'] = dESCLAVE['CLASSE']
            if 'ACTUALISATION' in dESCLAVE:
                CALCUL_ESCLAVE['ACTUALISATION'] = dESCLAVE['ACTUALISATION']
            # Display the batch parameters
            if CALCUL_ESCLAVE['CLASSE']:
                classe = CALCUL_ESCLAVE['CLASSE']
            else:
                classe = ' -auto- '
            UTMESS('I', 'RECAL0_69',
                   valk=(str(CALCUL_ESCLAVE['tpmax']),
                         str(int(CALCUL_ESCLAVE['memjob']) / 1024),
                         str(int(float(CALCUL_ESCLAVE['memjeveux']) * facw)),
                         classe))

    #_____________________________________________
    #
    # CHECKS
    #_____________________________________________
    if float(PARA_DIFF_FINI) > 0.1:
        UTMESS('A', 'RECAL0_76', valk=(str(PARA_DIFF_FINI)))

    #_____________________________________________
    #
    # INITIALIZATIONS
    #_____________________________________________
    # Store the initial order of the parameters to return the output values
    # of the macro in the right order
    LIST_NOM_PARA = [para[0] for para in LIST_PARA]
    # Sort the parameters
    LIST_PARA.sort()

    # The algorithms of optimize.py have some limitations
    if METHODE in ['FMIN', 'FMINBFGS', 'FMINNCG']:
        # Plotting is only possible at the last iteration
        if GRAPHIQUE:
            if GRAPHIQUE['AFFICHAGE'] == 'TOUTE_ITERATION':
                UTMESS('I', 'RECAL0_10', valk=METHODE)
        # Bounds are not handled
        UTMESS('I', 'RECAL0_11', valk=METHODE)

    #_______________________________________________
    #
    # HANDLING OF THE OPTIONAL WEIGHTS KEYWORD
    #_______________________________________________
    if (POIDS is None):
        POIDS = NP.ones(len(RESU_EXP))

    #_____________________________________________
    #
    # HANDLING OF SYNTAX ERRORS
    #_____________________________________________
    texte_erreur, texte_alarme = gestion(UNITE_ESCL, LIST_PARA, RESU_CALC,
                                         RESU_EXP, POIDS, GRAPHIQUE,
                                         UNITE_RESU, METHODE)
    if (texte_erreur != ""):
        UTMESS('F', "RECAL0_12", valk=texte_erreur)
    if (texte_alarme != ""):
        UTMESS('A', "RECAL0_12", valk=texte_alarme)

    #_____________________________________________
    #
    # INITIALIZATIONS
    #_____________________________________________
    iter = 0
    restant, temps_iter = 0., 0.
    restant, temps_iter, err = reca_utilitaires.temps_CPU(restant, temps_iter)
    para, val, borne_inf, borne_sup = reca_utilitaires.transforme_list_Num(
        LIST_PARA, RESU_EXP)
    val_init = copy.copy(val)

    # Output functional (vector or scalar)
    if METHODE in ['FMIN', 'FMINBFGS', 'FMINNCG', 'GENETIQUE', 'HYBRIDE']:
        vector_output = False
    else:
        vector_output = True

    # "CALCUL" OBJECT
    CALCUL_ASTER = reca_calcul_aster.CALCUL_ASTER(
        jdc=self,
        METHODE=METHODE,
        UNITE_ESCL=UNITE_ESCL,
        UNITE_RESU=UNITE_RESU,
        para=para,
        reponses=RESU_CALC,
        PARA_DIFF_FINI=PARA_DIFF_FINI,
        vector_output=vector_output,
        DYNAMIQUE=DYNAMIQUE,
        # LANCEMENT = LANCEMENT,
        CALCUL_ESCLAVE=CALCUL_ESCLAVE,
        INFO=INFO,
    )
    CALCUL_ASTER.RESU_EXP = RESU_EXP
    CALCUL_ASTER.RESU_CALC = RESU_CALC
    CALCUL_ASTER.LIST_PARA = LIST_PARA
    if CALCUL_ESCLAVE['UNITE_SUIVI']:
        CALCUL_ASTER.unity_follow = CALCUL_ESCLAVE['UNITE_SUIVI']

    # Instances of the classes for the error computation and the
    # dimensioning/adimensioning
    Dim = reca_algo.Dimension(copy.copy(val_init))
    CALCUL_ASTER.Simul = reca_interp.Sim_exp(RESU_EXP, POIDS)
    CALCUL_ASTER.Dim = Dim
    CALCUL_ASTER.reca_algo = reca_algo
    if (GRAPHIQUE):
        CALCUL_ASTER.UNITE_GRAPHIQUE = GRAPHIQUE['UNITE']

    # In the case of dynamics with manual pairing of the MACs, set the
    # corresponding flag to True
    if METHODE in ['HYBRIDE', 'LEVENBERG', 'GENETIQUE']:
        # AAC --> modified to allow displaying the MAC window for the
        # Levenberg and genetic algorithms as well
        if (DYNAMIQUE is not None
                and DYNAMIQUE['APPARIEMENT_MANUEL'] == 'OUI'):
            CALCUL_ASTER.graph_mac = True

    # Instance of the class managing the display of the results of the
    # optimization calculation
    Mess = reca_message.Message(para, RESU_EXP, copy.copy(val_init),
                                UNITE_RESU)
    Mess.initialise()

    # Computation of F
    # erreur = CALCUL_ASTER.calcul_F(val)
    # Computation of F and G
    # erreur, residu, A_nodim, A = CALCUL_ASTER.calcul_FG(val)
    # sys.exit()

    # INCLUDE mode: the PRE commands must be executed here
    if LANCEMENT == 'INCLUSION':
        UNITE_INCLUDE = UNITE_ESCL
        recal.make_include_files(UNITE_INCLUDE=UNITE_INCLUDE,
                                 calcul=RESU_CALC,
                                 parametres=LIST_PARA)

    #-------------------------------------------------------------------------------
    # No optimization (just one evaluation of the functional at the current point)
    #-------------------------------------------------------------------------------
    #
    if ITER_MAXI <= 0:
        erreur = CALCUL_ASTER.calcul_F(val)
        residu = 0
        iter = 0
        L_F = CALCUL_ASTER.Lcalc[0]
        CALCUL_ASTER.evaluation_fonction = 1

    #-------------------------------------------------------------------------------
    # FMIN algorithm (no adimensioning because it does not use a gradient)
    #-------------------------------------------------------------------------------
    #
    elif (METHODE == 'FMIN'):
        UTMESS('I', 'RECAL0_13', valk=METHODE, files=Mess.get_filename())
        val, fval, warnflag = fmin(CALCUL_ASTER.calcul_F, val,
                                   maxiter=ITER_MAXI, maxfun=ITER_FONC_MAXI,
                                   fulloutput=1)
        iter_fonc = CALCUL_ASTER.evaluation_fonction
        if warnflag == 1:
            UTMESS('I', 'RECAL0_54', files=Mess.get_filename())
        if warnflag == 2:
            UTMESS('I', 'RECAL0_55', files=Mess.get_filename())
        Mess.affiche_etat_final_convergence(iter, ITER_MAXI, iter_fonc,
                                            ITER_FONC_MAXI, RESI_GLOB_RELA,
                                            residu=0, Act=[])
        Mess.affiche_fonctionnelle(fval)
        Mess.affiche_valeurs(val)
        nomres = Sortie(LIST_NOM_PARA, LIST_PARA, val, CALCUL_ASTER, Mess)
        return nomres

    #-------------------------------------------------------------------------------
    # GENETIQUE algorithm (no adimensioning because it does not use a gradient)
    #-------------------------------------------------------------------------------
    #
    elif (METHODE == 'GENETIQUE'):
        UTMESS('I', 'RECAL0_13', valk=METHODE, files=Mess.get_filename())
        nb_parents = NB_PARENTS
        nb_fils = NB_FILS
        nb_iter = ITER_ALGO_GENE
        sigma = ECART_TYPE
        err_min = RESI_ALGO_GENE
        graine = GRAINE
        val = evolutivo(CALCUL_ASTER, val, nb_iter, err_min, nb_parents,
                        nb_fils, sigma, borne_inf, borne_sup, graine)
        nomres = Sortie(LIST_NOM_PARA, LIST_PARA, val, CALCUL_ASTER, Mess)
        return nomres

    #-------------------------------------------------------------------------------
    # All the other methods are adimensioned
    #-------------------------------------------------------------------------------
    #
    else:
        #-------------------------------------------------------------------------------
        # If METHODE == 'HYBRIDE', first run the genetic algorithm, then the
        # Levenberg-Marquardt one starting from the parameter set produced by
        # the genetic algorithm
        if (METHODE == 'HYBRIDE'):
            nb_parents = NB_PARENTS
            nb_fils = NB_FILS
            nb_iter = ITER_ALGO_GENE
            sigma = ECART_TYPE
            err_min = RESI_ALGO_GENE
            graine = GRAINE
            val_gene = evolutivo(CALCUL_ASTER, val, nb_iter, err_min,
                                 nb_parents, nb_fils, sigma, borne_inf,
                                 borne_sup, graine)
            val = copy.copy(val_gene)
            val_init = copy.copy(val)
            # AA ? CALCUL_ASTER.graph_mac = True

        # Computation of F and G
        erreur, residu, A_nodim, A = CALCUL_ASTER.calcul_FG(val)

        E = recal.CALC_ERROR(experience=RESU_EXP, X0=val, calcul=RESU_CALC,
                             poids=POIDS)
        E.CalcError(CALCUL_ASTER.Lcalc)
        E.CalcSensibilityMatrix(CALCUL_ASTER.Lcalc, val, dX=None,
                                pas=PARA_DIFF_FINI)
        L_init = E.L_init
        L_J_init = E.L_J_init
        J_init = E.J_init
        J = E.J
        A = E.A
        A_nodim = E.A_nodim
        erreur = E.erreur
        residu = E.residu
        gradient_init = E.gradient_init

        # Computation of lambda_init
        l = reca_algo.lambda_init(NP.dot(NP.transpose(A), A))
        Mess.affiche_result_iter(iter, J, val, residu, NP.array([]))

        CALCUL_ASTER.L_init = L_init
        CALCUL_ASTER.L_J_init = L_J_init
        CALCUL_ASTER.J_init = J_init
        CALCUL_ASTER.A_init = A
        CALCUL_ASTER.gradient_init = gradient_init
        CALCUL_ASTER.residu_init = residu

        # Check for a lack of CPU time
        restant, temps_iter, err = reca_utilitaires.temps_CPU(restant,
                                                              temps_iter)
        if (err == 1):
            ier = ier + 1
            return ier

        #-------------------------------------------------------------------------------
        # FMINBFGS and FMINNCG methods
        #-------------------------------------------------------------------------------
        #
        if METHODE in ['FMINBFGS', 'FMINNCG']:
            UTMESS('I', 'RECAL0_13', valk=METHODE, files=Mess.get_filename())
            # Derivatives
            f = CALCUL_ASTER.calcul_F2
            fprime = CALCUL_ASTER.calcul_G
            warnflag = 0
            if 'GRADIENT' in args and args['GRADIENT'] == 'NON_CALCULE':
                f = CALCUL_ASTER.calcul_F
                fprime = None
            if fprime:
                UTMESS('I', 'RECAL0_14')
            else:
                UTMESS('I', 'RECAL0_15')
            # Run the optimization
            if METHODE == 'FMINBFGS':
                val, fval, func_calls, grad_calls, warnflag = fminBFGS(
                    f=f, x0=val, fprime=fprime, maxiter=ITER_MAXI,
                    avegtol=RESI_GLOB_RELA, fulloutput=1)
            elif METHODE == 'FMINNCG':
                val, fval, func_calls, grad_calls, hcalls, warnflag = fminNCG(
                    f=f, x0=val, fprime=fprime, fhess_p=None, fhess=None,
                    maxiter=ITER_MAXI, avextol=RESI_GLOB_RELA, fulloutput=1)
            # Display the final messages
            iter_fonc = CALCUL_ASTER.evaluation_fonction
            if warnflag:
                UTMESS('I', 'RECAL0_55', files=Mess.get_filename())
            Mess.affiche_etat_final_convergence(iter, ITER_MAXI, iter_fonc,
                                                ITER_FONC_MAXI,
                                                RESI_GLOB_RELA, residu=0,
                                                Act=[])
            Mess.affiche_fonctionnelle(fval)
            Mess.affiche_valeurs(val)
            # Allow to get a NOOK diagnosis for the job
            if warnflag:
                iter = ITER_MAXI
            L_F = CALCUL_ASTER.L
            residu = fval
            ecart_fonc = 0   # not computed with these methods
            ecart_para = 0   # not computed with these methods

        #-------------------------------------------------------------------------------
        # Levenberg-Marquardt method
        #----------------------------------------------------------------------
        elif METHODE in ['LEVENBERG', 'HYBRIDE']:
            #___________________________________________________________
            #
            # MAIN LOOP OF THE Levenberg-Marquardt ALGORITHM
            #___________________________________________________________
            UTMESS('I', 'RECAL0_13', valk=METHODE, files=Mess.get_filename())
            while (iter < ITER_MAXI):
                iter = iter + 1
                new_val, s, l, Act = reca_algo.Levenberg_bornes(
                    val, Dim, val_init, borne_inf, borne_sup, A, erreur, l,
                    UNITE_RESU)

                # Check the variation of the parameters
                ecart_para = reca_algo.calcul_norme2(
                    NP.array(new_val) - NP.array(val))
                if debug:
                    print("AA0/ecart para=%s\nAA0/oldpara/newpara=%s %s"
                          % (ecart_para, val, new_val))
                if ecart_para < TOLE_PARA:
                    UTMESS('I', 'RECAL0_51', valr=ecart_para,
                           files=Mess.get_filename())
                    break

                # Calculations at the current point val and for all the finite
                # difference perturbations (N+1 distributed or included
                # calculations)
                CALCUL_ASTER.calcul_FG(new_val)

                # Compute the error and the sensitivity matrix
                old_J = copy.copy(J)
                E.CalcError(CALCUL_ASTER.Lcalc)
                new_J = E.J
                l = reca_algo.actualise_lambda(l, Dim.adim(val),
                                               Dim.adim(new_val), A, erreur,
                                               new_J, J)
                E.CalcSensibilityMatrix(CALCUL_ASTER.Lcalc, new_val, dX=None,
                                        pas=PARA_DIFF_FINI)
                L_F = CALCUL_ASTER.Lcalc[0]
                A = E.A_nodim
                val = copy.copy(new_val)
                erreur = copy.copy(E.erreur)
                J = E.J
                if debug:
                    print("AA0/L_F=", L_F)
                    print("AA0/l=", l)
                    print("AA0/erreur=", erreur)
                    print("AA0/J=", J)
                    print("AA0/A_nodim=", A)

                # Adimension the sensitivity matrix
                A = Dim.adim_sensi(A)

                # Compute the residual
                residu = reca_algo.test_convergence(gradient_init, erreur, A, s)
                if debug:
                    print("AA0/residu=", residu)
                    print("AA0/new_val=", new_val)
                    print("AA0/A=", A)

                # Compute the variation of the functional
                ecart_fonc = abs(new_J - old_J)

                # Display the iteration
                Mess.affiche_result_iter(iter, J, val, residu, Act,
                                         ecart_para, ecart_fonc)

                # Check the variation of the functional
                if ecart_fonc < TOLE_FONC:
                    UTMESS('I', 'RECAL0_52', valr=ecart_fonc,
                           files=Mess.get_filename())
                    break

                if (GRAPHIQUE):
                    if GRAPHIQUE['AFFICHAGE'] == 'TOUTE_ITERATION':
                        GRAPHE_UL_OUT = GRAPHIQUE['UNITE']
                        if 'FORMAT' in dGRAPHIQUE and \
                           dGRAPHIQUE['FORMAT'] == 'XMGRACE':
                            pilote = GRAPHIQUE['PILOTE']
                        else:
                            pilote = 'INTERACTIF'
                        reca_utilitaires.graphique(GRAPHIQUE['FORMAT'], L_F,
                                                   RESU_EXP, RESU_CALC, iter,
                                                   GRAPHE_UL_OUT, pilote)

                # Check the residual
                if residu <= RESI_GLOB_RELA:
                    UTMESS('I', 'RECAL0_50', valr=residu,
                           files=Mess.get_filename())
                    break

                # Check for a lack of CPU time
                restant, temps_iter, err = reca_utilitaires.temps_CPU(
                    restant, temps_iter)
                if (err == 1):
                    UTMESS('I', 'RECAL0_53', files=Mess.get_filename())
                    break

            #_____________________________________________
            #
            # END OF THE ITERATIONS
            # CONVERGENCE OR FAILURE
            #_____________________________________________
            iter_fonc = CALCUL_ASTER.evaluation_fonction
            Mess.affiche_etat_final_convergence(iter, ITER_MAXI, iter_fonc,
                                                ITER_FONC_MAXI,
                                                RESI_GLOB_RELA, residu, Act)
            reca_algo.calcul_etat_final(para, A, iter, ITER_MAXI,
                                        RESI_GLOB_RELA, residu, Mess)
        #----------------------------------------------------------------------

    #_____________________________________________
    #
    # END OF THE ITERATIONS FOR ALL THE ALGORITHMS
    #_____________________________________________
    if (GRAPHIQUE):
        fichier = None
        # For the algorithms of optimize.py, plotting is only possible at the
        # last iteration
        if (GRAPHIQUE['AFFICHAGE'] == 'ITERATION_FINALE') or \
           (METHODE in ['FMIN', 'FMINBFGS', 'FMINNCG']) or (ITER_MAXI <= 0):
            UTMESS('I', 'RECAL0_17')
            GRAPHE_UL_OUT = GRAPHIQUE['UNITE']
            pilote = GRAPHIQUE['PILOTE']
            reca_utilitaires.graphique(GRAPHIQUE['FORMAT'], L_F, RESU_EXP,
                                       RESU_CALC, iter, GRAPHE_UL_OUT, pilote,
                                       fichier)

    # If there is no convergence then the diagnosis is NOOK_TEST_RESU
    # if (residu > RESI_GLOB_RELA) and (ecart_fonc > TOLE_FONC) and
    # (ecart_para < TOLE_PARA):
    if debug:
        print("residu, RESI_GLOB_RELA=", residu, RESI_GLOB_RELA,
              (residu > RESI_GLOB_RELA))
        print("ecart_fonc, TOLE_FONC=", ecart_fonc, TOLE_FONC,
              (ecart_fonc > TOLE_FONC))
        print("ecart_para, TOLE_PARA=", ecart_para, TOLE_PARA,
              (ecart_para > TOLE_PARA))
    if (residu > RESI_GLOB_RELA):
        _tmp = []
        _tmp.append({'PARA': 'ITER_MAXI', 'LISTE_R': 0.0, })

    #_____________________________________________
    #
    # CREATION OF THE LIST OF REALS HOLDING
    # THE PARAMETER VALUES AT CONVERGENCE
    #_____________________________________________
    nomres = Sortie(LIST_NOM_PARA, LIST_PARA, val, CALCUL_ASTER, Mess)
    return nomres
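
# --- Illustrative sketch of the stopping tests used in the loop above ------
# The Levenberg-Marquardt loop above breaks on any of three criteria:
# parameter variation, functional variation or residual.  Plain-Python
# restatement, independent of the reca_* modules.
def _should_stop(ecart_para, ecart_fonc, residu,
                 tole_para, tole_fonc, resi_glob_rela):
    return (ecart_para < tole_para or
            ecart_fonc < tole_fonc or
            residu <= resi_glob_rela)
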
def RunAster(run, *args):
    """Allow to run Code_Aster with or without compiling additional source
    files, check a development, run a list of test cases...
    """
    run.print_timer = True
    prev = os.getcwdu()
    # ----- check argument
    if len(args) != 1:
        run.parser.error(_(u"'--%s' requires one argument") % run.current_action)
    # 1. ----- initializations
    jn = run['num_job']
    fprof = get_tmpname(run, run['tmp_user'], basename='profil_astk')
    run.ToDelete(fprof)
    # 1.1. ----- check argument type
    # 1.1.1. ----- use profile from args
    if isinstance(args[0], AsterProfil):
        prof = args[0].copy()
        prof.WriteExportTo(fprof)
        forig = fprof
    else:
        # 1.1.2. ----- read profile from args
        forig = args[0]
        kret = run.Copy(fprof, forig, niverr='<F>_PROFILE_COPY')
        prof = AsterProfil(fprof, run)
        if not run.IsRemote(forig):
            export_fname = run.PathOnly(get_absolute_path(forig))
            prof.absolutize_filename(export_fname)
    if not prof['mode'][0] in ('batch', 'interactif'):
        run.Mess(_(u"Unknown mode (%s), use 'interactif' instead")
                 % repr(prof['mode'][0]), 'UNEXPECTED_VALUE')
        prof['mode'] = ['interactif']
    run.DBG("Input export : %s" % fprof, prof)
    # 1.2. get AsterConfig and AsterBuild objects
    REPREF = prof.get_version_path()
    conf = build_config_from_export(run, prof)
    build = AsterBuild(run, conf)
    DbgPara = {
        'debug': {'exe': conf['BIN_DBG'][0],
                  'suff': conf['BINOBJ_DBG'][0],
                  'libast': conf['BINLIB_DBG'][0],
                  'libfer': conf['BINLIBF_DBG'][0]},
        'nodebug': {'exe': conf['BIN_NODBG'][0],
                    'suff': conf['BINOBJ_NODBG'][0],
                    'libast': conf['BINLIB_NODBG'][0],
                    'libfer': conf['BINLIBF_NODBG'][0]},
    }
    # 1.3. set environment depending on version
    for f in conf.get_with_absolute_path('ENV_SH'):
        run.AddToEnv(f)
    # 1.4. set runner parameters
    klass = Runner
    # allow customization of the execution objects
    if run.get('schema_execute'):
        schem = get_plugin(run['schema_execute'])
        run.DBG("calling plugin : %s" % run['schema_execute'])
        klass = schem(prof)
    runner = klass(conf.get_defines())
    iret = runner.set_cpuinfo(prof['mpi_nbnoeud'][0], prof['mpi_nbcpu'][0])
    if iret == 1:
        run.Mess(ufmt(_(u"%s is not a MPI version of Code_Aster. "
                        "The number of nodes/processors must be equal to 1."),
                      REPREF),
                 "<F>_INVALID_PARAMETER")
    elif iret != 0:
        run.Mess(_(u"incorrect value for mpi_nbnoeud (%s) or mpi_nbcpu (%s)")
                 % (prof['mpi_nbnoeud'][0], prof['mpi_nbcpu'][0]),
                 '<F>_INVALID_PARAMETER')
    # 1.5. rep_trav from profile or from run[...]
    reptrav = runner.set_rep_trav(prof['rep_trav'][0], prof['mode'][0])
    # write reptrav in the export
    prof['rep_trav'] = reptrav
    prof.WriteExportTo(prof.get_filename())    # XXX overrides the original export
    if forig != prof.get_filename():
        run.Copy(forig, prof.get_filename(), niverr='<A>_ALARM')
    # add reptrav to LD_LIBRARY_PATH (to find dynamic libs provided by user)
    old = os.environ.get("LD_LIBRARY_PATH", "")
    os.environ["LD_LIBRARY_PATH"] = (reptrav + os.pathsep + old).strip(os.pathsep)
    # do not reinitialize rep_trav if
    if prof['prep_env'][0] not in NO_VALUES:
        run.MkDir(reptrav, chmod=0700)
    if prof['detr_rep_trav'][0] not in NO_VALUES:
        run.ToDelete(reptrav)
    # 1.6. copy profile in rep_trav
    kret = run.Copy(osp.join(reptrav, jn + '.export'), fprof)
    # ... and config file as ./config.txt
    conf.WriteConfigTo(osp.join(reptrav, 'config.txt'))
    # 1.7. debug/nodebug
    dbg = prof['debug'][0]
    if dbg == '':
        dbg = 'nodebug'
    # 1.8. default values
    exetmp = osp.join(REPREF, DbgPara[dbg]['exe'])
    cmdetmp = osp.join(REPREF, conf['BINCMDE'][0])
    eletmp = osp.join(REPREF, conf['BINELE'][0])
    # 2. ----- read profile values
    # it's valid because exec, cmde and ele must appear only once
    # these values will be overridden if they are available in reptrav
    # after an occurrence of 'make_...'
    if prof.Get('DR', 'exec'):
        exetmp = prof.Get('DR', 'exec')[0]['path']
    if prof.Get('DR', 'cmde'):
        cmdetmp = prof.Get('DR', 'cmde')[0]['path']
    if prof.Get('DR', 'ele'):
        eletmp = prof.Get('DR', 'ele')[0]['path']
    # order of actions :
    list_actions = ['make_exec', 'make_cmde', 'make_ele', 'make_etude',
                    'make_dbg', 'make_env', 'astout', 'distribution',
                    'multiple', 'exec_crs', 'exec_crp']
    # 3. ==> Let's go !
    # 3.0. check if we know what to do
    for act in prof['actions']:
        if act == '':
            run.Mess(_(u'nothing to do'), 'OK')
        elif not act in list_actions:
            run.Mess(_(u'unknown action : %s') % act, '<A>_ALARM')
    # check if the version allows developments
    if conf['DEVEL'][0] in NO_VALUES and \
       ('make_exec' in prof['actions'] or
        'make_cmde' in prof['actions'] or
        'make_ele' in prof['actions']):
        run.Mess(_(u'The configuration of this version does not allow '
                   'user developments.'), '<F>_ERROR')
    #
    # 3.1. ----- make_exec
    #
    iret = 0
    if 'make_exec' in prof['actions']:
        run.DBG(u'Start make_exec action')
        exetmp = osp.join(reptrav, 'aster.exe')
        tit = _(u'Compilation of source files')
        run.Mess(tit, 'TITLE')
        run.timer.Start(tit)
        repact = osp.join(reptrav, 'make_exec')
        repobj = osp.join(repact, 'repobj')
        run.MkDir(repact)
        lf = []
        for typ in ('c', 'f', 'f90'):
            for rep in [l['path'] for l in prof.Get('D', typ=typ)]:
                jret, lbi = build.Compil(typ.upper(), rep, repobj, dbg,
                                         rep_trav=repact,
                                         error_if_empty=True,
                                         numthread='auto')
                iret = max(iret, jret)
                lf.extend(lbi)
        # list of the overridden source files
        vers = get_aster_version(REPREF)
        vers = '.'.join(vers[:3])
        fsurch = osp.join(repact, 'surchg.f')
        listsurcharge(vers, fsurch, lf)
        jret, lbi = build.Compil('F', fsurch, repobj, dbg, repact)
        run.timer.Stop(tit)
        run.CheckOK()
        tit = _(u'Build executable')
        run.Mess(tit, 'TITLE')
        run.timer.Start(tit)
        libaster = osp.join(REPREF, DbgPara[dbg]['libast'])
        libferm = osp.join(REPREF, DbgPara[dbg]['libfer'])
        # for backward compatibility
        if run.IsDir(libaster):
            libaster = osp.join(libaster, 'lib_aster.lib')
        if run.IsDir(libferm):
            libferm = osp.join(libferm, 'ferm.lib')
        lobj = glob(osp.join(repobj, '*.o'))
        # build an archive if there are more than NNN object files
        if len(lobj) > 500:
            run.timer.Stop(tit)
            tit2 = _(u'Add object files to library')
            run.timer.Start(tit2)
            libtmp = osp.join(repobj, 'libsurch.a')
            run.Copy(libtmp, libaster)
            kret = build.Archive(repobj, libtmp, force=True)
            lobj = []
            libaster = libtmp
            run.timer.Stop(tit2)
            run.timer.Start(tit)
        kret = build.Link(exetmp, lobj, libaster, libferm, repact)
        run.timer.Stop(tit)
        run.CheckOK()
        tit = _(u'Copying results')
        run.timer.Start(tit, num=999)
        if prof.Get('R', typ='exec'):
            exe = prof.Get('R', typ='exec')[0]
            run.Delete(exe['path'], remove_dirs=False)
            iret = run.MkDir(osp.dirname(exe['path']))
            iret = run.Copy(exe['path'], exetmp)
            exedata = prof.Get('D', typ='exec')
            if exedata and exedata[0]['path'] != exe['path']:
                exetmp = exedata[0]['path']
        run.timer.Stop(tit)
        run.Mess(_(u'Code_Aster executable successfully created'), 'OK')
    #
    # 3.2. ----- make_cmde
    #
    if 'make_cmde' in prof['actions']:
        run.DBG(u'Start make_cmde action')
        tit = _(u"Compilation of commands catalogue")
        cmdetmp = osp.join(reptrav, 'cata_commande')
        run.timer.Start(tit)
        repact = osp.join(reptrav, 'make_cmde')
        run.MkDir(repact)
        kargs = {'exe': exetmp, 'cmde': cmdetmp, }
        kargs['capy'] = [l['path'] for l in prof.Get('D', typ='capy')]
        lfun = prof.Get('D', typ='unig')
        if lfun:
            kargs['unigest'] = build.GetUnigest(lfun[0]['path'])
        if prof.Get('D', typ='py'):
            kargs['py'] = [l['path'] for l in prof.Get('D', typ='py')]
        jret = build.CompilCapy(REPREF, repact, **kargs)    # i18n=True,
        run.timer.Stop(tit)
        run.CheckOK()
        tit = _(u'Copying results')
        run.timer.Start(tit)
        if prof.Get('R', typ='cmde'):
            cmde = prof.Get('R', typ='cmde')[0]
            iret = run.MkDir(cmde['path'])
            iret = run.Copy(cmde['path'], osp.join(cmdetmp, 'cata*.py*'))
        run.timer.Stop(tit)
    #
    # 3.3. ----- make_ele
    #
    if 'make_ele' in prof['actions']:
        run.DBG(u'Start make_ele action')
        tit = _(u"Compilation of elements")
        eletmp = osp.join(reptrav, 'elem.1')
        run.timer.Start(tit)
        repact = osp.join(reptrav, 'make_ele')
        run.MkDir(repact)
        kargs = {'exe': exetmp, 'cmde': cmdetmp, 'ele': eletmp, }
        kargs['cata'] = [l['path'] for l in prof.Get('D', typ='cata')]
        lfun = prof.Get('D', typ='unig')
        if lfun:
            kargs['unigest'] = build.GetUnigest(lfun[0]['path'])
        if prof.Get('D', typ='py'):
            kargs['py'] = [l['path'] for l in prof.Get('D', typ='py')]
        jret = build.CompilEle(REPREF, repact, **kargs)
        run.timer.Stop(tit)
        run.CheckOK()
        tit = _(u'Copying results')
        run.timer.Start(tit)
        if prof.Get('R', typ='ele'):
            ele = prof.Get('R', typ='ele')[0]
            iret = run.MkDir(osp.dirname(ele['path']))
            iret = run.Copy(ele['path'], eletmp)
        run.timer.Stop(tit)
    #
    # 3.4. ----- make_env / make_etude / make_dbg
    #
    if 'make_env' in prof['actions'] or 'make_etude' in prof['actions'] or \
       'make_dbg' in prof['actions']:
        run.DBG(u'Start make_etude/make_env/make_dbg action')
        os.chdir(reptrav)
        run.Mess(_(u'Code_Aster execution'), 'TITLE')
        # 3.4.1. prepare reptrav to run Code_Aster (proc# = 0)
        only_env = 'make_env' in prof['actions']
        kargs = {
            'exe': exetmp,
            'cmde': cmdetmp,
            'ele': eletmp,
            'lang': prof['lang'][0],
            'only_env': only_env,
        }
        lfun = prof.Get('D', typ='unig')
        if lfun:
            kargs['unigest'] = build.GetUnigest(lfun[0]['path'])
        if prof.Get('D', typ='py'):
            kargs['py'] = [l['path'] for l in prof.Get('D', typ='py')]
        tit = _(u'Preparation of environment')
        run.timer.Start(tit)
        run.Mess(ufmt(_(u'prepare environment in %s'), reptrav))
        if prof['prep_env'][0] != 'no':
            build.PrepEnv(REPREF, reptrav, dbg=dbg, **kargs)
        else:
            run.Mess(_(u'... skipped (%s = no) !') % 'prep_env', 'SILENT')
        run.timer.Stop(tit)
        # 3.4.2. copy data files (raise <E> errors if failed)
        tit = _(u'Copying datas')
        run.Mess(tit, 'TITLE')
        run.timer.Start(tit)
        if prof['copy_data'][0] not in NO_VALUES:
            copyfiles(run, 'DATA', prof)
        else:
            run.Mess(_(u'... skipped (%s = no) !') % 'copy_data', 'SILENT')
        print3(os.getcwdu())
        run.timer.Stop(tit)
        # 3.4.3. execution
        diag, tcpu, tsys, ttot, copybase = execute(reptrav, multiple=False,
                                                   with_dbg='make_dbg' in prof['actions'],
                                                   only_env=only_env,
                                                   runner=runner,
                                                   run=run, conf=conf,
                                                   prof=prof, build=build,
                                                   exe=exetmp)
        if not 'make_env' in prof['actions']:
            # 3.4.4. copy results
            tit = _(u'Copying results')
            run.Mess(tit, 'TITLE')
            run.timer.Start(tit)
            if prof['copy_result'][0] not in NO_VALUES:
                emit_alarm = prof['copy_result_alarm'][0] not in NO_VALUES
                copyfiles(run, 'RESU', prof, copybase, emit_alarm)
            else:
                run.Mess(_(u'... skipped (%s = no) !') % 'copy_result', 'SILENT')
            run.timer.Stop(tit)
            run.Mess(_(u'Code_Aster run ended'), diag)
            # 3.4.5. add .resu/.erre to output for testcases
            ctest = prof['parent'][0] == "astout"
            if ctest:
                run.Mess(_(u'Content of RESU file'), 'TITLE')
                run.FileCat('fort.8', magic.get_stdout())
                run.Mess(_(u'Content of ERROR file'), 'TITLE')
                run.FileCat('fort.9', magic.get_stdout())
            # 3.4.6. notify the user
            if prof['notify'][0]:
                content = _('[Code_Aster] job %(job)s on %(server)s ended: %(diag)s')
                content = content % {
                    'job': prof.get_jobname(),
                    'diag': diag,
                    'server': prof['serveur'][0],
                }
                dest = ','.join(prof['notify'])
                run.SendMail(dest=dest, text=content,
                             subject=content.splitlines()[0])
                run.Mess(_(u'Email notification sent to %s') % dest)
        run.CheckOK()
        os.chdir(prev)
    # 3.5. ----- astout
    if 'astout' in prof['actions']:
        run.DBG(u'Start astout action')
        kargs = {
            'exe': exetmp,
            'cmde': cmdetmp,
            'ele': eletmp,
            'numthread': prof['numthread'][0],
        }
        os.chdir(reptrav)
        RunAstout(run, conf, prof, runner=runner, **kargs)
        os.chdir(prev)
    # 3.6. ----- distribution
    if 'distribution' in prof['actions']:
        run.DBG(u'Start distribution action')
        kargs = {
            'exe': exetmp,
            'cmde': cmdetmp,
            'ele': eletmp,
            'numthread': prof['numthread'][0],
        }
        Parametric(run, prof, runner=runner, **kargs)
    # 3.7. ----- multiple
    if 'multiple' in prof['actions']:
        run.DBG(u'Start multiple action')
        Multiple(run, prof, runner=runner, numthread=prof['numthread'][0])
    # 4. ----- clean up
    if 'make_env' in prof['actions'] and \
       prof['detr_rep_trav'][0] not in NO_VALUES:
        run.DoNotDelete(reptrav)
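
# --- Illustrative sketch (hypothetical values) ------------------------------
# RunAster() picks the executable and the libraries from DbgPara according to
# the 'debug' field of the profile, defaulting to 'nodebug' when it is empty.
# The dict and paths below only mimic that lookup with made-up values; osp is
# assumed to be os.path as in this module.
def _example_pick_exec(debug_flag, repref='/opt/aster/STA11'):
    dbg_para = {'debug': {'exe': 'asterd.exe'},
                'nodebug': {'exe': 'aster.exe'}}
    dbg = debug_flag or 'nodebug'
    return osp.join(repref, dbg_para[dbg]['exe'])
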
class AsterCalcul(BaseCalcul):
    """This class reads the user's profile and, if needed, calls as_run
    through a terminal or not, or just writes a btc file...
    """
    _supported_services = (
        'study',
        'parametric_study',
        'testcase',
        'meshtool',
        'stanley',
        'convbase',
        'distribution',
        'exectool',
        'multiple',
    )

    def __init__(self, run, filename=None, prof=None, pid=None, differ_init=False):
        """Initializations"""
        BaseCalcul.__init__(self)
        assert filename or prof, 'none of (filename, prof) provided!'
        self.run = run
        if pid is None:
            self.pid = self.run['num_job']
        else:
            self.pid = pid
        self.studyid = self.pid
        if prof is not None:
            self.prof = prof
        else:
            # ----- profile filename
            fprof = get_tmpname(self.run, self.run['tmp_user'],
                                basename='profil_astk')
            self.run.ToDelete(fprof)
            kret = self.run.Copy(fprof, filename, niverr='<F>_PROFILE_COPY')
            self.prof = AsterProfil(fprof, self.run)
        if self.prof['nomjob'][0] == '':
            self.prof['nomjob'] = 'unnamed'
        # attributes
        self.dict_info = None
        self.as_exec_ref = self.run.get('as_exec_ref')
        self.diag = '?'
        self.__initialized = False
        if not differ_init:
            self.finalize_init()

    def finalize_init(self):
        """Finalize initialization.
        Allow to adapt the prof object before customization."""
        # decode the called service
        self.decode_special_service()
        # add memory
        self.add_memory()
        # allow customization of the calcul object
        if self.run['schema_calcul']:
            schem = get_plugin(self.run['schema_calcul'])
            self.run.DBG("calling plugin : %s" % self.run['schema_calcul'])
            self.prof = schem(self)
        self.__initialized = True

    def decode_special_service(self):
        """Return the profile modified for the "special" service."""
        self.serv, self.prof = apply_special_service(self.prof, self.run)
        if self.serv == '':
            if self.prof['parent'][0] == 'parametric':
                self.serv = 'parametric_study'
            elif self.prof['parent'][0] == 'astout':
                self.serv = 'testcase'
            else:
                self.serv = 'study'
        self.prof['service'] = self.serv
        self.run.DBG("service name : %s" % self.serv)
        if self.serv not in self._supported_services:
            self.error(_(u'Unknown service : %s') % self.serv)

    def add_memory(self):
        """Add an amount of memory (MB) to the export parameters."""
        if self.serv in ('parametric_study', ):
            return
        conf = build_config_from_export(self.run, self.prof)
        self.run.DBG("memory to add: %s" % conf['ADDMEM'][0])
        try:
            addmem = float(conf['ADDMEM'][0])
        except ValueError:
            addmem = 0.
        if not addmem:
            return
        memory = float(self.prof['memjob'][0] or 0.) / 1024. + addmem
        self.prof.set_param_memory(memory)
        self.run.DBG("new memory parameters: memjob=%s memjeveux=%s"
                     % (self.prof['memjob'][0], self.prof.args['memjeveux']))

    def build_dict_info(self, opts):
        """Build a dictionary grouping all parameters."""
        sep = "-----------------------------------------------"
        self.mode = self.prof['mode'][0]
        if not self.mode or self.run.get(self.mode) not in YES_VALUES:
            self.mode = self.prof['mode'] = "interactif"
        if self.mode == 'batch':
            self.scheduler = BatchSystemFactory(self.run, self.prof)
        node = self.prof['noeud'][0] or self.prof['serveur'][0]
        self.dict_info = {
            'sep': sep,
            'export': self.prof.get_filename(),
            'mcli': self.prof['mclient'][0],
            'ucli': self.prof['uclient'][0],
            'serv': self.prof['serveur'][0],
            'user': self.prof['username'][0],
            'mode': self.mode,
            'node': node,
            'plt': self.run['plate-forme'],
            'vers': self.prof.get_version_path(),
            'tpsjob': self.prof['tpsjob'][0],
            'vmem': float(self.prof['memjob'][0] or 0.) / 1024.,
            'ncpus': self.prof['ncpus'][0] or 'auto',
            'mpi_nbnoeud': self.prof['mpi_nbnoeud'][0],
            'mpi_nbcpu': self.prof['mpi_nbcpu'][0],
            'dbg': self.prof['debug'][0],
            'prof_content': self.prof.get_content(),
            'nomjob': self.prof['nomjob'][0],
            'nomjob_': self.flash('', ''),
            'nomjob_p': self.flash('export', '$num_job'),
            'as_run_cmd': " ".join(self.run.get_as_run_cmd(with_args=False)),
            'who': self.run.system.getuser_host()[0],
            'opts': opts,
            'remote_args': " ".join(self.run.get_as_run_args()),
        }
        if self.prof['srv_dbg'][0] in YES_VALUES:
            self.dict_info['opts'] += ' --debug'
        if self.prof['srv_verb'][0] in YES_VALUES:
            self.dict_info['opts'] += ' --verbose'
        # rep_trav from profile or config(_nodename) / keep consistency with job.py
        rep_trav = self.prof['rep_trav'][0]
        if rep_trav == '':
            rep_trav = get_nodepara(node, 'rep_trav', self.run['rep_trav'])
        self.dict_info['rep_trav'] = rep_trav
        # set message using previous content
        self.dict_info['message'] = self.message()

    def message(self):
        """Format the information message."""
        # No "' in ASTK_MESSAGE !
        ASTK_MESSAGE = []
        # check client and server versions
        serv_vers = self.run.__version__
        try:
            client_vers = self.prof['origine'][0].split()[1]
        except Exception as msg:
            self.run.DBG('Error : unexpected "origine" value :',
                         self.prof['origine'][0])
            client_vers = ''
        if client_vers == '':
            ASTK_MESSAGE.append(msg_cli_version % serv_vers)
        elif serv_vers != client_vers:
            ASTK_MESSAGE.append(msg_pb_version % (serv_vers, client_vers))
        ASTK_MESSAGE.append(msg_info % self.dict_info)
        if self.prof['classe'][0]:
            ASTK_MESSAGE.append(msg_classe % self.prof['classe'][0])
        if self.prof['depart'][0]:
            ASTK_MESSAGE.append(msg_depart % self.prof['depart'][0])
        ASTK_MESSAGE.append(self.dict_info['sep'])
        if self.prof['consbtc'][0] not in NO_VALUES:
            msg = "generated"
        else:
            msg = "provided by user"
        ASTK_MESSAGE.append(msg_consbtc % msg)
        ASTK_MESSAGE.append(self.dict_info['sep'])
        ASTK_MESSAGE.append(msg_vers % (serv_vers, client_vers))
        return convert(os.linesep.join(ASTK_MESSAGE))
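
# --- Illustrative usage sketch (not part of the original module) -----------
# Building a calculation object from an export file, assuming 'run' is the
# configured AsRun instance available in this context.  The path is
# hypothetical and the actual submission steps (btc writing, as_run call)
# are left out.
def _example_build_calcul(run, export='/tmp/study.export'):
    calc = AsterCalcul(run, filename=export)
    return calc.prof['nomjob'][0], calc.serv
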