def __coupled_run_cfg__(self, distrep, coupled_domains=None):
    """Write coupling information for the given domains to the run.cfg
    file found (or created) in directory *distrep*.

    Each entry of coupled_domains is a dictionary which must contain at
    least a 'domain' key (the domain name); every key/value pair is
    written to a section named after the domain. The colon-separated
    list of domain names is stored under [setup]/coupled_domains.

    Does nothing when no coupled domains are given.
    """

    # Bug fix: use None instead of a shared mutable default argument.
    if not coupled_domains:
        return

    run_conf_path = os.path.join(distrep, 'run.cfg')
    run_conf = cs_run_conf.run_conf(run_conf_path,
                                    package=self.package,
                                    create_if_missing=True)

    # One section per domain, carrying every provided key/value.
    for dom in coupled_domains:
        dom_name = dom["domain"]
        for key in dom.keys():
            run_conf.set(dom_name, key, dom[key])

    # Colon-separated list of domain names (replaces manual '+=' build).
    domains = ":".join(d["domain"] for d in coupled_domains)
    if domains != "":
        run_conf.set('setup', 'coupled_domains', domains)

    run_conf.save()
def generate_run_config_file(path, resource_name, r_c, s_c, pkg):
    """Generate a minimalist run configuration file in the execution
    directory for successive run steps.

    path          -- path of the run.cfg file to write
    resource_name -- name of the resource section to fill
    r_c           -- run configuration dictionary
    s_c           -- stage configuration dictionary
    pkg           -- package information (kept for API consistency)

    Note: the file is written as a side effect; nothing is returned
    (the previous docstring incorrectly claimed a return value).
    """

    sections = {}

    # Reference the case directory only when the execution directory
    # lies outside of it.
    if path and r_c['casedir']:
        in_case = cs_case.is_exec_dir_in_case(r_c['casedir'],
                                              os.path.dirname(path))
        if not in_case:
            sections['paths'] = {'case': r_c['casedir']}
            if r_c['dest_dir']:
                sections['paths']['top_results_directory'] = r_c['dest_dir']

    # Coupled domains: one section per domain, plus the colon-separated
    # list of their names under [setup]/coupled_domains.
    if r_c.get('coupled_domains'):
        for d in r_c['coupled_domains']:
            sections[d['domain']] = {key: str(d[key]) for key in d.keys()}
        dom_str = ':'.join(d['domain'] for d in r_c['coupled_domains'])
        sections['setup'] = {'coupled_domains': dom_str}

    # Staging is assumed already done at this point, hence 'stage': False.
    sections['run'] = {'compute_build': r_c['compute_build'],
                       'id': r_c['run_id'],
                       'stage': False,
                       'initialize': s_c['initialize'],
                       'compute': s_c['run_solver'],
                       'finalize': s_c['save_results']}

    # Resource settings: keep only keys with a defined value
    # (the two original loops applied the same filter, so they are merged).
    r_d = {}
    for kw in ('n_procs', 'n_threads', 'time_limit',
               'job_parameters', 'job_header',
               'compute_prologue', 'compute_epilogue'):
        if r_c[kw]:
            r_d[kw] = r_c[kw]

    sections[resource_name] = r_d

    run_conf = cs_run_conf.run_conf(None)
    run_conf.sections = sections
    run_conf.save(path, new=True)
def __init__(self, path=None, pkg=None, import_legacy=False):
    """Constructor.

    path          -- path of the run.cfg file (optional)
    pkg           -- package information; instantiated when not given
    import_legacy -- when True and run.cfg is missing, try to import
                     settings from a legacy SCRIPTS/runcase file
    """

    self.pkg = pkg
    if self.pkg is None:
        from code_saturne.cs_package import package
        # Bug fix: assign the fresh package to self.pkg (the previous
        # code only set a local variable, leaving self.pkg to None).
        self.pkg = package()

    self.run_conf = None
    self.path = path

    # Configuration-based information

    i_c = cs_run_conf.get_install_config_info(self.pkg)

    self.resource_name = cs_run_conf.get_resource_name(i_c)
    self.compute_builds = i_c['compute_builds']

    self.batch = cs_batch.batch(self.pkg)

    # Convert from legacy runcase if not updated yet
    # (delaying application of file changes to save).
    # In this case, the run_conf object is pre-loaded so that
    # the "save" operation can apply the (conversion) changes
    # even when the configuration file has not been loaded.

    self.runcase_path = None

    if self.path:
        if import_legacy and not os.path.isfile(self.path):
            dirname = os.path.dirname(self.path)
            if os.path.basename(dirname) == 'DATA':
                # Bug fix: look for SCRIPTS as a sibling of DATA; the
                # previous join(os.path.basename(dirname), 'SCRIPTS')
                # produced a relative 'DATA/SCRIPTS' path.
                dirname = os.path.join(os.path.dirname(dirname), 'SCRIPTS')
            runcase_path = os.path.join(dirname, 'runcase')
            if os.path.isfile(runcase_path):
                self.runcase_path = runcase_path

    if self.runcase_path:
        from code_saturne import cs_runcase
        runcase = cs_runcase.runcase(runcase_path,
                                     package=self.pkg)
        sections = runcase.run_conf_sections(resource_name=self.resource_name,
                                             batch_template=i_c['batch'])

        self.run_conf = cs_run_conf.run_conf(self.path,
                                             package=self.pkg,
                                             create_if_missing=True)
        for sn in sections:
            # Bug fix: 'run_conf' was referenced without 'self.',
            # raising NameError on legacy import.
            if not sn in self.run_conf.sections:
                self.run_conf.sections[sn] = {}
            for kw in sections[sn]:
                self.run_conf.sections[sn][kw] = sections[sn][kw]
def load_cfg_file(self, cfgfile=None):
    """Load coupling parameters from a run configuration file.

    cfgfile -- path to the configuration file; when None, no file is
               read and any previously loaded run_conf is reused.
    """

    self.cfgfile = cfgfile

    if self.cfgfile:
        from code_saturne import cs_run_conf
        self.run_conf = cs_run_conf.run_conf(self.cfgfile)

    # Guard clause: nothing to do without a loaded configuration.
    if not self.run_conf:
        return

    # One tab entry per coupled domain found in the configuration.
    for domain in self.run_conf.get_coupling_parameters():
        self.add_tab_data(domain)

    if self.data != []:
        self.file_loaded = True
        self.data_modified = False
def __build_run_cfg__(self, distrep, casename, coupling=None,
                      cathare_path=None):
    """
    Retrieve batch file for the current system
    Update batch file for the study

    distrep      -- destination directory for run.cfg
    casename     -- case name (unused here, kept for API consistency)
    coupling     -- optional coupling descriptor to record under [setup]
    cathare_path -- optional cathare install path, prepended to
                    LD_LIBRARY_PATH in the compute prologue
    """

    run_conf_path = os.path.join(distrep, 'run.cfg')

    # In case of a copied case, start from the parent's run.cfg
    # (best effort: ignore a missing reference file).
    if self.copy is not None:
        ref_run_conf_path = os.path.join(self.copy, 'DATA', 'run.cfg')
        try:
            shutil.copy(ref_run_conf_path, run_conf_path)
        except Exception:
            pass

    # Add info from parent in case of copy

    run_conf = cs_run_conf.run_conf(run_conf_path,
                                    package=self.package,
                                    create_if_missing=True)

    if coupling:
        run_conf.set('setup', 'coupling', coupling)

    # If a cathare LIBPATH is given, it is added to LD_LIBRARY_PATH.
    # This modification is needed for the dlopen of the cathare .so file
    if cathare_path:
        i_c = cs_run_conf.get_install_config_info(self.package)
        resource_name = cs_run_conf.get_resource_name(i_c)

        v25_3_line = "export v25_3=%s\n" % cathare_path
        # Bug fix: export LD_LIBRARY_PATH itself; the previous code
        # exported "LD_PATH_LIBRARY" (while still appending
        # ":$LD_LIBRARY_PATH"), which the dynamic loader ignores.
        new_line = "export LD_LIBRARY_PATH=$v25_3/%s/" + ":$LD_LIBRARY_PATH\n"

        add_lines = v25_3_line
        add_lines += new_line % ("lib")
        add_lines += new_line % ("ICoCo/lib")

        run_conf.set(resource_name, 'compute_prologue', add_lines)

    run_conf.save()
def __build_run_cfg__(self, distrep, casename):
    """
    Retrieve batch file for the current system
    Update batch file for the study
    """

    run_conf_path = os.path.join(distrep, 'run.cfg')

    # When the case is a copy, seed run.cfg from the parent case;
    # a missing reference file is silently ignored (best effort).
    if self.copy is not None:
        parent_run_conf = os.path.join(self.copy, 'DATA', 'run.cfg')
        try:
            shutil.copy(parent_run_conf, run_conf_path)
        except Exception:
            pass

    # Add info from parent in case of copy

    cfg = cs_run_conf.run_conf(run_conf_path,
                               package=self.package,
                               create_if_missing=True)
    cfg.save()
def generate_run_config_file(path, resource_name, r_c, s_c, pkg):
    """Generate a minimalist run configuration file in the execution
    directory for successive run steps.

    path          -- path of the run.cfg file to write
    resource_name -- name of the resource section to fill
    r_c           -- run configuration dictionary
    s_c           -- stage configuration dictionary
    pkg           -- package information (unused here)
    """

    sections = {}

    # Record the coupling descriptor when one is defined.
    coupling = r_c.get('coupling')
    if coupling:
        sections['setup'] = {'coupling': coupling}

    # Staging is already done at this point, hence 'stage': False.
    sections['run'] = {'id': r_c['run_id'],
                       'stage': False,
                       'initialize': s_c['initialize'],
                       'compute': s_c['run_solver'],
                       'finalize': s_c['save_results']}

    # Resource settings: keep only keys carrying a defined value.
    resource_keys = ('n_procs', 'n_threads', 'time_limit',
                     'job_parameters', 'job_header',
                     'compute_prologue', 'compute_epilogue')
    sections[resource_name] = {kw: r_c[kw] for kw in resource_keys
                               if r_c[kw]}

    run_conf = cs_run_conf.run_conf(None)
    run_conf.sections = sections
    run_conf.save(path, new=True)
def load(self):
    """
    Load the associated run_conf object if not already done, and
    initialize the run/job dictionaries and batch header from it.
    """

    if self.run_conf:
        return

    # Load or build run configuration

    self.run_conf = cs_run_conf.run_conf(self.path,
                                         package=self.pkg,
                                         create_if_missing=True)

    self.run_dict = {}
    self.job_dict = {}

    # Generic job running information (subset of possible "run" info)

    self.run_dict['id'] = self.run_conf.get('run', 'id')
    self.run_dict['compute_build'] = self.run_conf.get('run', 'compute_build')

    self.run_dict['initialize'] = self.run_conf.get_bool('run', 'initialize')
    self.run_dict['compute'] = None
    self.run_dict['finalize'] = None

    # Resource-specific info (subset of resource-based info, and batch)

    self.job_dict['n_procs'] = self.run_conf.get_int(self.resource_name,
                                                     'n_procs')
    self.job_dict['n_threads'] = self.run_conf.get_int(self.resource_name,
                                                       'n_threads')

    self.job_header_lines = None

    if self.batch.rm_type:
        job_header = self.run_conf.get(self.resource_name, 'job_header')
        if not job_header:
            # Rebuild the resource section to obtain a default header.
            self.run_conf.rebuild_resource()
            job_header = self.run_conf.get(self.resource_name, 'job_header')
        if job_header != None:
            self.job_header_lines = job_header.split(os.linesep)

    if self.job_header_lines != None:
        self.batch.parse_lines(self.job_header_lines)

    # Save initial values (to determine which are changed)

    # Bug fix: read the setup parameter file name from the run
    # configuration; the previous 'self.run_dict.get('setup', 'param')'
    # always returned the default string 'param', since run_dict is a
    # plain dict with no 'setup' key (dict.get's 2nd arg is a default).
    self.setup_ini = self.run_conf.get('setup', 'param')

    self.run_dict_ini = {}
    for k in self.run_dict:
        self.run_dict_ini[k] = self.run_dict[k]

    self.job_dict_ini = {}
    for k in self.job_dict:
        self.job_dict_ini[k] = self.job_dict[k]

    self.job_header_lines_ini = None
    if self.job_header_lines:
        self.job_header_lines_ini = list(self.job_header_lines)

    # Fix incompatible options if needed
    # (add entries to dictionnaries so __is_changed__ will detect update)

    if self.run_conf.get_bool('run', 'stage') == False:
        if not self.run_dict_ini['id']:
            self.run_dict_ini['stage'] = False
            self.run_dict['stage'] = None

    # Query info related to compute build

    self.updateComputeBuildInfo(self.run_dict['compute_build'])
def update_case(options, pkg):
    """Update one or more existing case directories to the current
    directory layout (DATA/SRC/RESU, run.cfg, local launcher).

    options -- parsed command-line options; case_names and verbose
               are read here
    pkg     -- package information

    Legacy SCRIPTS/runcase files are converted into run.cfg sections
    and removed afterwards.
    """
    topdir = os.getcwd()
    # NOTE(review): study_name is computed but never used below.
    study_name = os.path.basename(os.getcwd())

    i_c = cs_run_conf.get_install_config_info(pkg)
    resource_name = cs_run_conf.get_resource_name(i_c)

    for case in options.case_names:
        # Each iteration starts from the study top directory.
        os.chdir(topdir)

        if case == ".":
            case, staging_dir = get_case_dir()
            if not case:
                sys.stderr.write(" o Skipping '%s', which does not seem "
                                 "to be a case directory\n" % topdir)
                continue
            casename = os.path.basename(case)
        else:
            casename = case

        if options.verbose > 0:
            sys.stdout.write(" o Updating case '%s' paths...\n" % casename)

        datadir = os.path.join(pkg.get_dir("pkgdatadir"))

        os.chdir(case)

        # Write a local wrapper to main command
        data = 'DATA'
        if not os.path.isdir(data):
            os.mkdir(data)

        dataref_distpath = os.path.join(datadir, 'user')
        user = os.path.join(data, 'REFERENCE')

        # Only update user_scripts reference, not data
        # (we should try to deprecate the copying of reference data
        # or use the GUI to align it with the active options)
        if os.path.exists(user):
            abs_f = os.path.join(datadir, 'data', 'user', 'cs_user_scripts.py')
            shutil.copy(abs_f, user)
            unset_executable(user)

        # Remove obsolete GUI launch scripts, replaced by the local launcher.
        for s in ("SaturneGUI", "NeptuneGUI"):
            old_gui_script = os.path.join(data, s)
            if os.path.isfile(old_gui_script):
                os.remove(old_gui_script)

        # Rebuild launch script
        create_local_launcher(pkg, data)

        # User source files directory
        src = 'SRC'
        if not os.path.isdir(src):
            os.mkdir(src)

        user_ref_distpath = os.path.join(datadir, 'user_sources')
        for srcdir in ('REFERENCE', 'EXAMPLES', 'EXAMPLES_neptune_cfd'):
            if os.path.isdir(os.path.join(user_ref_distpath, srcdir)):
                copy_directory(os.path.join(user_ref_distpath, srcdir),
                               os.path.join(src, srcdir),
                               True)
                unset_executable(os.path.join(src, srcdir))

        # Results directory (only one for all instances)
        resu = 'RESU'
        if not os.path.isdir(resu):
            os.mkdir(resu)

        # Script directory (only one for all instances)
        run_conf_file = os.path.join(topdir, case, 'DATA', 'run.cfg')
        batch_file = os.path.join(topdir, case, 'SCRIPTS', 'runcase')
        if sys.platform.startswith('win'):
            batch_file = batch_file + '.bat'

        run_conf = cs_run_conf.run_conf(run_conf_file, package=pkg, rebuild=True)

        if os.path.isfile(batch_file):
            # Import legacy runcase settings into run.cfg sections,
            # then remove the runcase and (if empty) the SCRIPTS dir.
            runcase = cs_runcase.runcase(batch_file, package=pkg)
            sections = runcase.run_conf_sections(resource_name=resource_name,
                                                 batch_template=i_c['batch'])
            for sn in sections:
                if not sn in run_conf.sections:
                    run_conf.sections[sn] = {}
                for kw in sections[sn]:
                    run_conf.sections[sn][kw] = sections[sn][kw]
            os.remove(batch_file)
            scripts_dir = os.path.join(topdir, case, 'SCRIPTS')
            try:
                # rmdir only succeeds when SCRIPTS is empty; that is
                # the intent — keep any remaining user files.
                os.rmdir(scripts_dir)
            except Exception:
                pass

        run_conf.save()
def create_case(self, casename):
    """
    Create a case for a Code_Saturne study.

    casename -- name of the case directory to create

    Creates the DATA/SRC/RESU layout, a local launcher, and a run.cfg
    file; when self.copy is set, data and source files are copied from
    the reference case, including legacy runcase conversion.
    Changes the current working directory to the new case.
    """
    casedirname = casename

    datadir = self.package.get_dir("pkgdatadir")
    data_distpath = os.path.join(data_distpath, 'data') if False else os.path.join(datadir, 'data')

    if os.path.exists(casedirname):
        sys.stdout.write(" o Case '%s' already exists\n" % casename)
        return

    if self.verbose > 0:
        sys.stdout.write(" o Creating case '%s'...\n" % casename)

    os.mkdir(casedirname)
    os.chdir(casedirname)

    # A copy without reference data disables reference copying.
    if self.copy is not None:
        if not (os.path.exists(os.path.join(self.copy, 'DATA', 'REFERENCE')) \
                or os.path.exists(os.path.join(self.copy, 'SRC', 'REFERENCE'))):
            self.use_ref = False

    # Data directory
    data = 'DATA'
    os.mkdir(data)

    abs_setup_distpath = os.path.join(data_distpath, 'setup.xml')
    if os.path.isfile(abs_setup_distpath) and not self.copy:
        shutil.copy(abs_setup_distpath, data)
        unset_executable(data)

    if self.use_ref:
        thch_distpath = os.path.join(data_distpath, 'user')
        ref = os.path.join(data, 'REFERENCE')
        shutil.copytree(thch_distpath, ref)
        unset_executable(ref)

    # Write a wrapper for code and launching
    create_local_launcher(self.package, data)

    # Generate run.cfg file or copy one
    run_conf = None
    run_conf_path = os.path.join(data, 'run.cfg')

    if not self.copy:
        run_conf = cs_run_conf.run_conf(run_conf_path,
                                        package=self.package,
                                        rebuild=True)

    # User source files directory
    src = 'SRC'
    if self.use_ref:
        user_distpath = os.path.join(datadir, 'user_sources')
        shutil.copytree(user_distpath, src)
        unset_executable(src)
    else:
        os.mkdir(src)

    # Copy data and source files from another case
    if self.copy is not None:

        # Data files
        ref_data = os.path.join(self.copy, data)
        data_files = os.listdir(ref_data)

        for f in data_files:
            abs_f = os.path.join(ref_data, f)
            # Skip legacy GUI scripts and the package launcher itself.
            if os.path.isfile(abs_f) and \
               f not in ["SaturneGUI", "NeptuneGUI", self.package.name]:
                shutil.copy(abs_f, data)
                unset_executable(os.path.join(data, f))

        # Source files
        ref_src = os.path.join(self.copy, src)
        if os.path.exists(ref_src):
            src_files = os.listdir(ref_src)
        else:
            src_files = []

        for f in src_files:
            abs_f = os.path.join(ref_src, f)
            if os.path.isfile(abs_f):
                shutil.copy(abs_f, src)
                unset_executable(os.path.join(src, f))

        # If run.cfg was not present in initial case, generate it
        if not os.path.isfile(run_conf_path):
            run_conf = cs_run_conf.run_conf(run_conf_path,
                                            package=self.package,
                                            rebuild=True)

        # Runcase (for legacy structures)
        runcase_path = os.path.join(self.copy, 'SCRIPTS', 'runcase')
        if os.path.isfile(runcase_path):
            i_c = cs_run_conf.get_install_config_info(self.package)
            resource_name = cs_run_conf.get_resource_name(i_c)
            runcase = cs_runcase.runcase(runcase_path,
                                         package=self.package)
            sections = runcase.run_conf_sections(resource_name=resource_name,
                                                 batch_template=i_c['batch'])
            for sn in sections:
                if not sn in run_conf.sections:
                    run_conf.sections[sn] = {}
                for kw in sections[sn]:
                    run_conf.sections[sn][kw] = sections[sn][kw]

    # Now write run.cfg if not copied
    if run_conf != None:
        run_conf.save()

    # Results directory
    resu = 'RESU'
    if not os.path.isdir(resu):
        os.mkdir(resu)
def create_case(self, casename):
    """
    Create a case for a Code_Saturne study.

    casename -- name of the case directory to create

    Creates the DATA/SRC/RESU layout, reference data files, a local
    launcher and a run.cfg file; when self.copy is set, data and source
    files are copied from the reference case, including legacy runcase
    conversion. Changes the current working directory to the new case.
    """
    casedirname = casename

    if self.verbose > 0:
        sys.stdout.write(" o Creating case '%s'...\n" % casename)

    datadir = self.package.get_dir("pkgdatadir")
    data_distpath = os.path.join(datadir, 'data')

    try:
        os.mkdir(casedirname)
    # NOTE(review): bare except exits silently with no diagnostic;
    # catching OSError and printing the reason would be clearer.
    except:
        sys.exit(1)

    os.chdir(casedirname)

    # A copy without reference data disables reference copying.
    if self.copy is not None:
        if not (os.path.exists(os.path.join(self.copy, 'DATA', 'REFERENCE')) \
                or os.path.exists(os.path.join(self.copy, 'SRC', 'REFERENCE'))):
            self.use_ref = False

    # Data directory
    data = 'DATA'
    os.mkdir(data)

    abs_setup_distpath = os.path.join(data_distpath, 'setup.xml')
    if os.path.isfile(abs_setup_distpath) and not self.copy:
        shutil.copy(abs_setup_distpath, data)
        unset_executable(data)

    if self.use_ref:
        # Thermochemistry data files and user scripts reference.
        thch_distpath = os.path.join(data_distpath, 'thch')
        ref = os.path.join(data, 'REFERENCE')
        os.mkdir(ref)
        for f in ['dp_C3P', 'dp_C3PSJ', 'dp_C4P', 'dp_ELE',
                  'dp_FUE', 'dp_transformers', 'meteo']:
            abs_f = os.path.join(thch_distpath, f)
            if os.path.isfile(abs_f):
                shutil.copy(abs_f, ref)
                unset_executable(ref)
        abs_f = os.path.join(datadir, 'cs_user_scripts.py')
        shutil.copy(abs_f, ref)
        unset_executable(ref)

    # Write a wrapper for code and launching
    create_local_launcher(self.package, data)

    # Generate run.cfg file or copy one
    run_conf = None
    run_conf_path = os.path.join(data, 'run.cfg')

    if not self.copy:
        run_conf = cs_run_conf.run_conf(run_conf_path,
                                        package=self.package,
                                        rebuild=True)

    # User source files directory
    src = 'SRC'
    os.mkdir(src)

    if self.use_ref:
        user_distpath = os.path.join(datadir, 'user')
        user_examples_distpath = os.path.join(datadir, 'user_examples')

        user = os.path.join(src, 'REFERENCE')
        user_examples = os.path.join(src, 'EXAMPLES')
        shutil.copytree(user_distpath, user)
        shutil.copytree(user_examples_distpath, user_examples)

        # Additional module data (e.g. neptune_cfd) overlays the
        # reference and examples directories.
        add_datadirs = []
        if self.package.name == 'neptune_cfd':
            add_datadirs.append(os.path.join(self.package.get_dir("datadir"),
                                             self.package.name))

        for d in add_datadirs:
            user_distpath = os.path.join(d, 'user')
            user_examples_distpath = os.path.join(d, 'user_examples')
            if os.path.isdir(user_distpath):
                s_files = os.listdir(user_distpath)
                for f in s_files:
                    shutil.copy(os.path.join(user_distpath, f), user)
            if os.path.isdir(user_examples_distpath):
                s_files = os.listdir(user_examples_distpath)
                for f in s_files:
                    shutil.copy(os.path.join(user_examples_distpath, f),
                                user_examples)

        unset_executable(user)
        unset_executable(user_examples)

    # Copy data and source files from another case
    if self.copy is not None:

        # Data files
        ref_data = os.path.join(self.copy, data)
        data_files = os.listdir(ref_data)

        for f in data_files:
            abs_f = os.path.join(ref_data, f)
            # Skip the GUI script and the package launcher itself.
            if os.path.isfile(abs_f) and \
               f not in [self.package.guiname, self.package.name]:
                shutil.copy(abs_f, data)
                unset_executable(os.path.join(data, f))

        # Source files
        ref_src = os.path.join(self.copy, src)
        if os.path.exists(ref_src):
            src_files = os.listdir(ref_src)
        else:
            src_files = []

        for f in src_files:
            abs_f = os.path.join(ref_src, f)
            if os.path.isfile(abs_f):
                shutil.copy(abs_f, src)
                unset_executable(os.path.join(src, f))

        # If run.cfg was not present in initial case, generate it
        if not os.path.isfile(run_conf_path):
            run_conf = cs_run_conf.run_conf(run_conf_path,
                                            package=self.package,
                                            rebuild=True)

        # Runcase (for legacy structures)
        runcase_path = os.path.join(self.copy, 'SCRIPTS', 'runcase')
        if os.path.isfile(runcase_path):
            i_c = cs_run_conf.get_install_config_info(self.package)
            resource_name = cs_run_conf.get_resource_name(i_c)
            runcase = cs_runcase.runcase(runcase_path,
                                         package=self.package)
            sections = runcase.run_conf_sections(
                resource_name=resource_name,
                batch_template=i_c['batch'])
            for sn in sections:
                if not sn in run_conf.sections:
                    run_conf.sections[sn] = {}
                for kw in sections[sn]:
                    run_conf.sections[sn][kw] = sections[sn][kw]

    # Now write run.cfg if not copied
    if run_conf != None:
        run_conf.save()

    # Results directory
    resu = 'RESU'
    if not os.path.isdir(resu):
        os.mkdir(resu)
def read_run_config_file(i_c, r_c, s_c, pkg, run_conf=None):
    """
    Read the run configuration file and update the run (r_c) and
    stage (s_c) dictionaries in place.

    i_c      -- install configuration dictionary
    r_c      -- run configuration dictionary (updated in place)
    s_c      -- stage configuration dictionary (updated in place)
    pkg      -- package information
    run_conf -- optional pre-loaded run_conf object, reused when its
                path matches the expected one
    """

    # Determine where run.cfg is expected.
    run_config_path = ""
    if r_c['staging_dir']:
        run_config_path = os.path.join(r_c['staging_dir'], 'run.cfg')
    elif r_c['casedir']:
        casedir = r_c['casedir']
        if r_c['coupled_domains'] != []:
            # Coupled cases keep run.cfg at the top of the case.
            run_config_path = os.path.join(casedir, 'run.cfg')
        else:
            run_config_path = os.path.join(casedir, 'DATA', 'run.cfg')

    # Ensure some keys are set in all cases to simplify future tests

    run_conf_kw = ('job_parameters', 'job_header',
                   'run_prologue', 'run_epilogue',
                   'compute_prologue', 'compute_epilogue')

    for kw in run_conf_kw:
        if not kw in r_c:
            r_c[kw] = None

    if run_conf == None:
        if not os.path.isfile(run_config_path):
            print('Warning:', file = sys.stderr)
            print(' \'run.cfg\' not found in case directory; case update recommended.',
                  file = sys.stderr)
            print('', file = sys.stderr)
            return

    # Only load run.cfg if not already done
    if run_conf and s_c['stage'] != False:
        if not run_conf.path == run_config_path:
            run_conf = None
    if not run_conf:
        run_conf = cs_run_conf.run_conf(run_config_path, package=pkg)

    # Case path if not determined yet
    # (when destination or staging directory is outside case directory)
    if 'paths' in run_conf.sections:
        if not r_c['casedir']:
            if 'case' in run_conf.sections['paths']:
                r_c['casedir'] = run_conf.sections['paths']['case']
        if not r_c['dest_dir']:
            if 'top_results_directory' in run_conf.sections['paths']:
                r_c['dest_dir'] = run_conf.sections['paths']['top_results_directory']

    # Parameters file
    for kw in ('param',):
        if run_conf.get('setup', kw):
            r_c[kw] = run_conf.get('setup', kw)

    # Run id
    if not r_c['run_id']:
        r_c['run_id'] = run_conf.get('run', 'id')

    if not r_c['force_id']:
        r_c['force_id'] = run_conf.get_bool('run', 'force_id')

    # Compute stages
    update_run_steps(s_c, run_conf)

    # Resources: try to find a matching section, using
    # resource_name, batch, and job_defaults in decreasing priority.

    if not r_c['compute_build']:
        # Consistency fix: 'compute_build' is a build name or path, so
        # read it as a string with get() (as load() does), not get_bool().
        r_c['compute_build'] = run_conf.get('run', 'compute_build')

    resource_name = i_c['resource_name']
    if not resource_name or not resource_name in run_conf.sections:
        resource_name = i_c['batch']
        if resource_name:
            resource_name = os.path.basename(resource_name).lower()
    if not resource_name or not resource_name in run_conf.sections:
        resource_name = 'job_defaults'

    run_conf_r = None
    if resource_name in run_conf.sections:
        run_conf_r = run_conf.sections[resource_name]

    if run_conf_r:
        for kw in ('n_procs', 'n_threads', 'time_limit'):
            if kw in r_c:
                if r_c[kw] != None:
                    continue
            r_c[kw] = None
            v = run_conf.get_int(resource_name, kw)
            if v:
                r_c[kw] = v

    if run_conf_r:
        for kw in run_conf_kw:
            if r_c[kw] != None:
                continue
            if kw in run_conf_r:
                r_c[kw] = run_conf_r[kw]

    # Handle case where files are used
    if not (r_c['job_parameters'] or r_c['job_header']):
        kw = 'job_header_file'
        f_path = None
        # Bug fix: read the file name from the resource section dict;
        # the previous code indexed the run_conf_kw *tuple* with a
        # string key, which can never succeed.
        if run_conf_r and kw in run_conf_r:
            f_path = run_conf_r[kw]
        if f_path:
            if not os.path.isabs(f_path):
                # Bug fix: resolve relative to the directory containing
                # run.cfg (dirname), not to the file's base name.
                f_prefix = os.path.dirname(run_config_path)
                f_path = os.path.join(f_prefix, f_path)
            if os.path.isfile(f_path):
                # Bug fix: use the open() builtin with a context manager;
                # "file.open" raised NameError and "f.close" (without
                # parentheses) never closed the file.
                with open(f_path) as f:
                    r_c['job_header'] = f.read()
            else:
                err_str = """warning in run.cfg: [{0}] {1} = {2} "{3}" not present (use defaults)"""
                # Bug fix: report the configured path; r_c never holds a
                # 'job_header_file' key, so r_c[kw] raised KeyError.
                print(err_str.format(resource_name, kw, f_path, f_path),
                      file = sys.stderr)
                r_c['job_header'] = None
        elif 'jobmanager' in r_c:
            err_str = 'warning in run.cfg: [{0}] {1} = {2}; not currently handled (ignored)'
            # Bug fix: same KeyError on r_c[kw]; use a safe lookup.
            print(err_str.format(resource_name, kw, r_c.get(kw)),
                  file = sys.stderr)
            r_c[kw] = None
def process_options(options, pkg):
    """
    Process the passed command line arguments.

    options -- parsed command-line options
    pkg     -- package information

    Returns (r_c, s_c, run_conf): the run configuration dictionary,
    the stage filter dictionary, and the loaded run_conf object
    (or None when no run.cfg is present in the current directory).

    Raises RunCaseError when 'stage' is disabled without a run id, or
    when -p/--param conflicts with coupled domains from run.cfg.
    """

    # Stages to run (if no filter given, all are done).

    s_c = {'stage': options.stage,
           'initialize': options.initialize,
           'compute': options.compute,
           'finalize': options.finalize}

    filter_stages = False
    for k in s_c:
        if s_c[k]:
            filter_stages = True

    # Try to determine case directory

    casedir = None
    staging_dir = None
    param = None
    compute_build = None

    # Also check for possible settings file

    run_id = options.id
    run_conf = None
    run_config_path = os.path.join(os.getcwd(), 'run.cfg')
    if os.path.isfile(run_config_path):
        run_conf = cs_run_conf.run_conf(run_config_path, package=pkg)
        if not run_id and not filter_stages:
            if 'run' in run_conf.sections and not filter_stages:
                # No explicit stage filter: take stages (and possibly
                # the run id) from the configuration file.
                update_run_steps(s_c, run_conf)
                if s_c['stage'] == False:
                    if 'id' in run_conf.sections['run']:
                        run_id = run_conf.sections['run']['id']

    if s_c['stage'] == False and not run_id:
        err_str = os.linesep + os.linesep + 'Error:' + os.linesep
        err_str += 'Incompatible options in the run.cfg file or command arguments'
        err_str += os.linesep
        err_str += 'When the "stage" step is set to False, a run id is required.'
        raise cs_case_domain.RunCaseError(err_str)

    # Check for multiple domain case
    # Kept the 'coupling' file def for the definition of the case_dir function
    coupling = None
    coupled_domains = []

    if run_conf:
        coupled_domains = run_conf.get_coupling_parameters()

    if coupled_domains != []:
        coupling = run_config_path

    if coupling and options.param:
        cmd_line = sys.argv[0]
        for arg in sys.argv[1:]:
            cmd_line += ' ' + arg
        err_str = os.linesep + os.linesep + 'Error:' + os.linesep
        err_str += cmd_line + os.linesep
        err_str += '-p/--param option is incompatible with '
        err_str += '"coupled_domains" option defined within the run.cfg file.'
        raise cs_case_domain.RunCaseError(err_str)

    casedir, staging_dir = cs_case.get_case_dir(case=options.case,
                                                param=options.param,
                                                coupling=coupling,
                                                id=run_id)

    # NOTE(review): this path only prints an error and continues, so
    # r_c['casedir'] may be None — presumably handled by callers; verify.
    if casedir == None and staging_dir == None:
        cmd_line = sys.argv[0]
        for arg in sys.argv[1:]:
            cmd_line += ' ' + arg
        print('Error:', file = sys.stderr)
        print(cmd_line, file = sys.stderr)
        print('run from directory \"' + str(os.getcwd()) + '\",',
              file = sys.stderr)
        print('which does not seem to be inside a case directory.',
              file = sys.stderr)

    param = options.param

    compute_build = options.compute_build

    if not options.force:
        force_id = False
    else:
        force_id = True

    # Return associated dictionary (also force number of ranks and threads)

    r_c = {'casedir': casedir,
           'dest_dir': options.dest,
           'staging_dir': staging_dir,
           'run_id': run_id,
           'param': param,
           'coupled_domains': coupled_domains,
           'id_prefix': options.id_prefix,
           'id_suffix': options.id_suffix,
           'suggest_id': options.suggest_id,
           'force_id': force_id,
           'n_procs': options.nprocs,
           'n_threads': options.nthreads,
           'time_limit': None,
           'compute_build': compute_build}

    return r_c, s_c, run_conf
def process_cmd_line(argv, pkg):
    """
    Process the passed command line arguments.

    argv -- argument list to parse (excluding the program name)
    pkg  -- package information

    Returns (r_c, s_c, run_conf): the run configuration dictionary,
    the stage filter dictionary, and the loaded run_conf object
    (or None when no run.cfg is present in the current directory).

    Raises RunCaseError when --coupling and -p/--param are both given.
    """

    if sys.argv[0][-3:] == '.py':
        usage = "usage: %prog [options]"
    else:
        usage = "usage: %prog run [options]"

    parser = OptionParser(usage=usage)

    parser.add_option("--compute-build", dest="compute_build", type="string",
                      metavar="<build>",
                      help="base name or full path to the compute build")

    parser.add_option("-n", "--nprocs", dest="nprocs", type="int",
                      metavar="<nprocs>",
                      help="number of MPI processes for the computation")

    parser.add_option("--nt", "--threads-per-task", dest="nthreads", type="int",
                      help="number of OpenMP threads per task")

    parser.add_option("-p", "--param", dest="param", type="string",
                      metavar="<param>",
                      help="path or name of the parameters file")

    parser.add_option("--case", dest="case", type="string",
                      metavar="<case>",
                      help="path to the case's directory")

    parser.add_option("--coupling", dest="coupling", type="string",
                      metavar="<coupling>",
                      help="path or name of the coupling descriptor file")

    parser.add_option("--id", dest="id", type="string",
                      metavar="<id>",
                      help="use the given run id")

    parser.add_option("--id-prefix", dest="id_prefix", type="string",
                      metavar="<prefix>",
                      help="prefix the run id with the given string")

    parser.add_option("--id-suffix", dest="id_suffix", type="string",
                      metavar="<suffix>",
                      help="suffix the run id with the given string")

    parser.add_option("--suggest-id", dest="suggest_id",
                      action="store_true",
                      help="suggest a run id for the next run")

    parser.add_option("--force", dest="force",
                      action="store_true",
                      help="run the data preparation stage even if " \
                           + "the matching execution directory exists")

    parser.add_option("--stage", dest="stage",
                      action="store_true",
                      help="stage data prior to preparation and execution")

    parser.add_option("--initialize", "--preprocess", dest="initialize",
                      action="store_true",
                      help="run the data preparation stage")

    parser.add_option("--compute", "--execute", dest="compute",
                      action="store_true",
                      help="run the compute stage")

    parser.add_option("--finalize", dest="finalize",
                      action="store_true",
                      help="run the results copy/cleanup stage")

    parser.set_defaults(compute_build=False)
    parser.set_defaults(suggest_id=False)
    parser.set_defaults(stage=None)
    parser.set_defaults(initialize=None)
    parser.set_defaults(compute=None)
    parser.set_defaults(finalize=None)
    parser.set_defaults(param=None)
    parser.set_defaults(coupling=None)
    parser.set_defaults(domain=None)
    parser.set_defaults(id=None)
    parser.set_defaults(nprocs=None)
    parser.set_defaults(nthreads=None)

    # Note: we could use args to pass a calculation status file as an argument,
    # which would allow pursuing the later calculation stages.

    (options, args) = parser.parse_args(argv)

    # Stages to run (if no filter given, all are done).

    s_c = {'stage': options.stage,
           'initialize': options.initialize,
           'compute': options.compute,
           'finalize': options.finalize}

    filter_stages = False
    for k in s_c:
        if s_c[k]:
            filter_stages = True

    # Try to determine case directory

    casedir = None
    staging_dir = None
    param = None
    compute_build = None

    if options.coupling and options.param:
        # Multiple domain case
        cmd_line = sys.argv[0]
        for arg in sys.argv[1:]:
            cmd_line += ' ' + arg
        err_str = 'Error:' + os.linesep
        err_str += cmd_line + os.linesep
        err_str += '--coupling and -p/--param options are incompatible.'
        raise RunCaseError(err_str)

    # Also check for possible settings file

    coupling = options.coupling

    run_id = options.id
    run_conf = None
    run_config_path = os.path.join(os.getcwd(), 'run.cfg')
    if os.path.isfile(run_config_path):
        run_conf = cs_run_conf.run_conf(run_config_path, package=pkg)
        if not coupling:
            # Fall back to the coupling descriptor from run.cfg.
            if 'setup' in run_conf.sections:
                if 'coupling' in run_conf.sections['setup']:
                    coupling = run_conf.sections['setup']['coupling']
        if not run_id and not filter_stages:
            if 'run' in run_conf.sections and not filter_stages:
                # No explicit stage filter: take stages (and possibly
                # the run id) from the configuration file.
                update_run_steps(s_c, run_conf)
                if s_c['stage'] == False:
                    if 'id' in run_conf.sections['run']:
                        run_id = run_conf.sections['run']['id']

    casedir, staging_dir = cs_case.get_case_dir(case=options.case,
                                                param=options.param,
                                                coupling=coupling,
                                                id=run_id)

    # NOTE(review): on this error path casedir stays None but execution
    # continues — the setup.xml check below would then raise on
    # os.path.join(casedir, ...); confirm callers never reach it.
    if casedir == None:
        cmd_line = sys.argv[0]
        for arg in sys.argv[1:]:
            cmd_line += ' ' + arg
        print('Error:', file=sys.stderr)
        print(cmd_line, file=sys.stderr)
        print('run from directory \"' + str(os.getcwd()) + '\",', file=sys.stderr)
        print('which does not seem to be inside a case directory.', file=sys.stderr)

    param = options.param

    # If no parameter file passed, and a setup.xml is present in DATA, run it
    if param is None:
        has_setup = os.path.isfile(os.path.join(casedir, 'DATA', 'setup.xml'))
        if has_setup:
            param = "setup.xml"

    compute_build = options.compute_build

    if not options.force:
        force_id = False
    else:
        force_id = True

    # Return associated dictionary (also force number of ranks and threads)

    r_c = {'casedir': casedir,
           'staging_dir': staging_dir,
           'run_id': run_id,
           'param': param,
           'coupling': coupling,
           'id_prefix': options.id_prefix,
           'id_suffix': options.id_suffix,
           'suggest_id': options.suggest_id,
           'force_id': force_id,
           'n_procs': options.nprocs,
           'n_threads': options.nthreads,
           'time_limit': None,
           'compute_build': compute_build}

    return r_c, s_c, run_conf
def coupling(package,
             domains,
             casedir,
             dest_dir=None,
             staging_dir=None,
             verbose=True,
             package_compute=None):
    """
    Build a coupled-case object from a list of domain definitions.

    package         -- main code_saturne package object
    domains         -- list of dictionaries, one per coupled domain; each
                       must provide at least 'domain' and 'solver' keys
    casedir         -- top-level case directory
    dest_dir        -- optional destination (results) directory
    staging_dir     -- optional staging directory
    verbose         -- if True, print a summary of the coupled domains
    package_compute -- optional alternate compute (back-end) package

    Returns the assembled case object.
    Raises RunCaseError when the coupling definition is incomplete or a
    domain object cannot be created.
    """

    use_saturne = False
    use_syrthes = False
    use_neptune = False
    use_cathare = False
    use_py_code = False

    # Use alternate compute (back-end) package if defined

    # NOTE(review): 'config' is read but not used afterwards in this
    # function; kept for now in case reading has a required side effect.
    config = configparser.ConfigParser()
    config.read(package.get_global_configfile())

    if package_compute == None:
        package_compute = package

    # Initialize code domains
    sat_domains = []
    syr_domains = []
    nep_domains = []
    cat_domains = []
    py_domains = []

    if domains == None:
        raise RunCaseError('No domains defined.')

    for d in domains:

        domain_s = d.get('domain')
        solver_s = d.get('solver')
        script_s = None
        param_s = None

        if (domain_s == None):
            msg = 'Check your coupling definition.\n'
            msg += 'domain key is missing.'
            raise RunCaseError(msg)

        # Fix: report a missing 'solver' key with a clear error instead of
        # failing with AttributeError on None.lower().
        if solver_s == None:
            msg = 'Check your coupling definition.\n'
            msg += 'solver key is missing for domain: ' + domain_s + '.\n'
            raise RunCaseError(msg)
        solver_s = solver_s.lower()

        # First, determine parameter file to use for code_saturne
        # or associated modules (ensuring backwards compatibility)

        if solver_s in package.config.solver_modules.keys() \
           or solver_s == 'cathare':

            param = None

            script = d.get('script')      # v6.1 and older structure
            if script != None:
                if script[-4:] == '.xml':
                    param = script
            else:
                param = d.get('param')
                if not param:
                    param = d.get('paramfile')    # older_name

            s_dir = staging_dir
            if not s_dir:
                s_dir = casedir
            if not s_dir:
                s_dir = os.getcwd()

            # If still undetermined, query the domain's own run.cfg
            if param == None:
                run_conf_path = os.path.join(s_dir, domain_s, 'DATA', 'run.cfg')
                if os.path.isfile(run_conf_path):
                    run_conf = cs_run_conf.run_conf(run_conf_path)
                    if 'setup' in run_conf.sections:
                        if 'param' in run_conf.sections['setup']:
                            param = run_conf.sections['setup']['param']

            if script and not param:    # v6.1 and older case structure
                runcase_path = os.path.join(s_dir, domain_s, 'SCRIPTS', script)
                if os.path.isfile(runcase_path):
                    try:
                        runcase = cs_runcase.runcase(runcase_path)
                        param = runcase.get_parameters()
                    except Exception:
                        err_str = 'Cannot read ' + d.get('solver') \
                                  + ' script: ' + runcase_path
                        raise RunCaseError(err_str)

            # Remark: if param is undefined, the code_saturne domain will
            # default to 'setup.xml' if present.

            d['param'] = param

        # Now build case domain for the different solvers:

        if solver_s in package.config.solver_modules.keys():

            dom = domain(package,
                         package_compute=package_compute,
                         name=domain_s,
                         param=d.get('param'),
                         n_procs_weight=d.get('n_procs_weight'),
                         n_procs_min=d.get('n_procs_min'),
                         n_procs_max=d.get('n_procs_max'))

            if solver_s == 'code_saturne':
                use_saturne = True
                sat_domains.append(dom)
            elif solver_s == 'neptune_cfd':
                use_neptune = True
                nep_domains.append(dom)

        elif solver_s == 'syrthes':

            param_s = d.get('param')
            if param_s == None:
                param_s = d.get('script')   # older name
            if (param_s == None):
                msg = 'Check your coupling definition.\n'
                msg += 'parameters file selection is missing for domain: '
                msg += domain_s + '.\n'
                raise RunCaseError(msg)

            try:
                # Fix: pass the validated param_s (which may have come from
                # the legacy 'script' key) rather than d.get('param'), which
                # is None for legacy definitions.
                dom = syrthes_domain(package,
                                     cmd_line=d.get('opt'),
                                     name=domain_s,
                                     param=param_s,
                                     n_procs_weight=d.get('n_procs_weight'),
                                     n_procs_min=d.get('n_procs_min'),
                                     n_procs_max=d.get('n_procs_max'),
                                     verbose=verbose)
            except Exception:
                # Fix: wrap d.get('opt') in str() so a missing 'opt' key
                # (None) does not raise TypeError and mask the real error.
                err_str = 'Cannot create SYRTHES domain. Opt = ' \
                          + str(d.get('opt')) + '\n'
                err_str += ' domain = ' + domain_s + '\n'
                err_str += ' n_procs_weight = ' \
                           + str(d.get('n_procs_weight')) + '\n'
                raise RunCaseError(err_str)

            use_syrthes = True
            syr_domains.append(dom)

        elif solver_s == 'cathare':

            # Current version using Cathare2: the cathare case is converted to a
            # .so library which is opened and launched by a neptune_cfd executable
            dom = cathare_domain(package,
                                 package_compute=package_compute,
                                 name=domain_s,
                                 param=d.get('param'),
                                 n_procs_weight=None,
                                 n_procs_min=1,
                                 n_procs_max=1,
                                 cathare_case_file=d.get('cathare_case_file'),
                                 neptune_cfd_dom=d.get('neptune_cfd_domain'))

            use_cathare = True
            cat_domains.append(dom)

        elif solver_s == 'python_code':

            script_s = d.get('script')
            if (script_s == None):
                msg = 'Check your coupling definition.\n'
                msg += 'Python script file selection is missing for domain: '
                msg += domain_s + '.\n'
                raise RunCaseError(msg)

            # Generic Code_Saturne/Python Script coupling
            # The python script can contain any MPI compatible code or supervisor

            try:
                dom = python_domain(package,
                                    name=domain_s,
                                    cmd_line=d.get('command_line'),
                                    script_name=script_s)
            except Exception:
                err_str = 'Cannot create Python code domain.\n'
                err_str += ' domain = ' + domain_s + '\n'
                err_str += ' script = ' + str(d.get('script')) + '\n'
                raise RunCaseError(err_str)

            use_py_code = True
            py_domains.append(dom)

        else:
            err_str = 'Unknown code type : ' + d.get('solver') + '.\n'
            raise RunCaseError(err_str)

    # Now handle case for the corresponding calculation domain(s).

    c = case(package,
             package_compute=package_compute,
             case_dir=casedir,
             dest_dir=dest_dir,
             staging_dir=staging_dir,
             domains=sat_domains + nep_domains + cat_domains,
             syr_domains=syr_domains,
             py_domains=py_domains)

    if verbose:
        msg = ' Coupling execution between: \n'
        if use_saturne == True:
            msg += ' o code_saturne [' + str(len(sat_domains)) + ' domain(s)];\n'
        if use_syrthes == True:
            msg += ' o SYRTHES [' + str(len(syr_domains)) + ' domain(s)];\n'
        if use_neptune == True:
            msg += ' o neptune_cfd [' + str(len(nep_domains)) + ' domain(s)];\n'
        if use_cathare == True:
            msg += ' o CATHARE2 [' + str(len(cat_domains)) + ' domain(s)];\n'
        if use_py_code == True:
            msg += ' o Python Script [' + str(len(py_domains)) + ' domain(s)];\n'
        sys.stdout.write(msg + '\n')

    return c
def read_run_config_file(i_c, r_c, s_c, pkg, run_conf=None):
    """
    Read the run.cfg file matching the case and update the run
    configuration (r_c) and stage selection (s_c) dictionaries.

    i_c      -- install configuration info dictionary
    r_c      -- run configuration dictionary (updated in place)
    s_c      -- stages dictionary (updated in place)
    pkg      -- package object
    run_conf -- optional pre-loaded run_conf object; reloaded when its
                path does not match the expected run.cfg location
    """

    casedir = r_c['casedir']

    # Coupled cases keep run.cfg at the case top level; single-domain
    # cases keep it (and the default setup.xml) under DATA.
    run_config_path = ""
    setup_default_path = ""
    if r_c['coupled_domains'] != []:
        run_config_path = os.path.join(casedir, 'run.cfg')
    else:
        run_config_path = os.path.join(casedir, 'DATA', 'run.cfg')
        setup_default_path = os.path.join(casedir, 'DATA', 'setup.xml')

    # Ensure some keys are set in all cases to simplify future tests

    run_conf_kw = ('job_parameters', 'job_header',
                   'run_prologue', 'run_epilogue',
                   'compute_prologue', 'compute_epilogue')

    for kw in run_conf_kw:
        if not kw in r_c:
            r_c[kw] = None

    if run_conf == None:
        if not os.path.isfile(run_config_path):
            print('Warning:', file=sys.stderr)
            print(' \'run.cfg\' not found in case directory; case update recommended.',
                  file=sys.stderr)
            print('', file=sys.stderr)
            return

    # Only load run.cfg if not already done
    if run_conf and s_c['stage'] != False:
        if not run_conf.path == run_config_path:
            run_conf = None
    if not run_conf:
        run_conf = cs_run_conf.run_conf(run_config_path, package=pkg)

    # Parameters file

    for kw in ('param', ):
        if run_conf.get('setup', kw):
            r_c[kw] = run_conf.get('setup', kw)

    if not r_c['param'] and setup_default_path:
        if os.path.isfile(setup_default_path):
            r_c['param'] = setup_default_path

    # Print warning if setup.xml exists but another xml was provided
    if r_c['param'] and setup_default_path:
        if os.path.basename(r_c['param']) != os.path.basename(setup_default_path):
            if os.path.isfile(setup_default_path):
                msg = '*****************************************************\n'
                msg += 'Warning:\n'
                msg += ' Both %s and %s exist in the DATA folder.\n' % \
                       (os.path.basename(r_c['param']),
                        os.path.basename(setup_default_path))
                msg += ' %s will be used for the computation.\n' % \
                       os.path.basename(r_c['param'])
                msg += ' Be aware that to follow code_saturne best practices\n'
                msg += ' only one of the two should be present in DATA.\n'
                msg += '*****************************************************\n'
                print(msg, file=sys.stderr)

    # Check that an XML file was provided
    if not r_c['param'] and setup_default_path:
        msg = 'Remark:\n'
        msg += ' No setup.xml file was provided in the DATA folder.\n'
        msg += ' Default settings will be used.\n'
        print(msg, file=sys.stderr)

    # Run id

    if not r_c['run_id']:
        r_c['run_id'] = run_conf.get('run', 'id')

    if not r_c['force_id']:
        r_c['force_id'] = run_conf.get_bool('run', 'force_id')

    # Compute stages

    update_run_steps(s_c, run_conf)

    # Resources: try to find a matching section, using
    # resource_name, batch, and job_defaults in decreasing priority.

    if not r_c['compute_build']:
        r_c['compute_build'] = run_conf.get_bool('run', 'compute_build')

    resource_name = i_c['resource_name']
    if not resource_name or not resource_name in run_conf.sections:
        resource_name = i_c['batch']
        if resource_name:
            resource_name = os.path.basename(resource_name).lower()
    if not resource_name or not resource_name in run_conf.sections:
        resource_name = 'job_defaults'

    run_conf_r = None
    if resource_name in run_conf.sections:
        run_conf_r = run_conf.sections[resource_name]

    if run_conf_r:
        for kw in ('n_procs', 'n_threads', 'time_limit'):
            if kw in r_c:
                if r_c[kw] != None:
                    continue
            r_c[kw] = None
            v = run_conf.get_int(resource_name, kw)
            if v:
                r_c[kw] = v

    if run_conf_r:
        for kw in run_conf_kw:
            if r_c[kw] != None:
                continue
            if kw in run_conf_r:
                r_c[kw] = run_conf_r[kw]

    # Handle case where files are used

    if not (r_c['job_parameters'] or r_c['job_header']):
        kw = 'job_header_file'
        f_path = None
        # Fix: read the file name from the matched resource section; the
        # previous code indexed the run_conf_kw tuple of key names with a
        # string (dead/broken branch).
        if run_conf_r and kw in run_conf_r:
            f_path = run_conf_r[kw]
        f_path_cfg = f_path   # value as configured, for diagnostics
        if f_path:
            if not os.path.isabs(f_path):
                # Fix: resolve relative paths against the directory holding
                # run.cfg (dirname), not against the file name (basename).
                f_prefix = os.path.dirname(run_config_path)
                f_path = os.path.join(f_prefix, f_path)
            if os.path.isfile(f_path):
                # Fix: 'file.open' does not exist in Python 3, and the old
                # 'f.close' never called close; use a context manager.
                with open(f_path) as f:
                    r_c['job_header'] = f.read()
            else:
                err_str = """warning in run.cfg: [{0}] {1} = {2} "{3}" not present (use defaults)"""
                # Fix: r_c has no 'job_header_file' key, so r_c[kw] raised
                # KeyError here; report the configured value instead.
                print(err_str.format(resource_name, kw, f_path_cfg, f_path),
                      file=sys.stderr)
                r_c['job_header'] = None
        elif 'jobmanager' in r_c:
            err_str = 'warning in run.cfg: [{0}] {1} = {2}; not currently handled (ignored)'
            # Fix: use .get() — 'job_header_file' is not seeded in r_c.
            print(err_str.format(resource_name, kw, r_c.get(kw)),
                  file=sys.stderr)
            r_c[kw] = None