def get_value(self, item, attribute=None, resolved=True, subgroup=None):
    """Look up *item* across the env files, entry-id files first.

    Resolution of $VAR references and type conversion are done here in
    the case object rather than inside the individual env file.
    Returns None when no env file provides a value.
    """
    # Entry-id files: a string hit is resolved and converted to the
    # variable's declared type before being returned.
    for ef in self._env_entryid_files:
        found = ef.get_value(item, attribute, resolved=False, subgroup=subgroup)
        if found is None:
            continue
        if resolved and type(found) is str:
            found = self.get_resolved_value(found)
            found = convert_to_type(found, ef.get_type_info(item), item)
        return found
    # Generic files: resolved hits come back as plain strings; no type
    # conversion is attempted for these files.
    for ef in self._env_generic_files:
        found = ef.get_value(item, attribute, resolved=False, subgroup=subgroup)
        if found is None:
            continue
        if resolved and type(found) is str:
            return self.get_resolved_value(found)
        return found
    # Nothing matched in any env file.
    return None
def get_values(self, item, attribute=None, resolved=True, subgroup=None):
    """Return all values for *item* as a list, searching the entry-id
    env files first and the generic env files second.

    Resolution of $VAR references and (for entry-id files) conversion
    to the declared type are done here rather than in the env file.
    Returns an empty list when no env file provides any values.
    """
    results = []
    for env_file in self._env_entryid_files:
        # Wait and resolve in self rather than in env_file
        results = env_file.get_values(item, attribute, resolved=False, subgroup=subgroup)
        if len(results) > 0:
            new_results = []
            vtype = env_file.get_type_info(item)
            if resolved:
                for result in results:
                    if type(result) is str:
                        result = self.get_resolved_value(result)
                        new_results.append(convert_to_type(result, vtype, item))
                    else:
                        # Non-string values are passed through unchanged
                        new_results.append(result)
            else:
                new_results = results
            return new_results
    for env_file in self._env_generic_files:
        results = env_file.get_values(item, attribute, resolved=False, subgroup=subgroup)
        if len(results) > 0:
            if resolved:
                # Bug fix: new_results was previously referenced on this
                # path without ever being initialized (the entry-id loop
                # returns whenever it assigns it), raising NameError.
                new_results = []
                for result in results:
                    if type(result) is str:
                        new_results.append(self.get_resolved_value(result))
                    else:
                        new_results.append(result)
            else:
                new_results = results
            return new_results
    # No env file had a value for item
    return results
def get_value(self, vid, attribute=None, resolved=True, subgroup=None):
    """
    Get a value for entry with id attribute vid.
    or from the values field if the attribute argument is provided and matches

    For per-component variables the component class is taken from vid,
    from subgroup, or is expected in attribute; in all other cases the
    lookup is delegated to EntryID.get_value.
    """
    value = None
    # Split a component-qualified id into base id and component class --
    # presumably e.g. "PIO_TYPENAME_ATM"; confirm in check_if_comp_var
    vid, comp, iscompvar = self.check_if_comp_var(vid, attribute)
    logger.debug("vid {} comp {} iscompvar {}".format(vid, comp, iscompvar))
    if iscompvar:
        if comp is None:
            if subgroup is not None:
                # Fall back to the subgroup as the component class
                comp = subgroup
            else:
                logger.debug("Not enough info to get value for {}".format(vid))
                return value
        # NOTE(review): when the caller passed a dict this mutates it in place
        if attribute is None:
            attribute = {"compclass" : comp}
        else:
            attribute["compclass"] = comp
        node = self.scan_optional_child("entry", {"id":vid})
        if node is not None:
            type_str = self._get_type_info(node)
            # Prefer the <values> child as the search root when present
            values = self.get_optional_child("values", root=node)
            node = values if values is not None else node
            val = self.get_element_text("value", attribute, root=node)
            if val is not None:
                if val.startswith("$"):
                    # Unresolved reference: hand back the raw string
                    value = val
                else:
                    value = convert_to_type(val,type_str, vid)
                return value
    # Not a component variable, or no per-component value found:
    # use the generic entry-id lookup
    return EntryID.get_value(self, vid, attribute=attribute, resolved=resolved, subgroup=subgroup)
def get_value(self, vid, attribute=None, resolved=True, subgroup=None):
    """
    Get a value for entry with id attribute vid,
    or from the values field if the attribute argument is provided and matches.
    """
    # Restrict the search to the subgroup's <group> element when one is given.
    if subgroup is None:
        search_root = self.root
    else:
        search_root = self.get_optional_child("group", {"id": subgroup})
    entry = self.scan_optional_child("entry", {"id": vid}, root=search_root)
    if entry is None:
        return None
    raw = self._get_value(entry, attribute=attribute, resolved=resolved, subgroup=subgroup)
    # Only convert to the declared type when the value fully resolved;
    # anything missing or still containing a "$" reference is returned as-is.
    if raw is None or "$" in raw:
        return raw
    return convert_to_type(raw, self._get_type_info(entry), vid)
def get_value(self, item, attribute=None, resolved=True, subgroup="case.run"):
    """
    Must default subgroup to something in order to provide single return value

    Looks item up inside the <job> element named by subgroup; when
    subgroup is explicitly None, falls back to a direct element lookup
    and then to the generic EnvBase lookup.
    """
    value = None
    if subgroup is None:
        node = self.get_optional_node(item, attribute)
        if node is not None:
            value = node.text
            if resolved:
                value = self.get_resolved_value(value)
        else:
            # No direct element: defer to the generic entry-id lookup
            value = EnvBase.get_value(self, item, attribute, resolved)
    else:
        job_node = self.get_optional_node("job", {"name": subgroup})
        if job_node is not None:
            node = self.get_optional_node("entry", {"id": item}, root=job_node)
            if node is not None:
                value = node.get("value")
                if resolved:
                    value = self.get_resolved_value(value)

                # Return value as right type if we were able to fully resolve
                # otherwise, we have to leave as string.
                # NOTE(review): if the entry carries no "value" attribute this
                # is None and the "$" membership test raises TypeError --
                # confirm entries always have value=
                if "$" not in value:
                    type_str = self._get_type_info(node)
                    value = convert_to_type(value, type_str, item)
    return value
def get_value(self, item, attribute=None, resolved=True, subgroup="case.run"):
    """
    Must default subgroup to something in order to provide single return value
    """
    value = None
    if subgroup is None:
        # No job subgroup: direct element lookup, falling back to the
        # generic EnvBase lookup when no element matches.
        node = self.get_optional_node(item, attribute)
        if node is None:
            value = EnvBase.get_value(self, item, attribute, resolved)
        else:
            value = node.text
            if resolved:
                value = self.get_resolved_value(value)
    else:
        # Scoped lookup: find the <job> element, then the entry inside it.
        job_node = self.get_optional_node("job", {"name": subgroup})
        if job_node is not None:
            entry = self.get_optional_node("entry", {"id": item}, root=job_node)
            if entry is not None:
                value = entry.get("value")
                if resolved:
                    value = self.get_resolved_value(value)
                # Convert to the declared type only when fully resolved;
                # otherwise the value must stay a string.
                if "$" not in value:
                    value = convert_to_type(value, self._get_type_info(entry), item)
    return value
def get_value(self, vid, attribute=None, resolved=True, subgroup=None):
    """
    Get a value for entry with id attribute vid,
    or from the values field if the attribute argument is provided and matches.

    Returns None when no matching entry exists or the entry has no value.
    """
    node = self.get_optional_node("entry", {"id":vid})
    if node is None:
        return None
    val = self._get_value(node, attribute=attribute, resolved=resolved, subgroup=subgroup)
    # Bug fix: _get_value can return None (no matching value), and the
    # previous code fell straight into the "$" containment test below,
    # raising TypeError. This matches the guard used by the sibling
    # implementation of this method.
    if val is None:
        return None
    # Return value as right type if we were able to fully resolve
    # otherwise, we have to leave as string.
    if "$" in val:
        return val
    type_str = self._get_type_info(node)
    return convert_to_type(val, type_str, vid)
def set_initial_values(self, case):
    """
    The values to initialize a test are defined in env_test.xml
    copy them to the appropriate case env files to initialize a test
    ignore fields set in the BUILD and RUN clauses, they are set in
    the appropriate build and run phases.
    """
    tnode = self.get_node("test")
    # Iterating an ElementTree element yields its direct children
    for child in tnode:
        if child.text is not None:
            logger.debug("Setting %s to %s for test" % (child.tag, child.text))
            if "$" in child.text:
                # Unresolved $VAR reference: store verbatim, skip type checks
                case.set_value(child.tag, child.text, ignore_type=True)
            else:
                item_type = case.get_type_info(child.tag)
                value = convert_to_type(child.text, item_type, child.tag)
                case.set_value(child.tag, value)
def set_initial_values(self, case):
    """
    The values to initialize a test are defined in env_test.xml;
    copy them to the appropriate case env files to initialize a test.
    Fields set in the BUILD and RUN clauses are ignored here -- they
    are applied during the corresponding build and run phases.
    """
    test_node = self.get_child("test")
    for child in self.get_children(root=test_node):
        text = self.text(child)
        if text is None:
            continue
        name = self.name(child)
        logger.debug("Setting {} to {} for test".format(name, text))
        if "$" in text:
            # Unresolved reference: store verbatim, skipping type checks.
            case.set_value(name, text, ignore_type=True)
        else:
            item_type = case.get_type_info(name)
            case.set_value(name, convert_to_type(text, item_type, name))
    case.flush()
def set_initial_values(self, case):
    """
    The values to initialize a test are defined in env_test.xml
    copy them to the appropriate case env files to initialize a test
    ignore fields set in the BUILD and RUN clauses, they are set in
    the appropriate build and run phases.
    """
    tnode = self.get_node("test")
    # Iterating an ElementTree element yields its direct children
    for child in tnode:
        if child.text is not None:
            logger.debug("Setting %s to %s for test"%(child.tag,child.text))
            if "$" in child.text:
                # Unresolved $VAR reference: store verbatim, skip type checks
                case.set_value(child.tag,child.text,ignore_type=True)
            else:
                item_type = case.get_type_info(child.tag)
                value = convert_to_type(child.text,item_type,child.tag)
                case.set_value(child.tag,value)
    # Persist the updated env files to disk before returning
    case.flush()
    return
def set_initial_values(self, case):
    """
    The values to initialize a test are defined in env_test.xml;
    copy them into the appropriate case env files to initialize a test.
    Fields set in the BUILD and RUN clauses are ignored -- those are
    applied in the corresponding build and run phases.
    """
    for entry in self.get_children(root=self.get_child("test")):
        val = self.text(entry)
        if val is None:
            continue
        tag = self.name(entry)
        logger.debug("Setting {} to {} for test".format(tag, val))
        if "$" in val:
            # Still contains an unresolved reference; store as-is.
            case.set_value(tag, val, ignore_type=True)
        else:
            item_type = case.get_type_info(tag)
            case.set_value(tag, convert_to_type(val, item_type, tag))
def set_value(self, vid, value, subgroup=None, ignore_type=False):
    """
    Set the value of an entry-id field to value
    Returns the value or None if not found
    subgroup is ignored in the general routine and applied in specific methods
    """
    if self._pio_async_interface:
        # In async PIO mode only the CPL group's PIO settings are used,
        # so per-component PIO sets are redirected to the CPL subgroup.
        vid, comp, iscompvar = self.check_if_comp_var(vid, None)
        if vid.startswith("PIO") and iscompvar:
            if comp and comp != "CPL":
                logger.warning("Only CPL settings are used for PIO in async mode")
            subgroup = "CPL"

    if vid == "PIO_ASYNC_INTERFACE":
        # Track the async flag locally as a real boolean.
        # Idiom fix: was `type(value) == type(True)`.
        if isinstance(value, bool):
            self._pio_async_interface = value
        else:
            self._pio_async_interface = convert_to_type(value, "logical" , vid)

    return EnvBase.set_value(self, vid, value, subgroup, ignore_type)
def get_values(self, vid, attribute=None, resolved=True, subgroup=None):
    """
    Same functionality as get_value but it returns a list; if the
    value in xml contains commas the list has multiple elements,
    split on commas.

    Returns an empty list when the entry does not exist or has no value.
    """
    results = []
    node = self.get_optional_node("entry", {"id":vid})
    if node is None:
        return results
    str_result = self._get_value(node, attribute=attribute, resolved=resolved, subgroup=subgroup)
    # Bug fix: _get_value may return None (no matching value); the
    # previous code called None.split(',') and raised AttributeError.
    if str_result is None:
        return results
    str_results = str_result.split(',')
    type_str = None
    for result in str_results:
        # Return value as right type if we were able to fully resolve
        # otherwise, we have to leave as string.
        if "$" in result:
            results.append(result)
        else:
            if type_str is None:
                # Loop-invariant: look the declared type up only once.
                type_str = self._get_type_info(node)
            results.append( convert_to_type(result, type_str, vid))
    return results
def get_value(self, vid, attribute=None, resolved=True, subgroup=None):
    """
    Get a value for entry with id attribute vid.
    or from the values field if the attribute argument is provided and matches

    subgroup is accepted for interface compatibility but is not used here.
    """
    # Bug fix: the default was the mutable literal `attribute={}`, a
    # dict shared across all calls; None is the correct sentinel. The
    # body only tests truthiness, so behavior is unchanged for callers.
    logger.debug("Get Value")
    val = None
    node = self.get_optional_node("entry", {"id":vid})
    if node is None:
        logger.debug("No node")
        return val

    logger.debug("Found node %s with attributes %s" , node.tag , node.attrib)
    if attribute:
        # Attribute-qualified lookup inside the entry's <value> children
        valnodes = self.get_optional_node("value", attribute, root=node)
        if valnodes is not None:
            val = valnodes.text
    elif node.get("value") is not None:
        val = node.get("value")
    else:
        val = self.get_default_value(node)

    if resolved:
        val = self.get_resolved_value(val)
    if val is None:
        return val
    # Return value as right type if we were able to fully resolve
    # otherwise, we have to leave as string.
    if "$" in val:
        return val
    else:
        type_str = self._get_type_info(node)
        return convert_to_type(val, type_str, vid)
def configure(self, compset_name, grid_name, machine_name=None,
              project=None, pecount=None, compiler=None, mpilib=None,
              user_compset=False, pesfile=None,
              user_grid=False, gridfile=None, ninst=1, test=False):
    """
    Populate the case's env files for the given compset and grid:
    compset components, grid info, machine settings, PE layout,
    batch configuration and miscellaneous defaults.

    NOTE(review): uses raw_input (a Python 2 builtin) to prompt when
    EXEROOT or RUNDIR already exist and test is False.
    """
    #--------------------------------------------
    # compset, pesfile, and compset components
    #--------------------------------------------
    self._set_compset_and_pesfile(compset_name, user_compset=user_compset, pesfile=pesfile)

    self._components = self.get_compset_components()
    #FIXME - if --user-compset is True then need to determine that
    #all of the compset settings are valid

    #--------------------------------------------
    # grid
    #--------------------------------------------
    if user_grid is True and gridfile is not None:
        self.set_value("GRIDS_SPEC_FILE", gridfile);
    grids = Grids(gridfile)

    gridinfo = grids.get_grid_info(name=grid_name, compset=self._compsetname)

    self._gridname = gridinfo["GRID"]
    for key,value in gridinfo.items():
        logger.debug("Set grid %s %s"%(key,value))
        self.set_value(key,value)

    #--------------------------------------------
    # component config data
    #--------------------------------------------
    self._get_component_config_data()

    self.get_compset_var_settings()

    # Add the group and elements for the config_files.xml
    for idx, config_file in enumerate(self._component_config_files):
        self.set_value(config_file[0],config_file[1])

    #--------------------------------------------
    # machine
    #--------------------------------------------
    # set machine values in env_xxx files
    machobj = Machines(machine=machine_name)
    machine_name = machobj.get_machine_name()
    self.set_value("MACH",machine_name)
    nodenames = machobj.get_node_names()
    # Only simple value nodes are copied; structural/config nodes are skipped
    nodenames = [x for x in nodenames if '_system' not in x and '_variables' not in x and 'mpirun' not in x and\
                 'COMPILER' not in x and 'MPILIB' not in x]

    for nodename in nodenames:
        value = machobj.get_value(nodename)
        type_str = self.get_type_info(nodename)
        if type_str is not None:
            self.set_value(nodename, convert_to_type(value, type_str, nodename))

    if compiler is None:
        compiler = machobj.get_default_compiler()
    else:
        expect(machobj.is_valid_compiler(compiler),
               "compiler %s is not supported on machine %s" %(compiler, machine_name))

    self.set_value("COMPILER",compiler)

    if mpilib is None:
        mpilib = machobj.get_default_MPIlib({"compiler":compiler})
    else:
        expect(machobj.is_valid_MPIlib(mpilib, {"compiler":compiler}),
               "MPIlib %s is not supported on machine %s" %(mpilib, machine_name))
    self.set_value("MPILIB",mpilib)

    machdir = machobj.get_machines_dir()
    self.set_value("MACHDIR", machdir)

    # Overwriting an existing exeroot or rundir can cause problems
    exeroot = self.get_value("EXEROOT")
    rundir = self.get_value("RUNDIR")
    for wdir in (exeroot, rundir):
        if os.path.exists(wdir):
            expect(not test, "Directory %s already exists, aborting test"% wdir)
            response = raw_input("\nDirectory %s already exists, (r)eplace, (a)bort, or (u)se existing?"% wdir)
            if response.startswith("r"):
                shutil.rmtree(wdir)
            else:
                expect(response.startswith("u"), "Aborting by user request")

    # the following go into the env_mach_specific file
    # NOTE(review): `vars` shadows the builtin of the same name
    vars = ("module_system", "environment_variables", "mpirun")
    env_mach_specific_obj = self._get_env("mach_specific")
    for var in vars:
        nodes = machobj.get_first_child_nodes(var)
        for node in nodes:
            env_mach_specific_obj.add_child(node)

    #--------------------------------------------
    # pe payout
    #--------------------------------------------
    pesobj = Pes(self._pesfile)

    #FIXME - add pesize_opts as optional argument below
    pes_ntasks, pes_nthrds, pes_rootpe = pesobj.find_pes_layout(self._gridname, self._compsetname,
                                                                machine_name, pesize_opts=pecount)

    mach_pes_obj = self._get_env("mach_pes")
    totaltasks = {}
    # totaltasks accumulates tasks*threads per component class; keys are
    # the trailing 3 characters of the NTASKS_/ROOTPE_/NTHRDS_ names
    for key, value in pes_ntasks.items():
        totaltasks[key[-3:]] = int(value)
        mach_pes_obj.set_value(key,int(value))
    for key, value in pes_rootpe.items():
        totaltasks[key[-3:]] += int(value)
        mach_pes_obj.set_value(key,int(value))
    for key, value in pes_nthrds.items():
        totaltasks[key[-3:]] *= int(value)
        mach_pes_obj.set_value(key,int(value))

    # The largest per-class count sizes the batch job; negative values
    # are presumably node counts scaled by PES_PER_NODE -- TODO confirm
    maxval = 1
    pes_per_node = mach_pes_obj.get_value("PES_PER_NODE")
    for key, val in totaltasks.items():
        if val < 0:
            val = -1*val*pes_per_node
        if val > maxval:
            maxval = val

    # Make sure that every component has been accounted for
    # set, nthrds and ntasks to 1 otherwise. Also set the ninst values here.
    for compclass in self._component_classes:
        if compclass == "DRV":
            continue
        key = "NINST_%s"%compclass
        mach_pes_obj.set_value(key, ninst)
        key = "NTASKS_%s"%compclass
        if key not in pes_ntasks.keys():
            mach_pes_obj.set_value(key,1)
        key = "NTHRDS_%s"%compclass
        # NOTE(review): this tests compclass (e.g. "ATM") against
        # pes_nthrds keys of the form "NTHRDS_ATM", and sets compclass
        # rather than key -- looks like both should use key; confirm
        if compclass not in pes_nthrds.keys():
            mach_pes_obj.set_value(compclass,1)

    # FIXME - this is a short term fix for dealing with the restriction that
    # CISM1 cannot run on multiple cores
    if "CISM1" in self._compsetname:
        mach_pes_obj.set_value("NTASKS_GLC",1)
        mach_pes_obj.set_value("NTHRDS_GLC",1)

    #--------------------------------------------
    # batch system
    #--------------------------------------------
    batch_system_type = machobj.get_value("BATCH_SYSTEM")
    batch = Batch(batch_system=batch_system_type, machine=machine_name)
    bjobs = batch.get_batch_jobs()
    env_batch = self._get_env("batch")
    env_batch.set_batch_system(batch, batch_system_type=batch_system_type)
    env_batch.create_job_groups(bjobs)
    env_batch.set_job_defaults(bjobs, pesize=maxval)
    self._env_files_that_need_rewrite.add(env_batch)

    self.set_value("COMPSET",self._compsetname)

    self._set_pio_xml()
    logger.info(" Compset is: %s " %self._compsetname)
    logger.info(" Grid is: %s " %self._gridname )
    logger.info(" Components in compset are: %s " %self._components)

    # miscellaneous settings
    if self.get_value("RUN_TYPE") == 'hybrid':
        self.set_value("GET_REFCASE", True)

    # Set project id
    if project is None:
        project = get_project(machobj)
    if project is not None:
        self.set_value("PROJECT", project)
    elif machobj.get_value("PROJECT_REQUIRED"):
        # project is None whenever this branch is reached, so this
        # deliberately aborts with the message below
        expect(project is not None, "PROJECT_REQUIRED is true but no project found")
def configure(self, compset_name, grid_name, machine_name=None,
              project=None, pecount=None, compiler=None, mpilib=None,
              user_compset=False, pesfile=None,
              user_grid=False, gridfile=None, ninst=1, test=False,
              walltime=None, queue=None):
    """
    Populate the case's env files for the given compset and grid:
    compset components, grid info, machine settings, PE layout,
    batch configuration and miscellaneous defaults.

    pecount accepts "TASKSxTHREADS" (e.g. "16x2") or "TASKS" (e.g. "16");
    otherwise the layout is read from the pes spec file.
    NOTE(review): uses raw_input, a Python 2 builtin.
    """
    #--------------------------------------------
    # compset, pesfile, and compset components
    #--------------------------------------------
    self._set_compset_and_pesfile(compset_name, user_compset=user_compset, pesfile=pesfile)

    self._components = self.get_compset_components()
    #FIXME - if --user-compset is True then need to determine that
    #all of the compset settings are valid

    #--------------------------------------------
    # grid
    #--------------------------------------------
    if user_grid is True and gridfile is not None:
        self.set_value("GRIDS_SPEC_FILE", gridfile)
    grids = Grids(gridfile)

    gridinfo = grids.get_grid_info(name=grid_name, compset=self._compsetname)

    self._gridname = gridinfo["GRID"]
    for key,value in gridinfo.items():
        logger.debug("Set grid %s %s"%(key,value))
        self.set_lookup_value(key,value)

    #--------------------------------------------
    # component config data
    #--------------------------------------------
    self._get_component_config_data()

    self.get_compset_var_settings()

    #--------------------------------------------
    # machine
    #--------------------------------------------
    # set machine values in env_xxx files
    machobj = Machines(machine=machine_name)
    machine_name = machobj.get_machine_name()
    self.set_value("MACH",machine_name)
    nodenames = machobj.get_node_names()
    # Only simple value nodes are copied; structural/config nodes are skipped
    nodenames = [x for x in nodenames if '_system' not in x and '_variables' not in x and 'mpirun' not in x and\
                 'COMPILER' not in x and 'MPILIB' not in x]

    for nodename in nodenames:
        value = machobj.get_value(nodename, resolved=False)
        type_str = self.get_type_info(nodename)
        if type_str is not None:
            logger.debug("machine nodname %s value %s"%(nodename, value))
            self.set_value(nodename, convert_to_type(value, type_str, nodename))

    if compiler is None:
        compiler = machobj.get_default_compiler()
    else:
        expect(machobj.is_valid_compiler(compiler),
               "compiler %s is not supported on machine %s" %(compiler, machine_name))

    self.set_value("COMPILER",compiler)

    if mpilib is None:
        mpilib = machobj.get_default_MPIlib({"compiler":compiler})
    else:
        expect(machobj.is_valid_MPIlib(mpilib, {"compiler":compiler}),
               "MPIlib %s is not supported on machine %s" %(mpilib, machine_name))
    self.set_value("MPILIB",mpilib)

    machdir = machobj.get_machines_dir()
    self.set_value("MACHDIR", machdir)

    # Create env_mach_specific settings from machine info.
    env_mach_specific_obj = self.get_env("mach_specific")
    env_mach_specific_obj.populate(machobj)
    self.schedule_rewrite(env_mach_specific_obj)

    #--------------------------------------------
    # pe payout
    #--------------------------------------------
    # "16x2" -> 16 tasks, 2 threads; "16" -> 16 tasks, 1 thread,
    # applied uniformly to every component class
    match1 = re.match('([0-9]+)x([0-9]+)', "" if pecount is None else pecount)
    match2 = re.match('([0-9]+)', "" if pecount is None else pecount)
    pes_ntasks = {}
    pes_nthrds = {}
    pes_rootpe = {}
    if match1:
        opti_tasks = match1.group(1)
        opti_thrds = match1.group(2)
    elif match2:
        opti_tasks = match2.group(1)
        opti_thrds = 1

    other = {}
    if match1 or match2:
        for component_class in self._component_classes:
            if component_class == "DRV":
                component_class = "CPL"
            string = "NTASKS_" + component_class
            pes_ntasks[string] = opti_tasks
            string = "NTHRDS_" + component_class
            pes_nthrds[string] = opti_thrds
            string = "ROOTPE_" + component_class
            pes_rootpe[string] = 0
    else:
        # No explicit pecount: read the layout from the pes spec file
        pesobj = Pes(self._pesfile)

        pes_ntasks, pes_nthrds, pes_rootpe, other = pesobj.find_pes_layout(self._gridname, self._compsetname,
                                                                           machine_name, pesize_opts=pecount)

    mach_pes_obj = self.get_env("mach_pes")
    totaltasks = {}
    # Since other items may include PES_PER_NODE we need to do this first
    # we can get rid of this code when all of the perl is removed
    for key, value in other.items():
        self.set_value(key, value)
    pes_per_node = self.get_value("PES_PER_NODE")
    # totaltasks accumulates tasks*threads per component class; keys are
    # the trailing 3 characters of the NTASKS_/ROOTPE_/NTHRDS_ names
    for key, value in pes_ntasks.items():
        totaltasks[key[-3:]] = int(value)
        mach_pes_obj.set_value(key,int(value), pes_per_node=pes_per_node)
    for key, value in pes_rootpe.items():
        totaltasks[key[-3:]] += int(value)
        mach_pes_obj.set_value(key,int(value), pes_per_node=pes_per_node)
    for key, value in pes_nthrds.items():
        totaltasks[key[-3:]] *= int(value)
        mach_pes_obj.set_value(key,int(value), pes_per_node=pes_per_node)

    # The largest per-class count sizes the batch job; negative values
    # are presumably node counts scaled by PES_PER_NODE -- TODO confirm
    maxval = 1
    if mpilib != "mpi-serial":
        for key, val in totaltasks.items():
            if val < 0:
                val = -1*val*pes_per_node
            if val > maxval:
                maxval = val

    # Make sure that every component has been accounted for
    # set, nthrds and ntasks to 1 otherwise. Also set the ninst values here.
    for compclass in self._component_classes:
        if compclass == "DRV":
            continue
        key = "NINST_%s"%compclass
        mach_pes_obj.set_value(key, ninst)
        key = "NTASKS_%s"%compclass
        if key not in pes_ntasks.keys():
            mach_pes_obj.set_value(key,1)
        key = "NTHRDS_%s"%compclass
        # NOTE(review): tests compclass (e.g. "ATM") against pes_nthrds
        # keys of the form "NTHRDS_ATM" and sets compclass rather than
        # key -- looks like both should use key; confirm
        if compclass not in pes_nthrds.keys():
            mach_pes_obj.set_value(compclass,1)

    # FIXME - this is a short term fix for dealing with the restriction that
    # CISM1 cannot run on multiple cores
    if "CISM1" in self._compsetname:
        mach_pes_obj.set_value("NTASKS_GLC",1)
        mach_pes_obj.set_value("NTHRDS_GLC",1)

    #--------------------------------------------
    # batch system
    #--------------------------------------------
    batch_system_type = machobj.get_value("BATCH_SYSTEM")
    batch = Batch(batch_system=batch_system_type, machine=machine_name)
    bjobs = batch.get_batch_jobs()
    env_batch = self.get_env("batch")
    env_batch.set_batch_system(batch, batch_system_type=batch_system_type)
    env_batch.create_job_groups(bjobs)
    env_batch.set_job_defaults(bjobs, pesize=maxval, walltime=walltime, force_queue=queue)
    self.schedule_rewrite(env_batch)

    self.set_value("COMPSET",self._compsetname)

    self._set_pio_xml()
    logger.info(" Compset is: %s " %self._compsetname)
    logger.info(" Grid is: %s " %self._gridname )
    logger.info(" Components in compset are: %s " %self._components)

    # Set project id
    if project is None:
        project = get_project(machobj)
    if project is not None:
        self.set_value("PROJECT", project)
    elif machobj.get_value("PROJECT_REQUIRED"):
        # project is None whenever this branch is reached, so this
        # deliberately aborts with the message below
        expect(project is not None, "PROJECT_REQUIRED is true but no project found")

    # Overwriting an existing exeroot or rundir can cause problems
    exeroot = self.get_value("EXEROOT")
    rundir = self.get_value("RUNDIR")
    for wdir in (exeroot, rundir):
        logging.debug("wdir is %s"%wdir)
        if os.path.exists(wdir):
            expect(not test, "Directory %s already exists, aborting test"% wdir)
            response = raw_input("\nDirectory %s already exists, (r)eplace, (a)bort, or (u)se existing?"% wdir)
            if response.startswith("r"):
                shutil.rmtree(wdir)
            else:
                expect(response.startswith("u"), "Aborting by user request")

    # miscellaneous settings
    if self.get_value("RUN_TYPE") == 'hybrid':
        self.set_value("GET_REFCASE", True)

    # Turn on short term archiving as cesm default setting
    model = get_model()
    if model == "cesm" and not test:
        self.set_value("DOUT_S",True)
def configure(self, compset_name, grid_name, machine_name=None,
              project=None, pecount=None, compiler=None, mpilib=None,
              user_compset=False, pesfile=None,
              user_grid=False, gridfile=None, ninst=1, test=False,
              walltime=None, queue=None, output_root=None):
    """
    Populate the case's env files for the given compset and grid:
    compset components, grid info, machine settings, PE layout,
    batch configuration and miscellaneous defaults.

    pecount accepts "TASKSxTHREADS" (e.g. "16x2") or "TASKS" (e.g. "16");
    otherwise the layout is read from the pes spec file. output_root,
    when given, overrides CIME_OUTPUT_ROOT.
    NOTE(review): uses raw_input, a Python 2 builtin.
    """
    #--------------------------------------------
    # compset, pesfile, and compset components
    #--------------------------------------------
    self._set_compset_and_pesfile(compset_name, user_compset=user_compset, pesfile=pesfile)

    self._components = self.get_compset_components()
    #FIXME - if --user-compset is True then need to determine that
    #all of the compset settings are valid

    #--------------------------------------------
    # grid
    #--------------------------------------------
    if user_grid is True and gridfile is not None:
        self.set_value("GRIDS_SPEC_FILE", gridfile)
    grids = Grids(gridfile)

    gridinfo = grids.get_grid_info(name=grid_name, compset=self._compsetname)

    self._gridname = gridinfo["GRID"]
    for key, value in gridinfo.items():
        logger.debug("Set grid %s %s" % (key, value))
        self.set_lookup_value(key, value)

    #--------------------------------------------
    # component config data
    #--------------------------------------------
    self._get_component_config_data()

    self.get_compset_var_settings()

    #--------------------------------------------
    # machine
    #--------------------------------------------
    # set machine values in env_xxx files
    machobj = Machines(machine=machine_name)
    machine_name = machobj.get_machine_name()
    self.set_value("MACH", machine_name)
    nodenames = machobj.get_node_names()
    # Only simple value nodes are copied; structural/config nodes are skipped
    nodenames = [x for x in nodenames if '_system' not in x and '_variables' not in x and 'mpirun' not in x and\
                 'COMPILER' not in x and 'MPILIB' not in x]

    for nodename in nodenames:
        value = machobj.get_value(nodename, resolved=False)
        type_str = self.get_type_info(nodename)
        if type_str is not None:
            logger.debug("machine nodname %s value %s" % (nodename, value))
            self.set_value(nodename, convert_to_type(value, type_str, nodename))

    if compiler is None:
        compiler = machobj.get_default_compiler()
    else:
        expect(
            machobj.is_valid_compiler(compiler),
            "compiler %s is not supported on machine %s" % (compiler, machine_name))

    self.set_value("COMPILER", compiler)

    if mpilib is None:
        mpilib = machobj.get_default_MPIlib({"compiler": compiler})
    else:
        expect(
            machobj.is_valid_MPIlib(mpilib, {"compiler": compiler}),
            "MPIlib %s is not supported on machine %s" % (mpilib, machine_name))
    self.set_value("MPILIB", mpilib)

    machdir = machobj.get_machines_dir()
    self.set_value("MACHDIR", machdir)

    # Create env_mach_specific settings from machine info.
    env_mach_specific_obj = self.get_env("mach_specific")
    env_mach_specific_obj.populate(machobj)
    self.schedule_rewrite(env_mach_specific_obj)

    #--------------------------------------------
    # pe payout
    #--------------------------------------------
    # "16x2" -> 16 tasks, 2 threads; "16" -> 16 tasks, 1 thread,
    # applied uniformly to every component class
    match1 = re.match('([0-9]+)x([0-9]+)', "" if pecount is None else pecount)
    match2 = re.match('([0-9]+)', "" if pecount is None else pecount)
    pes_ntasks = {}
    pes_nthrds = {}
    pes_rootpe = {}
    if match1:
        opti_tasks = match1.group(1)
        opti_thrds = match1.group(2)
    elif match2:
        opti_tasks = match2.group(1)
        opti_thrds = 1

    other = {}
    if match1 or match2:
        for component_class in self._component_classes:
            if component_class == "DRV":
                component_class = "CPL"
            string = "NTASKS_" + component_class
            pes_ntasks[string] = opti_tasks
            string = "NTHRDS_" + component_class
            pes_nthrds[string] = opti_thrds
            string = "ROOTPE_" + component_class
            pes_rootpe[string] = 0
    else:
        # No explicit pecount: read the layout from the pes spec file
        pesobj = Pes(self._pesfile)

        pes_ntasks, pes_nthrds, pes_rootpe, other = pesobj.find_pes_layout(
            self._gridname, self._compsetname, machine_name, pesize_opts=pecount)

    mach_pes_obj = self.get_env("mach_pes")
    totaltasks = {}
    # Since other items may include PES_PER_NODE we need to do this first
    # we can get rid of this code when all of the perl is removed
    for key, value in other.items():
        self.set_value(key, value)
    pes_per_node = self.get_value("PES_PER_NODE")
    # totaltasks accumulates tasks*threads per component class; keys are
    # the trailing 3 characters of the NTASKS_/ROOTPE_/NTHRDS_ names
    for key, value in pes_ntasks.items():
        totaltasks[key[-3:]] = int(value)
        mach_pes_obj.set_value(key, int(value), pes_per_node=pes_per_node)
    for key, value in pes_rootpe.items():
        totaltasks[key[-3:]] += int(value)
        mach_pes_obj.set_value(key, int(value), pes_per_node=pes_per_node)
    for key, value in pes_nthrds.items():
        totaltasks[key[-3:]] *= int(value)
        mach_pes_obj.set_value(key, int(value), pes_per_node=pes_per_node)

    # The largest per-class count sizes the batch job; negative values
    # are presumably node counts scaled by PES_PER_NODE -- TODO confirm
    maxval = 1
    if mpilib != "mpi-serial":
        for key, val in totaltasks.items():
            if val < 0:
                val = -1 * val * pes_per_node
            if val > maxval:
                maxval = val

    # Make sure that every component has been accounted for
    # set, nthrds and ntasks to 1 otherwise. Also set the ninst values here.
    for compclass in self._component_classes:
        if compclass == "DRV":
            continue
        key = "NINST_%s" % compclass
        mach_pes_obj.set_value(key, ninst)
        key = "NTASKS_%s" % compclass
        if key not in pes_ntasks.keys():
            mach_pes_obj.set_value(key, 1)
        key = "NTHRDS_%s" % compclass
        # NOTE(review): tests compclass (e.g. "ATM") against pes_nthrds
        # keys of the form "NTHRDS_ATM" and sets compclass rather than
        # key -- looks like both should use key; confirm
        if compclass not in pes_nthrds.keys():
            mach_pes_obj.set_value(compclass, 1)

    # FIXME - this is a short term fix for dealing with the restriction that
    # CISM1 cannot run on multiple cores
    if "CISM1" in self._compsetname:
        mach_pes_obj.set_value("NTASKS_GLC", 1)
        mach_pes_obj.set_value("NTHRDS_GLC", 1)

    #--------------------------------------------
    # batch system
    #--------------------------------------------
    batch_system_type = machobj.get_value("BATCH_SYSTEM")
    batch = Batch(batch_system=batch_system_type, machine=machine_name)
    bjobs = batch.get_batch_jobs()
    env_batch = self.get_env("batch")
    env_batch.set_batch_system(batch, batch_system_type=batch_system_type)
    env_batch.create_job_groups(bjobs)
    env_batch.set_job_defaults(bjobs, pesize=maxval, walltime=walltime, force_queue=queue)
    self.schedule_rewrite(env_batch)

    self.set_value("COMPSET", self._compsetname)

    self._set_pio_xml()
    logger.info(" Compset is: %s " % self._compsetname)
    logger.info(" Grid is: %s " % self._gridname)
    logger.info(" Components in compset are: %s " % self._components)

    # Set project id
    if project is None:
        project = get_project(machobj)
    if project is not None:
        self.set_value("PROJECT", project)
    elif machobj.get_value("PROJECT_REQUIRED"):
        # project is None whenever this branch is reached, so this
        # deliberately aborts with the message below
        expect(project is not None, "PROJECT_REQUIRED is true but no project found")

    # Resolve the CIME_OUTPUT_ROOT variable, other than this
    # we don't want to resolve variables until we need them
    if output_root is None:
        output_root = self.get_value("CIME_OUTPUT_ROOT")
    self.set_value("CIME_OUTPUT_ROOT", output_root)

    # Overwriting an existing exeroot or rundir can cause problems
    exeroot = self.get_value("EXEROOT")
    rundir = self.get_value("RUNDIR")
    for wdir in (exeroot, rundir):
        logging.debug("wdir is %s" % wdir)
        if os.path.exists(wdir):
            expect(not test, "Directory %s already exists, aborting test" % wdir)
            response = raw_input(
                "\nDirectory %s already exists, (r)eplace, (a)bort, or (u)se existing?" % wdir)
            if response.startswith("r"):
                shutil.rmtree(wdir)
            else:
                expect(response.startswith("u"), "Aborting by user request")

    # miscellaneous settings
    if self.get_value("RUN_TYPE") == 'hybrid':
        self.set_value("GET_REFCASE", True)

    # Turn on short term archiving as cesm default setting
    model = get_model()
    self.set_model_version(model)
    if model == "cesm" and not test:
        self.set_value("DOUT_S", True)
        self.set_value("TIMER_LEVEL", 4)
    if test:
        self.set_value("TEST", True)
    self.initialize_derived_attributes()