def _main_func(options, work_dir):
###############################################################################
    """Build the HTML page for a component's namelist definition.

    Reads the namelist definition either from an XML file (old or new schema,
    chosen by whether namelist_defaults*.xml files sit beside the input file)
    or, when options.JSON is set, from a MARBL JSON settings file.  Entries
    are bucketed by category and group into html_dict, rendered through the
    jinja2 template 'templates/nmldef2html.tmpl' under work_dir, and written
    to options.htmlfile[0].

    Arguments:
    options  - parsed command-line options; uses nmlfile, JSON, comp,
               comptag, compversion and htmlfile.
    work_dir - directory containing the 'templates' subdirectory.

    Returns 0 on success; calls sys.exit() on XML parse failure.

    Relies on module-level names not visible in this chunk: expect,
    GenericXML, MARBL_get_array_len, _exclude_defaults_comps,
    _exclude_groups, hilight, closehilight, _now.
    """
    # Initialize variables for the html template
    html_dict = dict()
    cesm_version = 'CESM2'
    comp = ''
    if options.comp:
        comp = options.comp[0]

    # Create a definition object from the xml file
    filename = options.nmlfile[0]
    expect(os.path.isfile(filename), "File %s does not exist"%filename)
    if not options.JSON:
        try:
            definition = GenericXML(infile=filename)
        except:  # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
            sys.exit("Error: unable to parse file %s" %filename)

        # Determine if have new or old schema: the presence of
        # namelist_defaults*.xml next to the definition file implies "old".
        basepath = os.path.dirname(filename)
        default_files = glob.glob(os.path.join(basepath,"namelist_defaults*.xml"))
        defaults = []
        if len(default_files) > 0:
            schema = "old"
            if comp not in _exclude_defaults_comps:
                for default_file in default_files:
                    default = GenericXML(infile=default_file)
                    default.read(infile=default_file, schema=schema)
                    defaults.append(default)
        else:
            schema = "new"
        # read the file into the definition object
        definition.read(infile=filename, schema=schema)
    else:
        schema = "MARBL JSON"
        # Per-member metadata for MARBL derived types, keyed by the
        # "root%component" names constructed below.
        derived_desc = dict()
        derived_entry_root = dict()
        derived_entry_type = dict()
        derived_category = dict()
        derived_default_value = dict()
        import json
        with open(filename) as settings_file:
            MARBL_json_dict = json.load(settings_file)
        # Set up MARBL_settings_file_class object with CESM (gx1v7) default values
        MARBL_root = os.path.join(os.path.dirname(filename), "../..")
        sys.path.append(MARBL_root)
        from MARBL_tools import MARBL_settings_file_class
        MARBL_args=dict()
        MARBL_args["default_settings_file"] = filename
        MARBL_args["input_file"] = None
        MARBL_args["grid"] = "CESM_x1"
        MARBL_args["saved_state_vars_source"] = "settings_file"
        MARBL_default_settings = MARBL_settings_file_class.MARBL_settings_class(**MARBL_args)

    # get the component tag from the command line args
    comptag = ''
    if options.comptag:
        comptag = options.comptag[0]

    # get the component version from the command line args
    compversion = ''
    if options.compversion:
        compversion = options.compversion[0]

    # Create a dictionary with a category key and a list of all entry nodes for each key
    category_dict = dict()
    if schema == "MARBL JSON":
        # Special category for MARBL derived types
        category_dict["MARBL_derived_types"] = dict()
        # Keys starting with "_" are JSON metadata, not real categories/variables.
        for category in [key for key in MARBL_json_dict.keys() if key[0] != "_"]:
            for marbl_varname in MARBL_json_dict[category].keys():
                # A dict-valued 'datatype' marks a derived type whose members are
                # listed under it; scalar datatypes go straight into their category.
                if isinstance(MARBL_json_dict[category][marbl_varname]['datatype'], dict):
                    if marbl_varname not in category_dict["MARBL_derived_types"].keys():
                        category_dict["MARBL_derived_types"][marbl_varname] = dict()
                    category_dict["MARBL_derived_types"][marbl_varname][category] = []
                    for component in [key for key in MARBL_json_dict[category][marbl_varname]["datatype"] if key[0] != "_"]:
                        category_dict["MARBL_derived_types"][marbl_varname][category].append(component)
                else:
                    if category in category_dict.keys():
                        category_dict[category].append(marbl_varname)
                    else:
                        category_dict[category] = [ marbl_varname ]
    else:
        for node in definition.get_children("entry"):
            if schema == "new":
                category = definition.get_element_text("category", root=node)
            elif schema == "old":
                category = definition.get(node, "category")
            if category in category_dict:
                category_dict[category].append(node)
            else:
                category_dict[category] = [ node ]

    # Loop over each category and load up the html_dict
    for category in category_dict:

        # Create a dictionary of groups with a group key and an array of group nodes for each key
        groups_dict = dict()
        if schema == "MARBL JSON":
            if category == "MARBL_derived_types":
                for root_varname in category_dict[category].keys():
                    for real_category in category_dict[category][root_varname].keys():
                        for component in category_dict[category][root_varname][real_category]:
                            MARBL_json_var = MARBL_json_dict[real_category][root_varname]["datatype"][component]
                            if "subcategory" in MARBL_json_dict[real_category][root_varname]["datatype"][component].keys():
                                group = MARBL_json_var["subcategory"]
                                if group not in _exclude_groups[comp]:
                                    # Display name uses Fortran member syntax: "root%component"
                                    marbl_varname = "%s%%%s" % (root_varname, component)
                                    derived_desc[marbl_varname] = MARBL_json_var["longname"]
##                                    if root_varname == "autotrophs":
                                    if "autotroph" in root_varname:
                                        derived_entry_root[marbl_varname] = "dtype(%d)" % MARBL_default_settings.settings_dict['autotroph_cnt']
                                        derived_default_value[marbl_varname] = []
                                        # One default per CESM autotroph (sp, diat, diaz)
                                        for key in ['((autotroph_sname)) == "sp"', '((autotroph_sname)) == "diat"', '((autotroph_sname)) == "diaz"']:
                                            if isinstance(MARBL_json_var["default_value"], dict):
                                                if key in MARBL_json_var["default_value"].keys():
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"][key])
                                                else:
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"]["default"])
                                            else:
                                                derived_default_value[marbl_varname].append(MARBL_json_var["default_value"])
##                                    elif root_varname == "zooplankton":
                                    elif "zooplankton" in root_varname:
                                        derived_entry_root[marbl_varname] = "dtype(%d)" % MARBL_default_settings.settings_dict['zooplankton_cnt']
                                        derived_default_value[marbl_varname] = []
                                        for key in ['((zooplankton_sname)) == "zoo"']:
                                            if isinstance(MARBL_json_var["default_value"], dict):
                                                if key in MARBL_json_var["default_value"].keys():
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"][key])
                                                else:
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"]["default"])
                                            else:
                                                derived_default_value[marbl_varname].append(MARBL_json_var["default_value"])
##                                    elif root_varname == "grazing":
                                    elif "grazing" in root_varname:
                                        # Grazing is 2-D: (autotroph, zooplankton)
                                        derived_entry_root[marbl_varname] = "dtype(%d,%d)" % \
                                            (MARBL_default_settings.settings_dict['autotroph_cnt'] , MARBL_default_settings.settings_dict['zooplankton_cnt'])
                                        derived_default_value[marbl_varname] = []
                                        for key in ['((grazing_sname)) == "sp_zoo"', '((grazing_sname)) == "diat_zoo"', '((grazing_sname)) == "diaz_zoo"']:
                                            if isinstance(MARBL_json_var["default_value"], dict):
                                                if key in MARBL_json_var["default_value"].keys():
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"][key])
                                                else:
                                                    derived_default_value[marbl_varname].append(MARBL_json_var["default_value"]["default"])
                                            else:
                                                derived_default_value[marbl_varname].append(MARBL_json_var["default_value"])
                                    else:
                                        sys.exit("Error: unknown derived type root '%s'" % root_varname)
                                    # NOTE(review): .encode('utf-8') returns bytes on Python 3; this
                                    # (plus the Python-2 print statements left in comments below)
                                    # suggests the script targets Python 2 -- confirm interpreter.
                                    derived_entry_type[marbl_varname] = MARBL_json_dict[real_category][root_varname]["datatype"][component]["datatype"].encode('utf-8')
                                    if "_array_shape" in MARBL_json_dict[real_category][root_varname]["datatype"][component].keys():
                                        derived_entry_type[marbl_varname] = derived_entry_type[marbl_varname] + "(%d)" % \
                                            MARBL_get_array_len(MARBL_json_dict[real_category][root_varname]["datatype"][component]["_array_shape"], MARBL_default_settings)
                                    derived_category[marbl_varname] = real_category
                                    if group in groups_dict:
                                        groups_dict[group].append(marbl_varname)
                                    else:
                                        groups_dict[group] = [ marbl_varname ]
            else:
                for marbl_varname in category_dict[category]:
                    if 'subcategory' in MARBL_json_dict[category][marbl_varname].keys():
                        group = MARBL_json_dict[category][marbl_varname]['subcategory']
                        if group not in _exclude_groups[comp]:
                            if group in groups_dict:
                                groups_dict[group].append(marbl_varname)
                            else:
                                groups_dict[group] = [ marbl_varname ]
        else:
            for node in category_dict[category]:
                if schema == "new":
                    group = definition.get_element_text("group", root=node)
                elif schema == "old":
                    group = definition.get(node, "group")
                if group not in _exclude_groups[comp]:
                    if group in groups_dict:
                        groups_dict[group].append(node)
                    else:
                        groups_dict[group] = [ node ]

        # Loop over the keys
        group_list = list()
        for group_name in groups_dict:

            # Loop over the nodes in each group
            for node in groups_dict[group_name]:

                # Determine the name
                # @ is used in a namelist to put the same namelist variable in multiple groups
                # in the write phase, all characters in the namelist variable name after
                # the @ and including the @ should be removed
                if schema == "MARBL JSON":
                    # For JSON input the "node" is just the variable-name string
                    name = node
                    #print name
                else:
                    name = definition.get(node, "id")
                if "@" in name:
                    name = re.sub('@.+$', "", name)

                # Create the information for this node - start with the description
                if schema == "MARBL JSON":
                    if category == "MARBL_derived_types":
                        desc = derived_desc[node]
                    else:
                        # NOTE(review): when the subcategory does not match group_name,
                        # desc (and entry_type/valid_values below) silently keeps the value
                        # from the previous loop iteration -- confirm that is intended.
                        if MARBL_json_dict[category][node]['subcategory'] == group_name:
                            desc = MARBL_json_dict[category][node]['longname']
                else:
                    if schema == "new":
                        raw_desc = definition.get_element_text("desc", root=node)
                    elif schema == "old":
                        raw_desc = definition.text(node)
                    # Replace placeholders in the description with real hilight markup
                    desc = re.sub(r"{{ hilight }}", hilight, raw_desc)
                    desc = re.sub(r"{{ closehilight }}", closehilight, desc)

                # add type
                if schema == "new":
                    entry_type = definition.get_element_text("type", root=node)
                elif schema == "old":
                    entry_type = definition.get(node, "type")
                elif schema == "MARBL JSON":
                    if category == "MARBL_derived_types":
                        entry_type = "%s%%%s" % (derived_entry_root[node], derived_entry_type[node])
                    else:
                        if MARBL_json_dict[category][node]['subcategory'] == group_name:
                            entry_type = MARBL_json_dict[category][node]['datatype'].encode('utf-8')
                            # Is this an array?
                            if "_array_shape" in MARBL_json_dict[category][node].keys():
                                entry_type = entry_type + "(%d)" % \
                                    MARBL_get_array_len(MARBL_json_dict[category][node]["_array_shape"], MARBL_default_settings)

                # add valid_values
                if schema == "new":
                    valid_values = definition.get_element_text("valid_values", root=node)
                elif schema == "old":
                    valid_values = definition.get(node, "valid_values")
                if schema == "MARBL JSON":
                    if category == "MARBL_derived_types":
                        valid_values = ''
                    else:
                        if MARBL_json_dict[category][node]["subcategory"] == group_name:
                            if "valid_values" in MARBL_json_dict[category][node].keys():
                                valid_values = ",".join(MARBL_json_dict[category][node]["valid_values"]).encode('utf-8')
                            else:
                                valid_values = None

                if entry_type == "logical":
                    valid_values = ".true.,.false."
                else:
                    if not valid_values:
                        # Fall back to "any <type>" when no explicit list is given
                        if category == "MARBL_derived_types":
                            valid_values = "any " + derived_entry_type[node]
                        else:
                            valid_values = "any " + entry_type
                        if "char" in valid_values:
                            valid_values = "any char"
                if valid_values is not None:
                    valid_values = valid_values.split(',')

                # add default values
                values = ""
                if schema == "new":
                    value_nodes = definition.get(node,'value')
                    if value_nodes is not None and len(value_nodes) > 0:
                        for value_node in value_nodes:
                            try:
                                value = value_node.text.strip()
                            except:  # NOTE(review): bare except; presumably guards .text being None
                                value = 'undefined'
                            if value_node.attrib:
                                values += " is %s for: %s <br/>" %(value, value_node.attrib)
                            else:
                                values += " %s <br/>" %(value)
                elif schema == "MARBL JSON":
                    if node in MARBL_default_settings.settings_dict.keys():
                        values = MARBL_default_settings.settings_dict[node]
#                        print "%s = %s" % (node, values)
                    else:
                        if category == "MARBL_derived_types":
                            if node in derived_default_value.keys():
                                default_values = derived_default_value[node]
                            else:
                                default_values = []
                        else:
                            # Prefer CESM-specific defaults over the generic default
                            if "default_value" in MARBL_json_dict[category][node].keys():
                                if isinstance(MARBL_json_dict[category][node]["default_value"], dict):
                                    if 'PFT_defaults == "CESM2"' in MARBL_json_dict[category][node]["default_value"].keys():
                                        default_values = MARBL_json_dict[category][node]["default_value"]['PFT_defaults == "CESM2"']
                                    elif 'GCM == "CESM"' in MARBL_json_dict[category][node]["default_value"].keys():
                                        default_values = MARBL_json_dict[category][node]["default_value"]['GCM == "CESM"']
                                    else:
                                        default_values = MARBL_json_dict[category][node]["default_value"]["default"]
                                else:
                                    default_values = MARBL_json_dict[category][node]["default_value"]
                        if isinstance(default_values, list):
                            values = []
                            for value in default_values:
                                if type(value) == type (u''):
                                    values.append(value.encode('utf-8'))
                                else:
                                    values.append(value)
                        elif type(default_values) == type (u''):
                            values = default_values.encode('utf-8')
                # exclude getting CAM and POP default value - it is included in the description text
                elif comp not in _exclude_defaults_comps:
                    for default in defaults:
                        # NOTE(review): this inner loop rebinds the enclosing loop
                        # variable 'node' -- verify that is harmless for later uses.
                        for node in default.get_children(name=name):
                            if default.attrib(node):
                                values += " is %s for: %s <br/>" %(default.text(node), default.attrib(node))
                            else:
                                values += " %s <br/>" %(default.text(node))

                # create the node dictionary
                node_dict = { 'name'           : name,
                              'desc'           : desc,
                              'entry_type'     : entry_type,
                              'valid_values'   : valid_values,
                              'default_values' : values,
                              'group_name'     : group_name }

                # append this node_dict to the group_list
                group_list.append(node_dict)

                if category == "MARBL_derived_types":
                    real_category = derived_category[node]

        # update the group_list for this category in the html_dict
        # NOTE(review): for derived types the whole group_list is filed under the
        # real_category of the *last* node processed -- confirm intended.
        if category == "MARBL_derived_types":
            category_group = real_category
        else:
            category_group = category
        html_dict[category_group] = group_list

    # load up jinja template
    templateLoader = jinja2.FileSystemLoader( searchpath='{0}/templates'.format(work_dir) )
    templateEnv = jinja2.Environment( loader=templateLoader )

    # populate the template variables
    tmplFile = 'nmldef2html.tmpl'
    template = templateEnv.get_template( tmplFile )
    templateVars = { 'html_dict'    : html_dict,
                     'today'        : _now,
                     'cesm_version' : cesm_version,
                     'comp'         : comp,
                     'comptag'      : comptag,
                     'compversion'  : compversion,
                     'hilight'      : hilight,
                     'closehilight' : closehilight }

    # render the template
    nml_tmpl = template.render( templateVars )

    # write the output file
    with open( options.htmlfile[0], 'w') as html:
        html.write(nml_tmpl)

    return 0
def __init__(self, machobj, infile=None, compiler=None, mpilib=None, files=None, version=None):
    """
    initialize an object

    Arguments:
    machobj  - machine object providing machine name, OS, and defaults
               for compiler and MPI library.
    infile   - path to a config_compilers.xml file; defaults to the
               COMPILERS_SPEC_FILE from a Files() object.
    compiler - compiler name; defaults to the machine's default compiler.
    mpilib   - MPI library name; defaults to the machine's default for
               the chosen compiler.
    files    - optional Files object used to locate the spec file/schema.
    version  - optional forced schema version (see note below).
    """
    if infile is None:
        if files is None:
            files = Files()
        infile = files.get_value("COMPILERS_SPEC_FILE")
    # NOTE(review): reads the schema from 'files' unconditionally -- if a
    # caller passes infile but leaves files=None this raises AttributeError;
    # confirm whether that combination is ever used.
    schema = files.get_schema("COMPILERS_SPEC_FILE")

    GenericXML.__init__(self, infile, schema)

    self._machobj = machobj
    if version is not None:
        # this is used in scripts_regression_tests to force version 2, it should not be used otherwise
        self._version = version
    else:
        self._version = self.get_version()

    self.machine = machobj.get_machine_name()
    self.os = machobj.get_value("OS")
    if compiler is None:
        compiler = machobj.get_default_compiler()
    self.compiler = compiler

    if mpilib is None:
        # NOTE(review): compiler can no longer be None here (defaulted above),
        # so the first branch looks unreachable -- confirm.
        if compiler is None:
            mpilib = machobj.get_default_MPIlib()
        else:
            mpilib = machobj.get_default_MPIlib( attributes={'compiler': compiler})
    self.mpilib = mpilib

    self.compiler_nodes = None # Listed from last to first

    #Append the contents of $HOME/.cime/config_compilers.xml if it exists
    #This could cause problems if node matchs are repeated when only one is expected
    infile = os.path.join(os.environ.get("HOME"), ".cime", "config_compilers.xml")
    if os.path.exists(infile):
        GenericXML.read(self, infile)

    if self.compiler is not None:
        self.set_compiler(compiler)

    if self._version > 1.0:
        # Version 2: collect the set of flag-variable names (e.g. FFLAGS)
        # declared in the XSD schema itself.
        schema_db = GenericXML(infile=schema)
        compiler_vars = schema_db.get_child(
            "{http://www.w3.org/2001/XMLSchema}group",
            attributes={"name": "compilerVars"})
        choice = schema_db.get_child(
            name="{http://www.w3.org/2001/XMLSchema}choice",
            root=compiler_vars)
        self.flag_vars = set(
            schema_db.get(elem, "name")
            for elem in schema_db.get_children(
                root=choice, attributes={"type": "flagsVar"}))
def _write_macros_file_v2(self, build_system, output, xml=None):
    """Generate a Macros file for the current machine configuration.

    Arguments:
    build_system - Target format; "Makefile" and "CMake" are the only
                   recognized values.
    output - Writable text stream (inheriting from io.TextIOBase) that
             receives the result, normally the Macros file opened for
             writing.
    xml - Optional file-like object holding compiler XML to process
          instead of the settings already loaded on this object.
    """
    # Pick the macro writer matching the requested build system.
    writer_classes = {"Makefile": MakeMacroWriter, "CMake": CMakeMacroWriter}
    if build_system in writer_classes:
        writer = writer_classes[build_system](output)
    else:
        expect(
            False,
            "Unrecognized build system provided to write_macros: " + build_system)

    # Gather <compiler> elements, either from self or from the supplied XML.
    if xml is None:
        compiler_elems = self.get_children(name="compiler")
    else:
        parsed = GenericXML()
        parsed.read_fd(xml)
        compiler_elems = parsed.get_children(name="compiler")

    # Collect variable settings from every block that applies to this machine.
    pending = dict()
    for elem in compiler_elems:
        candidate = CompilerBlock(writer, elem, self._machobj, self)
        if candidate.matches_machine():
            candidate.add_settings_to_lists(self.flag_vars, pending)

    # Emit variables in dependency order: a variable is ready once every
    # variable it references has already been written out.
    written = set()
    while pending:
        ready = [name for name in pending if pending[name].depends <= written]
        expect(
            len(ready) > 0,
            "The file {} has bad <var> references. "
            "Check for circular references or variables that "
            "are in a <var> tag but not actually defined.".format(
                self.filename))

        merged_normal = None
        merged_append = None
        for name in ready:
            written.add(name)
            # Build this variable's conditional trees and fold them into the
            # batch being written this pass.
            normal_tree, append_tree = pending[name].to_cond_trees()
            merged_normal = merge_optional_trees(normal_tree, merged_normal)
            merged_append = merge_optional_trees(append_tree, merged_append)
            # Finished with this variable; drop it before the next pass.
            del pending[name]

        if merged_normal is not None:
            merged_normal.write_out(writer)
        if merged_append is not None:
            merged_append.write_out(writer)
def __init__(
    self,
    machobj,
    infile=None,
    compiler=None,
    mpilib=None,
    files=None,
    version=None,
    extra_machines_dir=None,
):
    """
    initialize an object

    If extra_machines_dir is provided, it should be a string giving a path to an
    additional directory that will be searched for a config_compilers.xml file; if
    found, the contents of this file will be appended to the standard
    config_compilers.xml. An empty string is treated the same as None.

    Arguments:
    machobj  - machine object providing machine name, OS, and defaults
               for compiler and MPI library.
    infile   - path to a config_compilers.xml file; defaults to the
               COMPILERS_SPEC_FILE from a Files() object.
    compiler - compiler name; defaults to the machine's default compiler.
    mpilib   - MPI library name; defaults to the machine's default for
               the chosen compiler.
    files    - optional Files object used to locate the spec file/schema.
    version  - optional forced schema version (see note below).
    """
    # Guard: this legacy path is only valid when CMake macros are disabled.
    expect(
        "CIME_NO_CMAKE_MACRO" in os.environ,
        "Should not be using config_compilers.xml without CIME_NO_CMAKE_MACRO",
    )

    if infile is None:
        if files is None:
            files = Files()
        infile = files.get_value("COMPILERS_SPEC_FILE")
    # NOTE(review): reads the schema from 'files' unconditionally -- if a
    # caller passes infile but leaves files=None this raises AttributeError;
    # confirm whether that combination is ever used.
    schema = files.get_schema("COMPILERS_SPEC_FILE")

    GenericXML.__init__(self, infile, schema)

    if version is not None:
        # this is used in scripts_regression_tests to force version 2, it should not be used otherwise
        self._version = version
    else:
        self._version = self.get_version()

    self._machobj = machobj
    self.machine = machobj.get_machine_name()
    self.os = machobj.get_value("OS")

    if compiler is None:
        compiler = machobj.get_default_compiler()
    self.compiler = compiler

    if mpilib is None:
        # NOTE(review): compiler can no longer be None here (defaulted above),
        # so the first branch looks unreachable -- confirm.
        if compiler is None:
            mpilib = machobj.get_default_MPIlib()
        else:
            mpilib = machobj.get_default_MPIlib(attributes={"compiler": compiler})
    self.mpilib = mpilib

    self.compiler_nodes = None  # Listed from last to first

    # Append the contents of $HOME/.cime/config_compilers.xml if it exists.
    #
    # Also append the contents of a config_compilers.xml file in the directory given by
    # extra_machines_dir, if present.
    #
    # This could cause problems if node matches are repeated when only one is expected.
    infile = os.path.join(os.environ.get("HOME"), ".cime", "config_compilers.xml")
    if os.path.exists(infile):
        GenericXML.read(self, infile, schema=schema)
    if extra_machines_dir:
        infile = os.path.join(extra_machines_dir, "config_compilers.xml")
        if os.path.exists(infile):
            GenericXML.read(self, infile, schema=schema)

    if self.compiler is not None:
        self.set_compiler(compiler)

    if self._version > 1.0:
        # Version 2: collect the set of flag-variable names (e.g. FFLAGS)
        # declared in the XSD schema itself.
        schema_db = GenericXML(infile=schema)
        compiler_vars = schema_db.get_child(
            "{http://www.w3.org/2001/XMLSchema}group",
            attributes={"name": "compilerVars"},
        )
        choice = schema_db.get_child(
            name="{http://www.w3.org/2001/XMLSchema}choice", root=compiler_vars
        )
        self.flag_vars = set(
            schema_db.get(elem, "name")
            for elem in schema_db.get_children(
                root=choice, attributes={"type": "flagsVar"}
            )
        )