def parse_metadata_file(cls, filename):
    "Parse <filename> and return list of parsed metadata headers"
    # Read all lines of the file at once
    mheaders = list()
    with open(filename, 'r') as file:
        fin_lines = file.readlines()
        for index in range(len(fin_lines)):
            fin_lines[index] = fin_lines[index].rstrip('\n')
        # End for
    # End with
    # Look for a header start
    parse_obj = ParseObject(filename, fin_lines)
    curr_line, curr_line_num = parse_obj.curr_line()
    while curr_line is not None:
        if MetadataHeader.table_start(curr_line):
            if '[ccpp-table-properties]' in curr_line:
                mheaders.append(MetadataHeader(parse_obj, property_table=True))
            else:
                mheaders.append(MetadataHeader(parse_obj))
            # End if
            curr_line, curr_line_num = parse_obj.curr_line()
        else:
            curr_line, curr_line_num = parse_obj.next_line()
        # End if
    # End while
    return mheaders
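# Minimal usage sketch (not part of the original module): parse a single metadata
# file and summarize what was found. The '.meta' filename is hypothetical, and this
# assumes parse_metadata_file is exposed as a classmethod on MetadataHeader (its
# <cls> first argument suggests a @classmethod decorator not shown here).
def _demo_parse_metadata(filename='example_scheme.meta'):
    """Parse <filename> and report how many metadata headers it contains."""
    headers = MetadataHeader.parse_metadata_file(filename)
    print("{}: found {} metadata header(s)".format(filename, len(headers)))
    return headers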
def add_constituent_vars(cap, host_model, suite_list, logger):
###############################################################################
    """Create a DDT library containing array reference variables
    for each constituent field for all suites in <suite_list>.
    Create and return a dictionary containing an index variable for each
    of the constituents as well as the variables from the DDT object.
    Also, write declarations for these variables to <cap>.
    Since the constituents are in a DDT (ccpp_constituent_properties_t),
    create a metadata table with the required information, then parse it
    to create the dictionary.
    """
    # First create a MetadataTable for the constituents DDT
    stdname_layer = "ccpp_constituents_num_layer_consts"
    stdname_interface = "ccpp_constituents_num_interface_consts"
    stdname_2d = "ccpp_constituents_num_2d_consts"
    horiz_dim = "horizontal_dimension"
    vert_layer_dim = "vertical_layer_dimension"
    vert_interface_dim = "vertical_interface_dimension"
    array_layer = "vars_layer"
    array_interface = "vars_interface"
    array_2d = "vars_2d"
    # Table preamble (leave off ccpp-table-properties header)
    ddt_mdata = [
        #"[ccpp-table-properties]",
        " name = {}".format(CONST_DDT_NAME), " type = ddt",
        "[ccpp-arg-table]",
        " name = {}".format(CONST_DDT_NAME), " type = ddt",
        "[ num_layer_vars ]",
        " standard_name = {}".format(stdname_layer),
        " units = count", " dimensions = ()", " type = integer",
        "[ num_interface_vars ]",
        " standard_name = {}".format(stdname_interface),
        " units = count", " dimensions = ()", " type = integer",
        "[ num_2d_vars ]",
        " standard_name = {}".format(stdname_2d),
        " units = count", " dimensions = ()", " type = integer",
        "[ {} ]".format(array_layer),
        " standard_name = ccpp_constituents_array_of_layer_consts",
        " units = none",
        " dimensions = ({}, {}, {})".format(horiz_dim, vert_layer_dim,
                                            stdname_layer),
        " type = real", " kind = kind_phys",
        "[ {} ]".format(array_interface),
        " standard_name = ccpp_constituents_array_of_interface_consts",
        " units = none",
        " dimensions = ({}, {}, {})".format(horiz_dim, vert_interface_dim,
                                            stdname_interface),
        " type = real", " kind = kind_phys",
        "[ {} ]".format(array_2d),
        " standard_name = ccpp_constituents_array_of_2d_consts",
        " units = none",
        " dimensions = ({}, {})".format(horiz_dim, stdname_2d),
        " type = real", " kind = kind_phys"]
    # Add entries for each constituent (once per standard name)
    const_stdnames = set()
    for suite in suite_list:
        if logger is not None:
            lmsg = "Adding constituents from {} to {}"
            logger.debug(lmsg.format(suite.name, host_model.name))
        # end if
        scdict = suite.constituent_dictionary()
        for cvar in scdict.variable_list():
            std_name = cvar.get_prop_value('standard_name')
            if std_name not in const_stdnames:
                # Add a metadata entry for this constituent
                # Check dimensions and figure vertical dimension
                # Currently, we only support variables with first dimension,
                #   horizontal_dimension, and second (optional) dimension,
                #   vertical_layer_dimension or vertical_interface_dimension
                dims = cvar.get_dimensions()
                if (len(dims) < 1) or (len(dims) > 2):
                    emsg = "Unsupported constituent dimensions, '{}'"
                    dimstr = "({})".format(", ".join(dims))
                    raise CCPPError(emsg.format(dimstr))
                # end if
                hdim = dims[0].split(':')[-1]
                if hdim != 'horizontal_dimension':
                    emsg = "Unsupported first constituent dimension, '{}', "
                    emsg += "must be 'horizontal_dimension'"
                    raise CCPPError(emsg.format(hdim))
                # end if
                if len(dims) > 1:
                    vdim = dims[1].split(':')[-1]
                    if vdim == vert_layer_dim:
                        cvar_array_name = array_layer
                    elif vdim == vert_interface_dim:
                        cvar_array_name = array_interface
                    else:
                        emsg = "Unsupported vertical constituent dimension, "
                        emsg += "'{}', must be '{}' or '{}'"
                        raise CCPPError(emsg.format(vdim, vert_layer_dim,
                                                    vert_interface_dim))
                    # end if
                else:
                    cvar_array_name = array_2d
                # end if
                # First, create an index variable for <cvar>
                ind_std_name = "index_of_{}".format(std_name)
                loc_name = "{}(:,:,{})".format(cvar_array_name, ind_std_name)
                ddt_mdata.append("[ {} ]".format(loc_name))
                ddt_mdata.append(" standard_name = {}".format(std_name))
                units = cvar.get_prop_value('units')
                ddt_mdata.append(" units = {}".format(units))
                dimstr = "({})".format(", ".join(dims))
                ddt_mdata.append(" dimensions = {}".format(dimstr))
                vtype = cvar.get_prop_value('type')
                vkind = cvar.get_prop_value('kind')
                ddt_mdata.append(" type = {} | kind = {}".format(vtype, vkind))
                const_stdnames.add(std_name)
            # end if
        # end for
    # end for
    # Parse this table using a fake filename
    parse_obj = ParseObject("{}_constituent_mod.meta".format(host_model.name),
                            ddt_mdata)
    ddt_table = MetadataTable(parse_object=parse_obj, logger=logger)
    ddt_name = ddt_table.sections()[0].title
    ddt_lib = DDTLibrary('{}_constituent_ddtlib'.format(host_model.name),
                         ddts=ddt_table.sections(), logger=logger)
    # A bit of cleanup
    del parse_obj
    del ddt_mdata
    # Now, create the "host constituent module" dictionary
    const_dict = VarDictionary("{}_constituents".format(host_model.name),
                               parent_dict=host_model)
    # Add in the constituents object
    prop_dict = {'standard_name' : "ccpp_model_constituents_object",
                 'local_name' : constituent_model_object_name(host_model),
                 'dimensions' : '()', 'units' : "None", 'ddt_type' : ddt_name}
    const_var = Var(prop_dict, _API_SOURCE)
    const_var.write_def(cap, 1, const_dict)
    ddt_lib.collect_ddt_fields(const_dict, const_var)
    # Declare variable for the constituent standard names array
    max_csname = max([len(x) for x in const_stdnames]) if const_stdnames else 0
    num_const_fields = len(const_stdnames)
    cs_stdname = constituent_model_const_stdnames(host_model)
    const_list = sorted(const_stdnames)
    if const_list:
        const_strs = ['"{}{}"'.format(x, ' '*(max_csname - len(x)))
                      for x in const_list]
        cs_stdname_initstr = " = (/ " + ", ".join(const_strs) + " /)"
    else:
        cs_stdname_initstr = ""
    # end if
    cap.write("character(len={}) :: {}({}){}".format(max_csname, cs_stdname,
                                                     num_const_fields,
                                                     cs_stdname_initstr), 1)
    # Declare variable for the constituent indices array
    array_name = constituent_model_const_indices(host_model)
    cap.write("integer :: {}({}) = -1".format(array_name, num_const_fields), 1)
    # Add individual variables for each index var to the const_dict
    for index, std_name in enumerate(const_list):
        ind_std_name = "index_of_{}".format(std_name)
        ind_loc_name = "{}({})".format(array_name, index + 1)
        prop_dict = {'standard_name' : ind_std_name,
                     'local_name' : ind_loc_name, 'dimensions' : '()',
                     'units' : 'index', 'protected' : "True",
                     'type' : 'integer', 'kind' : ''}
        ind_var = Var(prop_dict, _API_SOURCE)
        const_dict.add_variable(ind_var)
    # end for
    # Add vertical dimensions for DDT call strings
    pver = host_model.find_variable(standard_name=vert_layer_dim,
                                    any_scope=False)
    if pver is not None:
        prop_dict = {'standard_name' : vert_layer_dim,
                     'local_name' : pver.get_prop_value('local_name'),
                     'units' : 'count', 'type' : 'integer',
                     'protected' : 'True', 'dimensions' : '()'}
        if const_dict.find_variable(standard_name=vert_layer_dim,
                                    any_scope=False) is None:
            ind_var = Var(prop_dict, _API_SOURCE)
            const_dict.add_variable(ind_var)
        # end if
    # end if
    pver = host_model.find_variable(standard_name=vert_interface_dim,
                                    any_scope=False)
    if pver is not None:
        prop_dict = {'standard_name' : vert_interface_dim,
                     'local_name' : pver.get_prop_value('local_name'),
                     'units' : 'count', 'type' : 'integer',
                     'protected' : 'True', 'dimensions' : '()'}
        if const_dict.find_variable(standard_name=vert_interface_dim,
                                    any_scope=False) is None:
            ind_var = Var(prop_dict, _API_SOURCE)
            const_dict.add_variable(ind_var)
        # end if
    # end if
    return const_dict
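# Illustrative sketch (the standard name, units, and kind below are hypothetical):
# the per-constituent metadata lines that add_constituent_vars appends to
# <ddt_mdata> for a layer-dimensioned constituent, mirroring the logic above.
def _example_constituent_mdata(std_name="specific_humidity",
                               units="kg kg-1", vkind="kind_phys"):
    """Return the metadata lines generated for one layer constituent."""
    dims = ("horizontal_dimension", "vertical_layer_dimension")
    loc_name = "vars_layer(:,:,index_of_{})".format(std_name)
    return ["[ {} ]".format(loc_name),
            " standard_name = {}".format(std_name),
            " units = {}".format(units),
            " dimensions = ({})".format(", ".join(dims)),
            " type = real | kind = {}".format(vkind)]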
def read_file(filename, preproc_defs=None, logger=None):
    """Read a file into an array of lines.
    Preprocess lines to consolidate continuation lines.
    Remove preprocessor directives and code eliminated by #if statements.
    Removed code results in blank lines, not removed lines.
    """
    preproc_status = PreprocStack()
    if not os.path.exists(filename):
        raise IOError("read_file: file, '{}', does not exist".format(filename))
    else:
        # We need special rules for fixed-form source
        fixed_form = filename[-2:].lower() == '.f'
        # Read all lines of the file at once
        with open(filename, 'r') as file:
            file_lines = file.readlines()
            for index in range(len(file_lines)):
                file_lines[index] = file_lines[index].rstrip('\n').rstrip()
            # End for
        # End with
        # Create a parse object and context for this file
        pobj = ParseObject(filename, file_lines)
        continue_col = -1 # Active continue column
        in_schar = False  # Single quote character context
        in_dchar = False  # Double quote character context
        prev_line = None
        prev_line_num = -1
        curr_line, curr_line_num = pobj.curr_line()
        while curr_line is not None:
            # Skip empty lines and comment-only lines
            skip_line = False
            if len(curr_line.strip()) == 0:
                skip_line = True
            elif fixed_form and (fixed_comment_re.match(curr_line) is not None):
                skip_line = True
            elif curr_line.lstrip()[0] == '!':
                skip_line = True
            # End if
            if skip_line:
                curr_line, curr_line_num = pobj.next_line()
                continue
            # End if
            # Handle preproc issues
            if preproc_status.process_line(curr_line, preproc_defs, pobj, logger):
                pobj.write_line(curr_line_num, "")
                curr_line, curr_line_num = pobj.next_line()
                continue
            # End if
            if not preproc_status.in_true_region():
                # Special case to allow CCPP comment statements in False
                # regions to find DDT and module table code
                if (curr_line[0:2] != '!!') and (curr_line[0:2] != '!>'):
                    pobj.write_line(curr_line_num, "")
                    curr_line, curr_line_num = pobj.next_line()
                    continue
                # End if
            # End if
            # Scan the line for properties
            if fixed_form:
                res = scan_fixed_line(curr_line, in_schar, in_dchar, pobj)
                cont_in_col, in_schar, in_dchar, comment_col = res
                continue_col = cont_in_col # No warning in fixed form
                cont_out_col = -1
                if (comment_col < 0) and (continue_col < 0):
                    # Real statement, grab the line number in case it is continued
                    prev_line_num = curr_line_num
                    prev_line = None
                # End if
            else:
                res = scan_free_line(curr_line, (continue_col >= 0),
                                     in_schar, in_dchar, pobj)
                cont_in_col, cont_out_col, in_schar, in_dchar, comment_col = res
            # End if
            # If in a continuation context, move this line to previous
            if continue_col >= 0:
                if fixed_form and (prev_line is None):
                    prev_line = pobj.peek_line(prev_line_num)[0:72]
                # End if
                if prev_line is None:
                    raise ParseInternalError("No prev_line to continue",
                                             context=pobj)
                # End if
                sindex = max(cont_in_col + 1, 0)
                if fixed_form:
                    sindex = 6
                    eindex = 72
                elif cont_out_col > 0:
                    eindex = cont_out_col
                else:
                    eindex = len(curr_line)
                # End if
                prev_line = prev_line + curr_line[sindex:eindex]
                if fixed_form:
                    prev_line = prev_line.rstrip()
                # End if
                # Rewrite the file's lines
                pobj.write_line(prev_line_num, prev_line)
                pobj.write_line(curr_line_num, "")
                if (not fixed_form) and (cont_out_col < 0):
                    # We are done with this line, reset prev_line
                    prev_line = None
                    prev_line_num = -1
                # End if
            # End if
            continue_col = cont_out_col
            if (continue_col >= 0) and (prev_line is None):
                # We need to set up prev_line as it is continued
                prev_line = curr_line[0:continue_col]
                if not (in_schar or in_dchar):
                    prev_line = prev_line.rstrip()
                # End if
                prev_line_num = curr_line_num
            # End if
            curr_line, curr_line_num = pobj.next_line()
        # End while
    return pobj
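# Minimal usage sketch (the filename is hypothetical, and the reset_pos() call is
# an assumption about the ParseObject API, since read_file leaves the object
# positioned at its last line): read a Fortran source file with continuation
# lines consolidated, rewind the result, and print its non-blank lines.
def _demo_read_file(filename='example_module.F90'):
    """Print every non-blank, continuation-merged line of <filename>."""
    pobj = read_file(filename, preproc_defs=None, logger=None)
    pobj.reset_pos()  # assumed rewind of the parse position
    line, _ = pobj.curr_line()
    while line is not None:
        if line.strip():
            print(line)
        line, _ = pobj.next_line()
    return pobj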