def __init__(self, name, ddts=None, logger=None):
    """Dictionary of DDT metadata definition headers, keyed by DDT type name.
    <ddts> may be a single MetadataSection, a list of them, or None.
    Raises ParseInternalError for non-DDT metadata and CCPPError for a
    duplicate DDT definition.
    """
    self._name = '{}_ddt_lib'.format(name)
    self._ddt_fields = {}  # Maps each DDT field to its DDT access info
    self._max_mod_name_len = 0
    super(DDTLibrary, self).__init__()
    # Normalize <ddts> so a single header is accepted as well as a list
    if ddts is None:
        ddt_list = []
    elif isinstance(ddts, list):
        ddt_list = ddts
    else:
        ddt_list = [ddts]
    # End if
    # Validate and register every DDT header
    for ddt in ddt_list:
        if not isinstance(ddt, MetadataSection):
            raise ParseInternalError(
                'Invalid DDT metadata type, {}'.format(type(ddt)))
        # End if
        if ddt.header_type != 'ddt':
            raise ParseInternalError(
                'Metadata table header is for a {}, should be DDT'.format(
                    ddt.header_type))
        # End if
        if ddt.title in self:
            # Report both the new and the original definition contexts
            new_ctx = context_string(ddt.source.context)
            old_ctx = context_string(self[ddt.title].source.context)
            raise CCPPError("Duplicate DDT, {}, found{}, original{}".format(
                ddt.title, new_ctx, old_ctx))
        # End if
        if logger is not None:
            logger.debug('Adding DDT {} to {}'.format(ddt.title, self.name))
        # End if
        self[ddt.title] = ddt
        # Track the longest module name (used later for alignment)
        self._max_mod_name_len = max(self._max_mod_name_len, len(ddt.module))
    # End for
def __init__(self, typestr_in=None, kind_in=None, line_in=None, context=None):
    """Initialize this type either from an explicit type string
    (<typestr_in>, with optional <kind_in>) or by parsing a Fortran type
    declaration statement (<line_in>). Exactly one of <typestr_in> or
    <line_in> must be provided; <kind_in> is only legal with <typestr_in>.
    Raises ParseInternalError for inconsistent argument combinations and
    ParseSyntaxError for an unparseable declaration.
    """
    if context is None:
        self._context = ParseContext()
    else:
        self._context = ParseContext(context=context)
    # End if
    # We have to distinguish which type of initialization we have
    if typestr_in is not None:
        if line_in is not None:
            raise ParseInternalError(
                "typestr_in and line_in cannot both be used in a single call",
                self._context)
        # End if
        self._typestr = typestr_in
        self.default_kind = kind_in is None
        if kind_in is None:
            self._kind = None
        elif kind_in[0] == '(':
            # Parse an explicit kind declaration
            self._kind = self.parse_kind_selector(kind_in)
        else:
            # The kind has already been parsed for us (e.g., by character)
            self._kind = kind_in
        # End if
    elif kind_in is not None:
        raise ParseInternalError(
            "kind_in cannot be passed without typestr_in", self._context)
    elif line_in is not None:
        match = Ftype.type_match(line_in)
        # Check for a failed match *before* touching the match object
        # (the original read match.group(0) first, raising AttributeError
        # instead of the intended ParseSyntaxError on bad input)
        if match is None:
            raise ParseSyntaxError("type declaration",
                                   token=line_in, context=self._context)
        # End if
        self._match_len = len(match.group(0))
        if check_fortran_intrinsic(match.group(1)):
            self._typestr = match.group(1)
            if match.group(2) is not None:
                # Parse kind section
                self._kind = self.parse_kind_selector(match.group(2).strip())
            else:
                self._kind = None
            # End if
            self.default_kind = self._kind is None
        else:
            raise ParseSyntaxError("type declaration",
                                   token=line_in, context=self._context)
        # End if
    else:
        raise ParseInternalError(
            "At least one of typestr_in or line must be passed",
            self._context)
def parse_specification(pobj, statements, mod_name=None, prog_name=None, logger=None):
    """Parse the specification part of a module or (sub)program.
    Exactly one of <mod_name> or <prog_name> must be given; it names the
    enclosing scope and selects the matching end-statement regex.
    Returns the remaining statements and the list of metadata headers found.
    """
    if (mod_name is not None) and (prog_name is not None):
        raise ParseInternalError(
            "<mod_name> and <prog_name> cannot both be used")
    elif mod_name is not None:
        scope_name = mod_name
        end_re = endmodule_re
        in_module = True
    elif prog_name is not None:
        scope_name = prog_name
        end_re = endprogram_re
        in_module = False
    else:
        raise ParseInternalError(
            "One of <mod_name> or <prog_name> must be used")
    # End if
    in_spec = True
    headers = list()
    while in_spec and (statements is not None):
        while len(statements) > 0:
            stmt = statements.pop(0)
            end_match = end_re.match(stmt)
            table_match = arg_table_start_re.match(stmt)
            if end_match is not None:
                # End of program or module: no contains statement was found
                in_spec = False
                break
            elif table_match is not None:
                # Re-queue the table statement so the preamble parser sees it
                statements.insert(0, stmt)
                statements, new_hdrs = parse_preamble_data(
                    statements, pobj, scope_name, end_re, logger)
                headers.extend(new_hdrs)
                in_spec = pobj.in_region('MODULE', region_name=mod_name)
                break
            elif is_contains_statement(stmt, in_module):
                in_spec = False
                break
            # End if
        # End while
        if in_spec and (len(statements) == 0):
            # Ran out of buffered statements; read more from the file
            statements = read_statements(pobj)
        # End if
    # End while
    return statements, headers
def __init__(self, parse_object=None, title=None, type_in=None, module=None, var_dict=None, property_table=False, logger=None):
    """If <parse_object> is not None, initialize from the current file and
    location in <parse_object>.
    If <parse_object> is None, initialize from <title>, <type_in>,
    <module>, and <var_dict>. Note that if <parse_object> is not None,
    <title>, <type_in>, <module>, and <var_dict> are ignored.
    Raises ParseInternalError when a required argument is missing.
    """
    self._pobj = parse_object
    if parse_object is None:
        if title is None:
            raise ParseInternalError('MetadataHeader requires a title')
        else:
            self._table_title = title
        # End if
        if type_in is None:
            raise ParseInternalError('MetadataHeader requires a header type')
        else:
            # Store the caller-supplied header type (the original
            # erroneously stored the builtin <type> function here)
            self._header_type = type_in
        # End if
        if module is None:
            raise ParseInternalError('MetadataHeader requires a module name')
        else:
            self._module_name = module
        # End if
        # Initialize our ParseSource parent
        super(MetadataHeader, self).__init__(self.title,
                                             self.header_type, self._pobj)
        self._variables = VarDictionary(self.title, logger=logger)
        for var in var_dict.variable_list():  # Let this crash if no dict
            self._variables.add_variable(var)
        # End for
    else:
        self.__init_from_file__(parse_object, property_table, logger)
    # End if
    # Categorize the variables by intent
    self._var_intents = {'in': list(), 'out': list(), 'inout': list()}
    for var in self.variable_list():
        intent = var.get_prop_value('intent')
        if intent is not None:
            self._var_intents[intent].append(var)
        # End if
def __init__(self, name, variables=None, parent_dict=None, logger=None):
    """Create a variable dictionary named <name>.
    Unlike dict, VarDictionary only accepts a Var, a Var list, another
    VarDictionary, a plain dict keyed by standard name, or None.
    """
    super(VarDictionary, self).__init__()
    self._name = name
    self._logger = logger
    self._parent_dict = parent_dict
    if parent_dict is not None:
        # Register ourselves as a sub-scope of the parent
        parent_dict.add_sub_scope(self)
    # End if
    self._sub_dicts = list()
    if variables is None:
        pass  # Nothing to add
    elif isinstance(variables, Var):
        self.add_variable(variables)
    elif isinstance(variables, list):
        for new_var in variables:
            self.add_variable(new_var)
        # End for
    elif isinstance(variables, (VarDictionary, dict)):
        # A plain dict's entries will not be in 'order', but we accept
        # them anyway (a VarDictionary is copied the same way)
        for stdname in variables.keys():
            self[stdname] = variables[stdname]
        # End for
    else:
        raise ParseInternalError(
            'Illegal type for variables, {} in {}'.format(type(variables),
                                                          self.name))
def _write_index_check(self, outfile, indent, suite_name, err_vars, use_errcode):
    """Write a check to <outfile> to make sure the "index" input is in
    bounds. Write code to set error variables if index is out of bounds.
    Only the errcode error scheme is currently supported
    (<use_errcode> must be True).
    """
    if not use_errcode:
        raise ParseInternalError("Alternative to errcode not implemented")
    # end if
    errcode, errmsg = self.__errcode_names(err_vars)
    if not self:
        # No constituents at all: any index is out of bounds
        outfile.write("{} = 1".format(errcode), indent + 1)
        msg = "write({}, '(a,i0,a)') 'ERROR: {}, "
        msg += "has no constituents'"
        outfile.write(msg.format(errmsg, self.name), indent + 1)
        return
    # end if
    outfile.write("if (index < 1) then", indent + 1)
    outfile.write("{} = 1".format(errcode), indent + 2)
    msg = "write({}, '(a,i0,a)') 'ERROR: index (',index,') "
    msg += "too small, must be >= 1'"
    outfile.write(msg.format(errmsg), indent + 2)
    msg = "else if (index > SIZE({})) then"
    outfile.write(msg.format(self.constituent_prop_array_name()), indent + 1)
    outfile.write("{} = 1".format(errcode), indent + 2)
    msg = "write({}, '(2(a,i0))') 'ERROR: index (',index,') "
    msg += "too large, must be <= ', SIZE({})"
    outfile.write(msg.format(errmsg, self.constituent_prop_array_name()),
                  indent + 2)
    outfile.write("end if", indent + 1)
def write(self, outfile, indent_level, var_dict):
    """Write this object's code block to <outfile> using <indent_level>
    as a basic offset (a negative <indent_level> writes every line with
    no indentation). Format each line using the variables from
    <var_dict>.
    Raises ParseInternalError if <var_dict> is missing any variable
    required by the code block.
    """
    # Check once (the requirement is loop-invariant; the original re-ran
    # this check for every output line) that <var_dict> has all required
    # items. Only checked when there is something to write, matching the
    # original behavior for an empty code block.
    if self.__code_block:
        missing = [var for var in self.__write_vars if var not in var_dict]
        if missing:
            # Note: the original format call dropped the variable name
            # ("'{}' ...".format(sep, var) has one slot for two args)
            errmsg = '\n'.join("'{}' missing from <var_dict>".format(var)
                               for var in missing)
            raise ParseInternalError(errmsg)
        # end if
    # end if
    for line in self.__code_block:
        stmt = line[0]
        if indent_level >= 0:
            indent = indent_level + line[1]
        else:
            indent = 0
        # end if
        outfile.write(stmt.format(**var_dict), indent)
def constituent_module_name(self):
    """Return the name of the host model constituent module.
    Raises ParseInternalError if this dictionary's grandparent does not
    look like a host model (i.e., has no <constituent_module> attribute).
    """
    # Compute the grandparent defensively: the original error path read
    # self.parent.parent inside the message even when self.parent was
    # None, which raised AttributeError instead of ParseInternalError.
    grandparent = self.parent.parent if self.parent is not None else None
    if not hasattr(grandparent, "constituent_module"):
        emsg = "ConstituentVarDict parent not HostModel?"
        emsg += "\nparent is '{}'".format(type(grandparent))
        raise ParseInternalError(emsg)
    # end if
    return grandparent.constituent_module
def __errcode_names(cls, err_vars):
    """Return (<errcode>, <errmsg>) where <errcode> is the local name for
    ccpp_error_code in <err_vars> and <errmsg> is the local name for
    ccpp_error_message in <err_vars>.
    Raises ParseInternalError if <err_vars> holds any other variable or
    if either error variable is missing.
    """
    local_names = {}
    for evar in err_vars:
        stdname = evar.get_prop_value('standard_name')
        if stdname not in ('ccpp_error_code', 'ccpp_error_message'):
            emsg = "Bad errcode variable, '{}'"
            raise ParseInternalError(emsg.format(stdname))
        # end if
        # Later entries overwrite earlier ones, as in a sequential scan
        local_names[stdname] = evar.get_prop_value('local_name')
    # end for
    errcode = local_names.get('ccpp_error_code')
    errmsg = local_names.get('ccpp_error_message')
    if (not errcode) or (not errmsg):
        raise ParseInternalError("Unsupported error scheme")
    # end if
    return errcode, errmsg
def get_var(self, standard_name=None, intent=None):
    """Look up a variable by <standard_name>, or the list of variables
    with a given <intent>. Returns None when neither selector is given.
    Raises ParseInternalError for an unknown <intent> value.
    """
    if standard_name is not None:
        return self._variables.find_variable(standard_name)
    # End if
    if intent is not None:
        if intent not in self._var_intents:
            raise ParseInternalError(
                "Illegal intent type, '{}', in {}".format(intent, self.title),
                context=self._pobj)
        # End if
        return self._var_intents[intent]
    # End if
    return None
def __init__(self, code_list):
    """Initialize object with a list of statements.
    Capture and store all variables required for output.
    Each statement is a tuple, (<string>, <indent level>). Non-negative
    indents will be added to a current indent at write time while
    negative indents are written with no indentation.
    """
    self.__code_block = code_list
    self.__write_vars = list()
    for entry in self.__code_block:
        # Each entry must be a (str, int) pair
        bad_entry = (len(entry) != 2 or
                     (not isinstance(entry[0], str)) or
                     (not isinstance(entry[1], int)))
        if bad_entry:
            raise ParseInternalError(CodeBlock.__fmt_msg.format(code_list))
        # end if
        stmt = entry[0]
        pos = 0
        limit = len(stmt)
        while pos < limit:
            # Skip over literal (doubled) curly braces
            dc_open = stmt.find('{{', pos)
            dc_close = stmt.find('}}', max(dc_open, pos))
            if 0 <= dc_open < dc_close:
                pos = dc_close + 2
            else:
                vmatch = CodeBlock.__var_re.search(stmt[pos:])
                if vmatch:
                    # Record the required format variable and move past it
                    self.__write_vars.append(vmatch.group(1))
                    pos = stmt.index('}', pos) + 1
                else:
                    pos = limit + 1
                # end if
            # end if
        # end while
def create_file_list(files, suffices, file_type, logger, root_path=None):
    """Create and return a master list of files from <files>.
    <files> is either a comma-separated string of pathnames or a list.
    If a pathname is a directory, all files with extensions in <suffices>
    are included; wildcards in a pathname are expanded.
    Filenames with a '.txt' suffix are parsed to look for allowed
    filenames.
    <file_type> is a description of the allowed file types.
    <logger> is a logger used to print warnings (unrecognized filename
    types) and debug messages.
    If <root_path> is not None, it is used to create absolute paths for
    <files>; otherwise the current working directory is used.
    Raises ParseInternalError on a bad <files> argument and CCPPError if
    any pathname could not be processed.
    """
    processed_txt_files = list()  # Guards against .txt recursion
    if isinstance(files, str):
        candidates = [entry.strip() for entry in files.split(',')]
    elif isinstance(files, (list, tuple)):
        candidates = files
    else:
        raise ParseInternalError("Bad input, <files> = {}".format(files))
    # end if
    base_path = os.getcwd() if root_path is None else root_path
    master_list, errors = _create_file_list_int(candidates, suffices,
                                                file_type, logger,
                                                processed_txt_files, None,
                                                base_path, list())
    if errors:
        emsg = 'Error processing list of {} files:\n {}'
        raise CCPPError(emsg.format(file_type, '\n '.join(errors)))
    # end if
    return master_list
def _write_init_check(self, outfile, indent, suite_name, errvar_names, use_errflg):
    """Write a check to <outfile> to make sure the constituent properties
    are initialized. Write code to initialize the error variables and/or
    set them to error values.
    Note: <errvar_names> is currently unused; the generated code assumes
    the host error variables are named errflg and errmsg -- TODO confirm.
    Only the errflg error scheme is supported (<use_errflg> must be True).
    """
    outfile.write('', 0)
    if not use_errflg:
        raise ParseInternalError("Alternative to errflg not implemented")
    # end if
    outfile.write("errflg = 0", indent + 1)
    outfile.write("errmsg = ''", indent + 1)
    outfile.write("! Make sure that our constituent array is initialized",
                  indent + 1)
    outfile.write("if (.not. {}) then".format(
        self.constituent_prop_init_name()), indent + 1)
    outfile.write("errflg = 1", indent + 2)
    err_set = ('errmsg = "constituent properties not '
               'initialized for suite, {}"'.format(suite_name))
    outfile.write(err_set, indent + 2)
    outfile.write("end if", indent + 1)
def collect_ddt_fields(self, var_dict, var, run_env, ddt=None):
    """Add all the reachable fields from DDT variable <var> of type,
    <ddt> to <var_dict>. Each field is added as a VarDDT.
    If <ddt> is None, it is looked up from <var>'s type; an unknown type
    raises ParseInternalError. A duplicate sub-variable raises CCPPError.
    """
    if ddt is None:
        vtype = var.get_prop_value('type')
        if vtype in self:
            ddt = self[vtype]
        else:
            lname = var.get_prop_value('local_name')
            ctx = context_string(var.context)
            errmsg = "Variable, {}, is not a known DDT{}"
            raise ParseInternalError(errmsg.format(lname, ctx))
        # End if
    # End if
    for dvar in ddt.variable_list():
        # Use the <run_env> argument consistently (the original read
        # self.run_env here while passing <run_env> everywhere else)
        subvar = VarDDT(dvar, var, run_env)
        dvtype = dvar.get_prop_value('type')
        if (dvar.is_ddt()) and (dvtype in self):
            # If DDT in our library, we need to add sub-fields recursively.
            subddt = self[dvtype]
            self.collect_ddt_fields(var_dict, subvar, run_env, ddt=subddt)
        else:
            # add_variable only checks the current dictionary. For a
            # DDT, the variable also cannot be in our parent dictionaries.
            stdname = dvar.get_prop_value('standard_name')
            pvar = var_dict.find_variable(standard_name=stdname,
                                          any_scope=True)
            if pvar:
                emsg = "Attempt to add duplicate DDT sub-variable, {}{}."
                emsg += "\nVariable originally defined{}"
                ntx = context_string(dvar.context)
                ctx = context_string(pvar.context)
                raise CCPPError(emsg.format(stdname, ntx, ctx))
            # end if
            # Add this intrinsic to <var_dict>
            var_dict.add_variable(subvar, run_env)
def get_prop_value(self, name, index=0):
    """Return the indicated property value, defaulting to the top-level
    DDT (<index> of 0).
    Raises ParseInternalError if <index> is out of range for this
    variable's reference chain.
    """
    if abs(index) >= self._vlen:
        emsg = "VarDDT.get_prop_value index ({}) out of range"
        raise ParseInternalError(emsg.format(index))
    # End if
    return self._var_ref_list[index].get_prop_value(name)
def _create_file_list_int(files, suffices, file_type, logger, txt_files, pathname, root_path, master_list):
    """Create and return a master list of files from <files>.
    <files> is a list of pathnames which may include wildcards.
    <suffices> is a list of allowed file types. Filenames in <files>
    with an allowed suffix will be added to the master list.
    Filenames with a '.txt' suffix will be parsed to look for allowed
    filenames.
    <file_type> is a description of the allowed file types.
    <logger> is a logger used to print warnings (unrecognized filename
    types) and debug messages.
    <txt_files> is a list of previously-encountered text files (to
    prevent infinite recursion).
    <pathname> is the text file name from which <files> was read (if any).
    <master_list> is the list of files which have already been collected.
    Returns (<master_list>, <errors>) where <errors> is a list of error
    strings.
    """
    errors = list()
    if pathname:
        pdesc = '{} pathname file, found in {}'.format(file_type, pathname)
    else:
        pdesc = '{} pathnames file'.format(file_type)
    # end if
    if not isinstance(files, list):
        raise ParseInternalError("'{}' is not a list".format(files))
    # end if
    for filename in files:
        # suff is filename's extension
        suff = os.path.splitext(filename)[1]
        if suff:
            suff = suff[1:]
        # end if
        if not os.path.isabs(filename):
            filename = os.path.normpath(os.path.join(root_path, filename))
        # end if
        if os.path.isdir(filename):
            for suff_type in suffices:
                # Use a dedicated name for the glob pattern; the original
                # code assigned it to <file_type>, clobbering the argument
                # and corrupting later error messages
                pattern = os.path.join(filename, '*.{}'.format(suff_type))
                errs = add_unique_files(pattern, pdesc, master_list, logger)
                errors.extend(errs)
            # end for
        elif suff in suffices:
            errs = add_unique_files(filename, pdesc, master_list, logger)
            errors.extend(errs)
        elif suff == 'txt':
            tfiles = glob.glob(filename)
            if tfiles:
                for file in tfiles:
                    if file in txt_files:
                        lmsg = "WARNING: Ignoring duplicate '.txt' file, {}"
                        # Report the expanded file, not the glob pattern
                        logger.warning(lmsg.format(file))
                    else:
                        lmsg = 'Reading .{} filenames from {}'
                        logger.debug(lmsg.format(', .'.join(suffices), file))
                        flist, errs = read_pathnames_from_file(file, file_type)
                        errors.extend(errs)
                        txt_files.append(file)
                        root = os.path.dirname(file)
                        _, errs = _create_file_list_int(flist, suffices,
                                                        file_type, logger,
                                                        txt_files, file,
                                                        root, master_list)
                        errors.extend(errs)
                    # end if
                # end for
            else:
                emsg = "{} pathnames file, '{}', does not exist"
                errors.append(emsg.format(file_type, filename))
            # end if
        else:
            lmsg = 'WARNING: Not reading {}, only reading .{} or .txt files'
            logger.warning(lmsg.format(filename, ', .'.join(suffices)))
        # end if
    # end for
    return master_list, errors
def find_variable(self, standard_name=None, source_var=None, any_scope=True, clone=None, search_call_list=False, loop_subst=False):
    """Attempt to return the variable matching <standard_name>.
    If <standard_name> is None, the standard name from <source_var> is
    used. It is an error to pass both <standard_name> and <source_var>
    if the standard name of <source_var> is not the same as
    <standard_name>.
    If <any_scope> is True, search parent scopes if not in current scope.
    Note: Unlike the <VarDictionary> version of this method, the case
    for CCPP_CONSTANT_VARS is not handled -- it should have been handled
    by a lower level.
    If the variable is not found but is a constituent variable type,
    create the variable in this dictionary.
    Note that although the <clone> argument is accepted for consistency,
    cloning is not handled at this level.
    If the variable is not found and <source_var> is not a constituent
    variable, return None.
    """
    # Resolve the standard name to search for
    if standard_name is None:
        if source_var is None:
            raise ParseInternalError(
                "One of <standard_name> or <source_var> must be passed.")
        # end if
        standard_name = source_var.get_prop_value('standard_name')
    elif source_var is not None:
        if source_var.get_prop_value('standard_name') != standard_name:
            raise ParseInternalError(
                "Only one of <standard_name> or <source_var> may be passed.")
        # end if
    # end if
    # Search this scope, then (optionally) the parent scopes
    if standard_name in self:
        found_var = self[standard_name]
    elif any_scope and (self.parent is not None):
        found_var = self.parent.find_variable(standard_name=standard_name,
                                              source_var=source_var,
                                              any_scope=any_scope,
                                              clone=None,
                                              search_call_list=search_call_list,
                                              loop_subst=loop_subst)
    else:
        found_var = None
    # end if
    if (found_var is None) and source_var and source_var.is_constituent():
        # Not found but a constituent type: add a clone of <source_var>
        # to this dictionary, first mapping loop dimensions onto their
        # whole-domain equivalents
        dim_map = {'horizontal_loop_extent': 'horizontal_dimension',
                   'horizontal_loop_end': 'horizontal_dimension',
                   'horizontal_loop_begin': 'ccpp_constant_one'}
        newdims = list()
        for dim in source_var.get_dimensions():
            mapped = [dim_map.get(dname, dname) for dname in dim.split(':')]
            newdims.append(':'.join(mapped))
        # end for
        found_var = source_var.clone({'dimensions': newdims},
                                     remove_intent=True,
                                     source_type=self.__constituent_type)
        self.add_variable(found_var, self.__run_env)
    # end if
    return found_var
def write_host_routines(cap, host, reg_funcname, num_const_funcname, copy_in_funcname, copy_out_funcname, const_obj_name, const_names_name, const_indices_name, suite_list, err_vars):
    """Write out the host model <reg_funcname> routine which will
    instantiate constituent fields for all the constituents in <suite_list>.
    <err_vars> is a list of the host model's error variables.
    Also write out the following routines:
       <num_const_funcname>: Number of constituents
       <copy_in_funcname>: Collect constituent fields for host
       <copy_out_funcname>: Update constituent fields from host
    Output is written to <cap>.
    Raises ParseInternalError if the host model's error variables are not
    the supported errcode / errmsg pair.
    """
    # XXgoldyXX: v need to generalize host model error var type support
    # Check that every error variable uses a supported standard name.
    # The original test was broken: the tuple was missing a comma (the
    # two names were concatenated into one string) and the resulting
    # *list* was truth-tested, so unsupported schemes were never caught.
    use_errcode = bool(err_vars) and all(
        evar.get_prop_value('standard_name') in
        ('ccpp_error_code', 'ccpp_error_message') for evar in err_vars)
    if not use_errcode:
        emsg = "Error object not supported for {}"
        # The original called the string, emsg(host.name), a TypeError
        raise ParseInternalError(emsg.format(host.name))
    # end if
    herrcode, herrmsg = ConstituentVarDict.__errcode_names(err_vars)
    err_dummy_str = "{errcode}, {errmsg}".format(errcode=herrcode,
                                                 errmsg=herrmsg)
    obj_err_callstr = "errcode={errcode}, errmsg={errmsg}"
    obj_err_callstr = obj_err_callstr.format(errcode=herrcode,
                                             errmsg=herrmsg)
    # XXgoldyXX: ^ need to generalize host model error var type support
    # First up, the registration routine
    substmt = "subroutine {}".format(reg_funcname)
    stmt = "{}(suite_list, ncols, num_layers, num_interfaces, {})"
    cap.write(stmt.format(substmt, err_dummy_str), 1)
    cap.write("! Create constituent object for suites in <suite_list>", 2)
    cap.write("", 0)
    ConstituentVarDict.write_constituent_use_statements(cap, suite_list, 2)
    cap.write("", 0)
    cap.write("! Dummy arguments", 2)
    cap.write("character(len=*), intent(in) :: suite_list(:)", 2)
    cap.write("integer, intent(in) :: ncols", 2)
    cap.write("integer, intent(in) :: num_layers", 2)
    cap.write("integer, intent(in) :: num_interfaces", 2)
    for evar in err_vars:
        evar.write_def(cap, 2, host, dummy=True, add_intent="out")
    # end for
    cap.write("! Local variables", 2)
    spc = ' '*37
    cap.write("integer{} :: num_suite_consts".format(spc), 2)
    cap.write("integer{} :: num_consts".format(spc), 2)
    cap.write("integer{} :: index".format(spc), 2)
    cap.write("integer{} :: field_ind".format(spc), 2)
    cap.write("type({}), pointer :: const_prop".format(CONST_PROP_TYPE), 2)
    cap.write("", 0)
    cap.write("{} = 0".format(herrcode), 2)
    cap.write("num_consts = 0", 2)
    # Count the total number of constituents over all suites
    for suite in suite_list:
        const_dict = suite.constituent_dictionary()
        funcname = const_dict.num_consts_funcname()
        cap.write("! Number of suite constants for {}".format(suite.name), 2)
        errvar_str = ConstituentVarDict.__errcode_callstr(herrcode,
                                                          herrmsg, suite)
        cap.write("num_suite_consts = {}({})".format(funcname,
                                                     errvar_str), 2)
        cap.write("num_consts = num_consts + num_suite_consts", 2)
    # end for
    cap.write("if ({} == 0) then".format(herrcode), 2)
    cap.write("! Initialize constituent data and field object", 3)
    stmt = "call {}%initialize_table(num_consts)"
    cap.write(stmt.format(const_obj_name), 3)
    cap.write("end if", 2)
    # Register each suite's constituent metadata with the field object
    for suite in suite_list:
        errvar_str = ConstituentVarDict.__errcode_callstr(herrcode,
                                                          herrmsg, suite)
        cap.write("if ({} == 0) then".format(herrcode), 2)
        cap.write("! Add {} constituent metadata".format(suite.name), 3)
        const_dict = suite.constituent_dictionary()
        funcname = const_dict.num_consts_funcname()
        cap.write("num_suite_consts = {}({})".format(funcname,
                                                     errvar_str), 3)
        cap.write("end if", 2)
        funcname = const_dict.copy_const_subname()
        cap.write("do index = 1, num_suite_consts", 2)
        cap.write("allocate(const_prop, stat={})".format(herrcode), 3)
        cap.write("if ({} /= 0) then".format(herrcode), 3)
        cap.write('{} = "ERROR allocating const_prop"'.format(herrmsg), 4)
        cap.write("end if", 3)
        cap.write("if ({} == 0) then".format(herrcode), 3)
        stmt = "call {}(index, const_prop, {})"
        cap.write(stmt.format(funcname, errvar_str), 4)
        cap.write("end if", 3)
        cap.write("if ({} == 0) then".format(herrcode), 3)
        stmt = "call {}%new_field(const_prop, {})"
        cap.write(stmt.format(const_obj_name, obj_err_callstr), 4)
        cap.write("end if", 3)
        cap.write("nullify(const_prop)", 3)
        cap.write("if ({} /= 0) then".format(herrcode), 3)
        cap.write("exit", 4)
        cap.write("end if", 3)
        cap.write("end do", 2)
        cap.write("", 0)
    # end for
    cap.write("if ({} == 0) then".format(herrcode), 2)
    stmt = "call {}%lock_table(ncols, num_layers, num_interfaces, {})"
    cap.write(stmt.format(const_obj_name, obj_err_callstr), 3)
    cap.write("end if", 2)
    cap.write("! Set the index for each active constituent", 2)
    cap.write("do index = 1, SIZE({})".format(const_indices_name), 2)
    stmt = "field_ind = {}%field_index({}(index), {})"
    cap.write(stmt.format(const_obj_name, const_names_name,
                          obj_err_callstr), 3)
    cap.write("if (field_ind > 0) then", 3)
    cap.write("{}(index) = field_ind".format(const_indices_name), 4)
    cap.write("else", 3)
    cap.write("{} = 1".format(herrcode), 4)
    stmt = "{} = 'No field index for '//trim({}(index))"
    cap.write(stmt.format(herrmsg, const_names_name), 4)
    cap.write("end if", 3)
    cap.write("if ({} /= 0) then".format(herrcode), 3)
    cap.write("exit", 4)
    cap.write("end if", 3)
    cap.write("end do", 2)
    cap.write("end {}".format(substmt), 1)
    # Next, write num_consts routine
    substmt = "function {}".format(num_const_funcname)
    cap.write("", 0)
    cap.write("integer {}({})".format(substmt, err_dummy_str), 1)
    cap.write("! Return the number of constituent fields for this run", 2)
    cap.write("", 0)
    cap.write("! Dummy arguments", 2)
    for evar in err_vars:
        evar.write_def(cap, 2, host, dummy=True, add_intent="out")
    # end for
    cap.write("", 0)
    cap.write("{} = {}%num_constituents({})".format(num_const_funcname,
                                                    const_obj_name,
                                                    obj_err_callstr), 2)
    cap.write("end {}".format(substmt), 1)
    # Next, write copy_in routine
    substmt = "subroutine {}".format(copy_in_funcname)
    cap.write("", 0)
    cap.write("{}(const_array, {})".format(substmt, err_dummy_str), 1)
    cap.write("! Copy constituent field info into <const_array>", 2)
    cap.write("", 0)
    cap.write("! Dummy arguments", 2)
    cap.write("real(kind_phys), intent(out) :: const_array(:,:,:)", 2)
    for evar in err_vars:
        evar.write_def(cap, 2, host, dummy=True, add_intent="out")
    # end for
    cap.write("", 0)
    cap.write("call {}%copy_in(const_array, {})".format(const_obj_name,
                                                        obj_err_callstr), 2)
    cap.write("end {}".format(substmt), 1)
    # Next, write copy_out routine
    substmt = "subroutine {}".format(copy_out_funcname)
    cap.write("", 0)
    cap.write("{}(const_array, {})".format(substmt, err_dummy_str), 1)
    cap.write("! Update constituent field info from <const_array>", 2)
    cap.write("", 0)
    cap.write("! Dummy arguments", 2)
    cap.write("real(kind_phys), intent(in) :: const_array(:,:,:)", 2)
    for evar in err_vars:
        evar.write_def(cap, 2, host, dummy=True, add_intent="out")
    # end for
    cap.write("", 0)
    cap.write("call {}%copy_out(const_array, {})".format(const_obj_name,
                                                         obj_err_callstr), 2)
    cap.write("end {}".format(substmt), 1)
def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger):
    "Parse dummy argument information from a subroutine"
    # Returns (<statements>, <mheader>) where <statements> is the list of
    # unconsumed statements and <mheader> is a MetadataTable built from the
    # subroutine's dummy arguments.
    # Raises CCPPError if the subroutine is never found or if any dummy
    # argument lacks a declaration; ParseSyntaxError on duplicate or
    # unknown dummy arguments.
    psrc = None
    mheader = None
    var_dict = None
    scheme_name = None
    # Find the subroutine line, should be first executable statement
    inpreamble = False
    insub = True
    if logger is not None:
        ctx = context_string(pobj, nodir=True)
        msg = "Parsing specification of {}{}"
        logger.debug(msg.format(table_name, ctx))
    # End if
    ctx = context_string(pobj)  # Save initial context with directory
    vdict = None  # Initialized when we parse the subroutine arguments
    while insub and (statements is not None):
        while statements:
            statement = statements.pop(0)
            smatch = _SUBROUTINE_RE.match(statement)
            esmatch = _END_SUBROUTINE_RE.match(statement)
            pmatch = _ENDMODULE_RE.match(statement)
            asmatch = _ARG_TABLE_START_RE.match(statement)
            if asmatch is not None:
                # We have run off the end of something, hope that is okay
                # Put this statement back for the caller to deal with
                statements.insert(0, statement)
                insub = False
                break
            # End if
            if pmatch is not None:
                # We have run off the end of the module, hope that is okay
                pobj.leave_region('MODULE', region_name=spec_name)
                insub = False
                break
            # End if
            if smatch is not None:
                # Found a subroutine statement; only process its preamble
                # if its name matches the metadata table we are looking for
                scheme_name = smatch.group(1)
                inpreamble = scheme_name.lower() == table_name.lower()
                if inpreamble:
                    # group(2) is the (possibly empty) dummy argument list
                    if smatch.group(2) is not None:
                        smstr = smatch.group(2).strip()
                        if len(smstr) > 0:
                            smlist = smstr.strip().split(',')
                        else:
                            smlist = list()
                        # End if
                        scheme_args = [x.strip().lower() for x in smlist]
                    else:
                        scheme_args = list()
                    # End if
                    # Create a dict template with all the scheme's arguments
                    # in the correct order
                    vdict = OrderedDict()
                    for arg in scheme_args:
                        if len(arg) == 0:
                            errmsg = 'Empty argument{}'
                            raise ParseInternalError(errmsg.format(pobj))
                        # End if
                        if arg in vdict:
                            errmsg = 'Duplicate dummy argument, {}'
                            raise ParseSyntaxError(errmsg.format(arg),
                                                   context=pobj)
                        # End if
                        vdict[arg] = None
                    # End for
                    psrc = ParseSource(scheme_name, 'scheme', pobj)
                # End if
            elif inpreamble:
                # Process a preamble statement (use or argument declaration)
                if esmatch is not None:
                    # End of the subroutine: stop processing entirely
                    inpreamble = False
                    insub = False
                elif ((not is_comment_statement(statement)) and
                      (not parse_use_statement(statement, logger)) and
                      is_dummy_argument_statement(statement)):
                    # A declaration statement may declare several dummies;
                    # record each one in its reserved slot in <vdict>
                    dvars = parse_fortran_var_decl(statement, psrc,
                                                   logger=logger)
                    for var in dvars:
                        lname = var.get_prop_value('local_name').lower()
                        if lname in vdict:
                            if vdict[lname] is not None:
                                emsg = "Error: duplicate dummy argument, {}"
                                raise ParseSyntaxError(emsg.format(lname),
                                                       context=pobj)
                            # End if
                            vdict[lname] = var
                        else:
                            # Declared variable is not a dummy argument
                            raise ParseSyntaxError('dummy argument',
                                                   token=lname, context=pobj)
                        # End if
                    # End for
                # End if
            # End if
        # End while
        if insub and (len(statements) == 0):
            # Ran out of buffered statements; read more from the file
            statements = read_statements(pobj)
        # End if
    # End while
    # Check for missing declarations
    missing = list()
    if vdict is None:
        errmsg = 'Subroutine, {}, not found{}'
        raise CCPPError(errmsg.format(scheme_name, ctx))
    # End if
    for lname in vdict.keys():
        if vdict[lname] is None:
            missing.append(lname)
        # End if
    # End for
    if len(missing) > 0:
        errmsg = 'Missing local_variables, {} in {}'
        raise CCPPError(errmsg.format(missing, scheme_name))
    # End if
    var_dict = VarDictionary(scheme_name, variables=vdict)
    if (scheme_name is not None) and (var_dict is not None):
        # NOTE(review): table_name_in is set to <scheme_name>; the two were
        # verified case-insensitively equal above, so only the case may
        # differ from <table_name> -- confirm this is intended
        mheader = MetadataTable(table_name_in=scheme_name,
                                table_type_in='scheme', module=spec_name,
                                var_dict=var_dict, logger=logger)
    # End if
    return statements, mheader
def parse_specification(pobj, statements, mod_name=None, prog_name=None, logger=None):
    """Parse specification part of a module or (sub)program.
    Exactly one of <mod_name> or <prog_name> must be given.
    Returns the remaining statements and the list of metadata tables found.
    Raises CCPPError on a duplicate metadata table.
    """
    if (mod_name is not None) and (prog_name is not None):
        raise ParseInternalError(
            "<mod_name> and <prog_name> cannot both be used")
    # end if
    if mod_name is not None:
        spec_name = mod_name
        endmatch = _ENDMODULE_RE
        inmod = True
    elif prog_name is not None:
        spec_name = prog_name
        endmatch = _ENDPROGRAM_RE
        inmod = False
    else:
        raise ParseInternalError(
            "One of <mod_name> or <prog_name> must be used")
    # End if
    if logger is not None:
        ctx = context_string(pobj, nodir=True)
        msg = "Parsing specification of {}{}"
        logger.debug(msg.format(spec_name, ctx))
    # End if
    inspec = True
    mtables = list()
    while inspec and (statements is not None):
        while len(statements) > 0:
            statement = statements.pop(0)
            # End program or module
            pmatch = endmatch.match(statement)
            asmatch = _ARG_TABLE_START_RE.match(statement)
            if pmatch is not None:
                # We never found a contains statement
                inspec = False
                break
            elif asmatch is not None:
                # Put table statement back to re-read
                statements.insert(0, statement)
                statements, new_tbls = parse_preamble_data(statements, pobj,
                                                           spec_name,
                                                           endmatch, logger)
                for tbl in new_tbls:
                    title = tbl.table_name
                    # <mtables> holds table objects, so compare names.
                    # (The original tested 'title in mtables', which is
                    # always False for a string against table objects, and
                    # then indexed the list with that string.)
                    existing = next((mtbl for mtbl in mtables
                                     if mtbl.table_name == title), None)
                    if existing is not None:
                        errmsg = duplicate_header(existing, tbl)
                        raise CCPPError(errmsg)
                    # end if
                    if logger is not None:
                        ctx = tbl.start_context()
                        mtype = tbl.table_type
                        msg = "Adding metadata from {}, {}{}"
                        logger.debug(msg.format(mtype, title, ctx))
                    # End if
                    mtables.append(tbl)
                # End for
                inspec = pobj.in_region('MODULE', region_name=mod_name)
                break
            elif is_contains_statement(statement, inmod):
                inspec = False
                break
            # End if
        # End while
        if inspec and (len(statements) == 0):
            statements = read_statements(pobj)
        # End if
    # End while
    return statements, mtables
def read_file(filename, preproc_defs=None, logger=None):
    """Read a file into an array of lines.
    Preprocess lines to consolidate continuation lines.
    Remove preprocessor directives and code eliminated by #if statements.
    Removed code results in blank lines, not removed lines.
    Returns a ParseObject holding the processed lines.
    Raises IOError if <filename> does not exist.
    """
    preproc_status = PreprocStack()
    if not os.path.exists(filename):
        raise IOError("read_file: file, '{}', does not exist".format(filename))
    # End if
    # We need special rules for fixed-form source
    fixed_form = filename[-2:].lower() == '.f'
    # Read all lines of the file at once
    with open(filename, 'r') as infile:  # renamed: <file> shadows a builtin
        file_lines = infile.readlines()
        # Bug fix: xrange is Python-2 only (NameError under Python 3)
        for index in range(len(file_lines)):
            file_lines[index] = file_lines[index].rstrip('\n').rstrip()
        # End for
    # End with
    # Create a parse object and context for this file
    pobj = ParseObject(filename, file_lines)
    continue_col = -1   # Active continue column
    in_schar = False    # Single quote character context
    in_dchar = False    # Double quote character context
    prev_line = None
    prev_line_num = -1
    curr_line, curr_line_num = pobj.curr_line()
    while curr_line is not None:
        # Skip empty lines and comment-only lines
        skip_line = False
        if len(curr_line.strip()) == 0:
            skip_line = True
        elif fixed_form and (fixed_comment_re.match(curr_line) is not None):
            skip_line = True
        elif curr_line.lstrip()[0] == '!':
            skip_line = True
        # End if
        if skip_line:
            curr_line, curr_line_num = pobj.next_line()
            continue
        # End if
        # Handle preproc issues
        if preproc_status.process_line(curr_line, preproc_defs, pobj, logger):
            pobj.write_line(curr_line_num, "")
            curr_line, curr_line_num = pobj.next_line()
            continue
        # End if
        if not preproc_status.in_true_region():
            # Special case to allow CCPP comment statements in False
            # regions to find DDT and module table code
            if (curr_line[0:2] != '!!') and (curr_line[0:2] != '!>'):
                pobj.write_line(curr_line_num, "")
                curr_line, curr_line_num = pobj.next_line()
                continue
            # End if
        # End if
        # Scan the line for properties
        if fixed_form:
            res = scan_fixed_line(curr_line, in_schar, in_dchar, pobj)
            cont_in_col, in_schar, in_dchar, comment_col = res
            continue_col = cont_in_col  # No warning in fixed form
            cont_out_col = -1
            if (comment_col < 0) and (continue_col < 0):
                # Real statement, grab the line # in case it is continued
                prev_line_num = curr_line_num
                prev_line = None
            # End if
        else:
            res = scan_free_line(curr_line, (continue_col >= 0),
                                 in_schar, in_dchar, pobj)
            cont_in_col, cont_out_col, in_schar, in_dchar, comment_col = res
        # End if
        # If in a continuation context, move this line to previous
        if continue_col >= 0:
            if fixed_form and (prev_line is None):
                # Fixed form: statement field ends at column 72
                prev_line = pobj.peek_line(prev_line_num)[0:72]
            # End if
            if prev_line is None:
                raise ParseInternalError("No prev_line to continue",
                                         context=pobj)
            # End if
            sindex = max(cont_in_col + 1, 0)
            if fixed_form:
                # Continuation text occupies columns 7-72 in fixed form
                sindex = 6
                eindex = 72
            elif cont_out_col > 0:
                eindex = cont_out_col
            else:
                eindex = len(curr_line)
            # End if
            prev_line = prev_line + curr_line[sindex:eindex]
            if fixed_form:
                prev_line = prev_line.rstrip()
            # End if
            # Rewrite the file's lines
            pobj.write_line(prev_line_num, prev_line)
            pobj.write_line(curr_line_num, "")
            if (not fixed_form) and (cont_out_col < 0):
                # We are done with this line, reset prev_line
                prev_line = None
                prev_line_num = -1
            # End if
        # End if
        continue_col = cont_out_col
        if (continue_col >= 0) and (prev_line is None):
            # We need to set up prev_line as it is continued
            prev_line = curr_line[0:continue_col]
            if not (in_schar or in_dchar):
                prev_line = prev_line.rstrip()
            # End if
            prev_line_num = curr_line_num
        # End if
        curr_line, curr_line_num = pobj.next_line()
    # End while
    return pobj
def find_variable(self, standard_name=None, source_var=None,
                  any_scope=False, clone=None,
                  search_call_list=False, loop_subst=False):
    """Return the host model variable matching <standard_name> or None
    If <loop_subst> is True, substitute a begin:end range for an extent.

    Lookup order: the parent dictionary (via super()), then the host
    model's DDT variable dictionary, then (optionally) a loop substitution.
    Hits are recorded in <__used_variables> when that set is active;
    misses are recorded in <__deferred_finds> when that set is active.
    """
    # First, try the normal (parent-class) variable lookup
    my_var = super(HostModel,
                   self).find_variable(standard_name=standard_name,
                                       source_var=source_var,
                                       any_scope=any_scope, clone=clone,
                                       search_call_list=search_call_list,
                                       loop_subst=loop_subst)
    if my_var is None:
        # Check our DDT library
        if standard_name is None:
            # Need a standard name to search the DDT dictionary;
            # derive it from <source_var> if possible
            if source_var is None:
                emsg = ("One of <standard_name> or <source_var> " +
                        "must be passed.")
                raise ParseInternalError(emsg)
            # end if
            standard_name = source_var.get_prop_value('standard_name')
        # end if
        # Since we are the parent of the DDT library, only check that dict
        my_var = self.__ddt_dict.find_variable(standard_name=standard_name,
                                               any_scope=False)
    # End if
    if loop_subst:
        if my_var is None:
            my_var = self.find_loop_subst(standard_name)
        # End if
        if my_var is not None:
            # If we get here, the host does not have the requested
            # variable but does have a replacement set. Create a new
            # variable to use to send to suites.
            # NOTE(review): this branch also runs when <my_var> was found
            # by the lookups above (it is a sibling of the find_loop_subst
            # test, not nested under it) -- confirm that is intended.
            ##XXgoldyXX: This cannot be working since find_loop_subst
            ## returns a tuple
            new_name = self.new_internal_variable_name(prefix=self.name)
            ctx = ParseContext(filename='host_model.py')
            new_var = my_var.clone(new_name, source_name=self.name,
                                   source_type="HOST", context=ctx)
            self.add_variable(new_var)
            my_var = new_var
        # End if
    # End if
    if my_var is None:
        # Record the miss so it can be revisited later
        if self.__deferred_finds is not None:
            self.__deferred_finds.add(standard_name)
        # End if
    elif self.__used_variables is not None:
        lname = my_var.get_prop_value('local_name')
        # Try to add any index references (should be method?)
        # Non-range entries (no ':') of the reference's index list are
        # looked up recursively so they are recorded as used as well
        imatch = FORTRAN_SCALAR_REF_RE.match(lname)
        if imatch is not None:
            vdims = [x.strip() for x in imatch.group(2).split(',')
                     if ':' not in x]
            for vname in vdims:
                _ = self.find_variable(standard_name=vname)
            # End for
        # End if
        if isinstance(my_var, VarDDT):
            # For a DDT component, record the parent object's local name
            lname = my_var.get_parent_prop('local_name')
        # End if
        self.__used_variables.add(lname)
    # End if
    return my_var
def get_dimensions(self, loop_subst=False, index=0):
    """Return the dimensions of the indicated var, defaulting to the
    top-level DDT.
    <index> selects an entry of the variable reference list (negative
    values index from the end, as with normal Python indexing).
    Raises ParseInternalError if <index> is out of range.
    """
    if abs(index) >= self._vlen:
        # Bug fix: the message previously named the wrong method
        # (get_prop_value); report get_dimensions so errors make sense
        emsg = "VarDDT.get_dimensions index ({}) out of range"
        raise ParseInternalError(emsg.format(index))
    # End if
    return self._var_ref_list[index].get_dimensions(loop_subst)
def __init__(self, typestr_in=None, kind_in=None, match_len_in=None,
             line_in=None, context=None):
    """Initialize this Ftype object in one of two modes: from an explicit
    type string (<typestr_in>, optionally qualified by <kind_in>), or by
    parsing a raw Fortran declaration (<line_in>). <match_len_in>, when
    given, overrides the computed length of the matched declaration text.
    Raises ParseInternalError for invalid argument combinations and
    ParseSyntaxError when <line_in> is not a valid type declaration."""
    self.__context = (ParseContext() if context is None
                      else ParseContext(context=context))
    # Mode is determined by which of <typestr_in> / <line_in> was supplied
    self.__typestr = typestr_in
    if typestr_in is not None:
        # Explicit-type mode; a declaration line is not allowed here
        if line_in is not None:
            emsg = "Cannot pass both typestr_in and line_in as arguments"
            raise ParseInternalError(emsg, self.__context)
        # end if
        self.__default_kind = kind_in is None
        if kind_in is None:
            self.__kind = None
        elif kind_in[0] == '(':
            # An explicit kind selector still needs parsing
            self.__kind = self.parse_kind_selector(kind_in)
        else:
            # The kind was pre-parsed by the caller (e.g., character)
            self.__kind = kind_in
        # end if
        if match_len_in is None:
            # Reconstruct the matched length: type name plus "(kind)"
            mlen = len(self.typestr)
            if kind_in is not None:
                mlen += len(self.__kind) + 2
            # end if
            self.__match_len = mlen
        else:
            self.__match_len = match_len_in
        # end if
    elif kind_in is not None:
        emsg = "kind_in cannot be passed without typestr_in"
        raise ParseInternalError(emsg, self.__context)
    elif line_in is not None:
        # Declaration-line mode: parse the type from the raw text
        tmatch = Ftype.type_match(line_in)
        if tmatch is None:
            raise ParseSyntaxError("type declaration",
                                   token=line_in, context=self.__context)
        # end if
        self.__match_len = (len(tmatch.group(0)) if match_len_in is None
                            else match_len_in)
        if not check_fortran_intrinsic(tmatch.group(1)):
            # Matched something, but not an intrinsic type name
            raise ParseSyntaxError("type declaration",
                                   token=line_in, context=self.__context)
        # end if
        self.__typestr = tmatch.group(1)
        kind_str = tmatch.group(2)
        self.__kind = (None if kind_str is None
                       else self.parse_kind_selector(kind_str.strip()))
        self.__default_kind = self.__kind is None
    else:
        emsg = "At least one of typestr_in or line_in must be passed"
        raise ParseInternalError(emsg, self.__context)