def load_mpi_mapping(api_mapping_txt):
    cur_map, cur_name = '', ''
    stage = ''
    with open(api_mapping_txt, "r") as In:
        for line in In:
            # -- stage header --
            if RE.match(r'(\w+_KIND_MAP):', line):
                name = RE.m.group(1)
                stage = "MAP"
                cur_name = name
                if name not in G.MAPS:
                    cur_map = {'_name': name}
                    G.MAPS[name] = cur_map
                else:
                    cur_map = G.MAPS[name]
            elif RE.match(r'Default Descriptions', line):
                stage = "default_descriptions"
                cur_name = "Default Descriptions"
            # -- per-stage parsing --
            elif stage == "MAP":
                if RE.match(r'\s+\.base:\s*(\w+)', line):
                    name = RE.m.group(1)
                    cur_map = copy.deepcopy(G.MAPS[name])
                    cur_map['_name'] = cur_name
                    G.MAPS[cur_name] = cur_map
                elif RE.match(r'\s+(\w+):\s*(.*)', line):
                    name, param_type = RE.m.group(1, 2)
                    cur_map[name] = param_type
            elif stage == "default_descriptions":
                if RE.match(r'\s*(\w+):\s*(.*)', line):
                    key, val = RE.m.group(1, 2)
                    G.default_descriptions[key] = val
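
# A minimal sketch of the mapping-file format load_mpi_mapping() accepts,
# inferred from the regexes above. The map names, kinds, and C types shown
# here are illustrative, not copied from the real apis_mapping.txt:
#
#   SMALL_C_KIND_MAP:
#       BUFFER: void *
#       COMM: MPI_Comm
#   BIG_C_KIND_MAP:
#       .base: SMALL_C_KIND_MAP
#       COUNT: MPI_Count
#   Default Descriptions
#       comm: communicator
#
# A "<NAME>_KIND_MAP:" header starts (or reopens) a map, an indented
# ".base: OTHER_MAP" line seeds it with a deep copy of an existing map, and
# indented "KIND: type" lines add or override individual entries.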
def load_mpix_txt():
    G.mpix_symbols = {}
    stage = "functions"
    if os.path.exists("src/binding/mpix.txt"):
        with open("src/binding/mpix.txt") as In:
            for line in In:
                if RE.match(r'#\s*mpi.h\s+symbols', line):
                    stage = "symbols"
                if RE.match(r'(MPI_\w+)', line):
                    name = RE.m.group(1)
                    G.mpix_symbols[name] = stage
                    if stage == "functions":
                        G.FUNCS[name.lower()]['mpix'] = 1
def check_func_directives(func):
    if 'dir' in func and func['dir'] == "mpit":
        func['_skip_fortran'] = 1
    elif RE.match(r'mpix_grequest_', func['name'], re.IGNORECASE):
        func['_skip_fortran'] = 1
    elif RE.match(r'mpi_attr_', func['name'], re.IGNORECASE):
        func['_skip_fortran'] = 1
    elif RE.match(r'.*_function', func['name'], re.IGNORECASE):
        func['_skip_fortran'] = 1
    elif RE.match(r'mpi_pcontrol', func['name'], re.IGNORECASE):
        func['_skip_fortran'] = 1
    elif RE.match(r'mpi_\w+_(f|f08|c)2(f|f08|c)$', func['name'], re.IGNORECASE):
        # implemented in mpi_f08_types.f90
        func['_skip_fortran'] = 1
def dump_f90_file(f, lines):
    print(" --> [%s]" % f)
    with open(f, "w") as Out:
        for l in G.copyright_f90:
            print(l, file=Out)
        indent = 0
        for l in lines:
            if RE.match(r'INDENT', l):
                indent += 1
            elif RE.match(r'DEDENT', l):
                indent -= 1
            else:
                if indent > 0:
                    print(" " * indent, end='', file=Out)
                print(l, file=Out)
def get_userbuffer_group(func_name, parameters, i):
    """internal function used by process_func_parameters"""
    p = parameters[i]
    p2 = parameters[i + 1]
    p3 = parameters[i + 2]
    if RE.match(r'mpi_i?(alltoall|allgather|gather|scatter)', func_name, re.IGNORECASE):
        type = "inplace"
        if RE.search(r'send', p['name'], re.IGNORECASE) and RE.search(r'scatter', func_name, re.IGNORECASE):
            type = "noinplace"
        elif RE.search(r'recv', p['name'], re.IGNORECASE) and not RE.search(r'scatter', func_name, re.IGNORECASE):
            type = "noinplace"
        if RE.search(r'alltoallw', func_name, re.IGNORECASE):
            group_kind = "USERBUFFER-%s-w" % (type)
            group_count = 4
        elif p3['kind'] == "DATATYPE":
            group_kind = "USERBUFFER-%s" % (type)
            group_count = 3
        else:
            group_kind = "USERBUFFER-%s-v" % (type)
            group_count = 4
    elif RE.match(r'mpi_i?neighbor', func_name, re.IGNORECASE):
        if RE.search(r'alltoallw', func_name, re.IGNORECASE):
            group_kind = "USERBUFFER-neighbor-w"
            group_count = 4
        elif p3['kind'] == "DATATYPE":
            group_kind = "USERBUFFER-neighbor"
            group_count = 3
        else:
            group_kind = "USERBUFFER-neighbor-v"
            group_count = 4
    elif RE.match(r'mpi_i?(allreduce|reduce|scan|exscan)', func_name, re.IGNORECASE):
        group_kind = "USERBUFFER-reduce"
        group_count = 5
    elif RE.match(r'mpi_p(send|recv)_init', func_name, re.IGNORECASE):
        group_kind = "USERBUFFER-partition"
        group_count = 4
    elif RE.search(r'XFER_NUM_ELEM', p2['kind']) and RE.search(r'DATATYPE', p3['kind']):
        group_kind = "USERBUFFER-simple"
        group_count = 3
    else:
        group_kind, group_count = None, 0
    return (group_kind, group_count)
def load_coll_algos(algo_txt):
    All = {}
    with open(algo_txt) as In:
        (func_commkind, algo_list, algo) = (None, None, None)
        for line in In:
            if RE.match(r'(\w+-(intra|inter)):', line):
                func_commkind = RE.m.group(1)
                algo_list = []
                All[func_commkind] = algo_list
            elif RE.match(r'\s+(\w+)\s*$', line):
                algo = {"name": RE.m.group(1), "func-commkind": func_commkind}
                algo_list.append(algo)
            elif RE.match(r'\s+(\w+):\s*(.+)', line):
                (key, value) = RE.m.group(1, 2)
                algo[key] = value
    return All
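
# Sketch of the algorithm-list format load_coll_algos() expects, inferred from
# the regexes above; the algorithm name and attribute keys below are
# illustrative, not taken from the real algorithm file:
#
#   barrier-intra:
#       dissemination
#           extra_params: k=2
#           cvar_params: DISSEM_KVAL
#
# A "<func>-<intra|inter>:" header starts a new list, an indented bare word
# starts an algorithm entry, and indented "key: value" lines attach
# attributes to the most recent entry.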
def get_algo_extra_args(algo, kind):
    (func_name, commkind) = algo['func-commkind'].split('-')
    extra_params = algo['extra_params'].replace(' ', '').split(',')
    cvar_params = algo['cvar_params'].replace(' ', '').split(',')
    if len(extra_params) != len(cvar_params):
        raise Exception("algorithm %s-%s-%s: extra_params and cvar_params sizes mismatch!" % (func_name, commkind, algo['name']))

    out_list = []
    for i in range(len(extra_params)):
        if RE.match(r'\w+=(.+)', extra_params[i]):
            # constant parameter
            out_list.append(RE.m.group(1))
        else:
            if kind == "csel":
                prefix = "cnt->u.%s.%s_%s." % (func_name, commkind, algo['name'])
                out_list.append(prefix + extra_params[i])
            elif kind == "cvar":
                prefix = "MPIR_CVAR_%s_" % func_name.upper()
                tmp = prefix + cvar_params[i]
                if re.match(r"%sTREE_TYPE" % prefix, tmp):
                    newname = "MPIR_%s_tree_type" % func_name.capitalize()
                    tmp = re.sub(r"%sTREE_TYPE" % prefix, newname, tmp)
                elif re.match(r"%sTHROTTLE" % prefix, tmp):
                    newname = "MPIR_CVAR_ALLTOALL_THROTTLE"
                    tmp = re.sub(r"%sTHROTTLE" % prefix, newname, tmp)
                out_list.append(tmp)
            else:
                raise Exception("Wrong kind!")
    return ', '.join(out_list)
def is_pointer_type(param):
    if RE.match(r'(STRING\w*)$', param['kind']):
        return 1
    elif RE.match(r'(STATUS|F90_STATUS|F08_STATUS)$', param['kind']):
        return 1
    elif RE.match(r'(ATTRIBUTE_VAL\w*|(C_)?BUFFER\d?|EXTRA_STATE\d*|TOOL_MPI_OBJ|(POLY)?FUNCTION\w*)$', param['kind']):
        return 1
    elif param['param_direction'] != 'in':
        return 1
    elif param['length']:
        return 1
    elif param['pointer']:
        return 1
    else:
        return 0
def add_prototype(l):
    if RE.match(r'int\s+(\w+)\(', l):
        func_name = RE.m.group(1)
        if func_name not in G.prototypes_hash:
            G.prototypes_hash[func_name] = 1
            G.prototypes.append(l)
    else:
        pass
def get_op_procname(a, op):
    if RE.match(r'MPIX?_(\w+)', a):
        a = RE.m.group(1)
    if op == '.EQ.':
        return a.lower() + 'eq'
    elif op == '.NE.':
        return a.lower() + 'neq'
    else:
        raise Exception("Unrecognized op: %s" % op)
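
# Usage sketch: get_op_procname() derives the Fortran comparison-operator
# procedure name from a handle type name and a relational operator, e.g.
#   get_op_procname('MPI_Comm', '.EQ.')      -> 'commeq'
#   get_op_procname('MPI_Datatype', '.NE.')  -> 'datatypeneq'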
def load_C_func_list(binding_dir="src/binding", silent=False): # -- Loading Standard APIs -- if os.path.exists("%s/apis.json" % binding_dir): if not silent: print("Loading %s/apis.json ..." % binding_dir) load_mpi_json("%s/apis.json" % binding_dir) else: if not silent: print("Loading %s/mpi_standard_api.txt ..." % binding_dir) load_mpi_api("%s/mpi_standard_api.txt" % binding_dir) if not silent: print("Loading %s/apis_mapping.txt ..." % binding_dir) load_mpi_mapping("%s/apis_mapping.txt" % binding_dir) if not silent: print("Loading %s/custom_mapping.txt ..." % binding_dir) load_mpi_mapping("%s/custom_mapping.txt" % binding_dir) # -- Loading MPICH APIs -- api_files = glob.glob("%s/c/*_api.txt" % binding_dir) for f in api_files: if RE.match(r'.*\/(\w+)_api.txt', f): # The name in eg pt2pt_api.txt indicates the output folder. # Only the api functions with output folder will get generated. # This allows simple control of what functions to generate. if not silent: print("Loading %s ..." % f) load_mpi_api(f, RE.m.group(1)) # -- filter and sort func_list -- func_list = [] for f in G.FUNCS.values(): if 'not_implemented' in f: if not silent: print(" skip %s (not_implemented)" % f['name']) elif RE.match(r'\w+_(function|FN)$', f['name']): # skip various callback functions continue elif not 'dir' in f: if not silent: print(" skip %s (not defined)" % f['name']) else: func_list.append(f) func_list.sort(key=lambda f: f['dir']) load_mpix_txt() return func_list
def get_algo_extra_params(algo):
    extra_params = algo['extra_params'].replace(' ', '').split(',')
    out_list = []
    for a in extra_params:
        if RE.match(r'(\w+)=.+', a):
            # constant parameter
            out_list.append("int " + RE.m.group(1))
        else:
            out_list.append("int " + a)
    return ', '.join(out_list)
def dump_c_file(f, lines):
    print(" --> [%s]" % f)
    with open(f, "w") as Out:
        indent = 0
        for l in G.copyright_c:
            print(l, file=Out)
        for l in lines:
            if RE.match(r'(INDENT|DEDENT)', l):
                # indentations
                a = RE.m.group(1)
                if a == "INDENT":
                    indent += 1
                else:
                    indent -= 1
            elif RE.match(r'\s*(fn_exit|fn_fail|fallback):', l):
                # labels
                print(" %s:" % RE.m.group(1), file=Out)
            else:
                # print the line with correct indentations
                if indent > 0 and not RE.match(r'#(if|endif)', l):
                    print(" " * indent, end='', file=Out)
                print(l, file=Out)
def parse_info_block(f):
    """Parse a source file with INFO_HINT_BLOCKs, and return a list of info hints"""
    hints = []
    info = None  # loop variable
    stage = 0
    with open(f) as In:
        for line in In:
            if line.startswith("=== BEGIN_INFO_HINT_BLOCK ==="):
                stage = 1
            elif line.startswith("=== END_INFO_HINT_BLOCK ==="):
                stage = 0
            elif stage:
                if RE.match(r'\s*-\s*name\s*:\s*(\w+)', line):
                    info = {"name": RE.m.group(1)}
                    hints.append(info)
                elif RE.match(r'\s*(functions|type|default)\s*:\s*(.*)', line):
                    info[RE.m.group(1)] = RE.m.group(2)
                elif RE.match(r'\s*description\s*:\s*>-', line):
                    info['description'] = ""
                elif RE.match(r'\s+(\S.+)', line):
                    info['description'] += RE.m.group(1) + ' '
                else:
                    info = None
    return hints
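
# Sketch of an INFO_HINT_BLOCK as parse_info_block() understands it, inferred
# from the patterns above; the hint name and values are illustrative:
#
#   === BEGIN_INFO_HINT_BLOCK ===
#     - name: mpi_assert_no_any_source
#       functions: MPI_Comm_set_info
#       type: boolean
#       default: false
#       description: >-
#           If set to true, the user promises that MPI_ANY_SOURCE will
#           not be used with this communicator.
#   === END_INFO_HINT_BLOCK ===
#
# Each "- name:" line starts a new hint dict; "functions/type/default" lines
# set attributes, and indented lines after "description: >-" are concatenated
# into the description string.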
def dump_f90_sizeofs():
    # deprecated in MPI-4, replaced by Fortran intrinsic c_sizeof() and storage_size()
    types = {}  # list of types we support
    types['CH1'] = "CHARACTER"
    types["L%d" % int(G.opts['f-logical-size'])] = "LOGICAL"
    # NOTE: we assume the fixed-size types are available. The alternative is to use
    #       integer kind and real kind. MPI_SIZEOF is deprecated. We'll keep it simple
    #       until we encounter compilers that don't support fixed-size types.
    types['I1'] = "INTEGER*1"
    types['I2'] = "INTEGER*2"
    types['I4'] = "INTEGER*4"
    types['I8'] = "INTEGER*8"
    types['R4'] = "REAL*4"
    types['R8'] = "REAL*8"
    types['CX8'] = "COMPLEX*8"
    types['CX16'] = "COMPLEX*16"

    G.out.append("PUBLIC :: MPI_SIZEOF")
    G.out.append("INTERFACE MPI_SIZEOF")
    for k in types.keys():
        G.out.append(" MODULE PROCEDURE MPI_SIZEOF_%s" % k)
        G.out.append(" MODULE PROCEDURE MPI_SIZEOF_%sV" % k)
    G.out.append("END INTERFACE")
    G.out.append("")
    G.out.append("CONTAINS")
    for k, v in types.items():
        if RE.match(r'[A-Z]+(\d+)', k):
            n = int(RE.m.group(1))
            # Scalar
            G.out.append("")
            G.out.append("SUBROUTINE MPI_SIZEOF_%s(X, SIZE, IERROR)" % k)
            G.out.append(" %s :: X" % v)
            G.out.append(" INTEGER :: SIZE, IERROR")
            G.out.append(" SIZE = %d" % n)
            G.out.append(" IERROR = 0")
            G.out.append("END SUBROUTINE MPI_SIZEOF_%s" % k)
            # Array
            G.out.append("")
            G.out.append("SUBROUTINE MPI_SIZEOF_%sV(X, SIZE, IERROR)" % k)
            G.out.append(" %s :: X(*)" % v)
            G.out.append(" INTEGER :: SIZE, IERROR")
            G.out.append(" SIZE = %d" % n)
            G.out.append(" IERROR = 0")
            G.out.append("END SUBROUTINE MPI_SIZEOF_%sV" % k)
def load_mpi_api(api_txt, gen_in_dir=""): """Load mpi standard api into global (G) lists and dictionaries.""" cur_func, cur_name = '', '' stage = '' with open(api_txt, "r") as In: for line in In: # -- stage header -- if RE.match(r'(MPI\w+):\s*(.*)', line): name, attr = RE.m.group(1, 2) key = name.lower() stage = "FUNC" cur_name = name if key in G.FUNCS: cur_func = G.FUNCS[key] if RE.search(r'not_implemented', attr): cur_func['not_implemented'] = True else: cur_func = { 'name': name, 'parameters': [], 'attrs': attr, 'desc': "" } G.FUNCS[key] = cur_func if gen_in_dir: cur_func['dir'] = gen_in_dir elif RE.match(r'(\w+)', line): print("Unexpected leading word [%s] in %s" % (RE.m.group(1), api_txt), file=sys.stderr) # anything with unexpected unindented word resets stage stage = '' # -- per-stage parsing -- elif stage == "FUNC": if RE.match(r'\s+\.(\w+):\s*(.*)', line): key, val = RE.m.group(1, 2) cur_func[key] = val elif RE.match(r'\s+(\w+):\s*(\w+)(.*)', line): name, kind, t = RE.m.group(1, 2, 3) if name == 'index': # avoid -Wshadow warning name = 'indx' p = {'name': name, 'kind': kind} if RE.match(r'(.*),\s*\[(.*)\]\s*$', t): t, p['desc'] = RE.m.group(1, 2) p['t'] = t # we include all extra attributes in a 't' string for flexibity # we'll parse the common fields also to improve code readability parse_param_attributes(p) cur_func['parameters'].append(p) elif RE.match(r'{\s*-+\s*(\w+)\s*-+(.*)', line): stage = "code-" + RE.m.group(1) if stage not in cur_func: cur_func[stage] = [] # "error_check" may include list of parameters checked # "handle_ptr" may include list of parameters converted cur_func[stage + "-tail"] = RE.m.group(2).replace( ' ', '').split(',') elif RE.match(r'{', line): stage = "FUNC-body" if 'body' not in cur_func: cur_func['body'] = [] elif RE.match(r'\/\*', line): # man page notes stage = "FUNC-notes" # 'notes' and 'notes2' goes before and after auto-generated notes if RE.match(r'\/\*\s*-+\s*notes-2\s*-+', line): cur_func['notes2'] = [] elif 'notes' not in cur_func: cur_func['notes'] = [] else: cur_func['notes2'] = [] elif stage == "FUNC-body": if RE.match(r'}', line): stage = "FUNC" else: line = re.sub(r'^ ', '', line) cur_func['body'].append(line) elif RE.match(r'(code-\w+)', stage): if RE.match(r'}', line): stage = "FUNC" else: line = re.sub(r'^ ', '', line) cur_func[stage].append(line) elif stage == "FUNC-notes": if RE.match(r'\*\/', line): stage = "FUNC" else: line = re.sub(r'^ ', '', line) if 'notes2' in cur_func: cur_func['notes2'].append(line) else: cur_func['notes'].append(line)
def split_line_with_break(s, tail, N=100):
    """Breaks a long line with proper indentations.

    This simplistic routine splits on ", ", thus only works with function
    declarations and simple function calls such as those generated by this
    script.
    """
    out_list = []
    tlist = []
    n = 0

    # by default, segments indent by additional 4 spaces
    if RE.match(r'(\s*)', s):
        n_lead = len(RE.m.group(1)) + 4

    if len(s) < N:
        tlist.append(s)
        n = len(s)
    elif RE.match(r'(.*?\()(.*)', s):
        # line with function pattern, match indent at opening parenthesis
        s_lead, s_next = RE.m.group(1, 2)
        n_lead = len(s_lead)
        for a in s_next.split(', '):
            if n == 0:
                # first line
                tlist = [s_lead, a]
                n = n_lead + len(a)
            elif n + 2 + len(a) < N:
                # just append to tlist
                tlist.append(', ')
                tlist.append(a)
                n += 2 + len(a)
            else:
                # break the line
                tlist.append(',')
                out_list.append(''.join(tlist))
                # start new line with leading spaces
                # if lead is too much, it won't look good
                if n_lead > N - 40:
                    tlist = [' ' * 20, a]
                    n = 20 + len(a)
                else:
                    tlist = [' ' * n_lead, a]
                    n = n_lead + len(a)
        # leave last segment with tail
    else:
        # only break long function declaration or call for now
        tlist.append(s)
        n = len(s)

    # tail is mostly for "__attribute__ ((weak, alias(...))));",
    # which contains , that we do not desire to break
    if tail:
        if n + 1 + len(tail) < 100:
            out_list.append(''.join(tlist) + ' ' + tail)
        else:
            out_list.append(''.join(tlist))
            out_list.append(' ' * n_lead + tail)
    else:
        out_list.append(''.join(tlist))

    return out_list
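
# Usage sketch: a long prototype is broken at ", " boundaries and continuation
# lines are aligned with the opening parenthesis. MPI_Example and its
# parameter list below are hypothetical, shown only to illustrate the call:
#
#   lines = split_line_with_break(
#       "int MPI_Example(void *buf, MPI_Count count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm)",
#       "__attribute__ ((weak, alias(\"PMPI_Example\"))));", N=80)
#   for l in lines:
#       print(l)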
def main():
    # currently support -no-real128, -no-mpiio, -aint-is-int
    G.parse_cmdline()

    binding_dir = G.get_srcdir_path("src/binding")
    f08_dir = "src/binding/fortran/use_mpi_f08"
    G.check_write_path("%s/wrappers_f/" % f08_dir)
    G.check_write_path("%s/wrappers_c/" % f08_dir)

    func_list = load_C_func_list(binding_dir, True)  # suppress noise

    if "no-mpiio" in G.opts:
        # a few MPI_File_xxx functions are already in (MPI_File_xxx_errhandler)
        func_list = [f for f in func_list if not f['name'].startswith('MPI_File_')]
    else:
        # FIXME: until romio interface is generated
        func_list.extend(get_mpiio_func_list())
    func_list.extend(get_type_create_f90_func_list())

    skip_large_list = []
    # skip large variations because MPI_ADDRESS_KIND == MPI_COUNT_KIND
    if 'aint-is-int' not in G.opts:
        skip_large_list.extend([
            "MPI_Op_create", "MPI_Register_datarep", "MPI_Type_create_resized",
            "MPI_Type_get_extent", "MPI_Type_get_true_extent",
            "MPI_File_get_type_extent", "MPI_Win_allocate",
            "MPI_Win_allocate_shared", "MPI_Win_create", "MPI_Win_shared_query"
        ])
    # skip File large count functions because they are not implemented yet
    for func in func_list:
        if func['name'].startswith('MPI_File_') or func['name'] == 'MPI_Register_datarep':
            skip_large_list.append(func['name'])

    # preprocess
    for func in func_list:
        check_func_directives(func)
        if '_skip_fortran' in func:
            continue
        if function_has_POLY_parameters(func) and func['name'] not in skip_large_list:
            func['_need_large'] = True
        else:
            func['_need_large'] = False
        process_func_parameters(func)
    func_list = [f for f in func_list if '_skip_fortran' not in f]

    # f08_cdesc.c
    G.out = []
    G.decls = []
    for func in func_list:
        if need_cdesc(func):
            G.out.append("")
            dump_f08_wrappers_c(func, False)
            if func['_need_large']:
                G.out.append("")
                dump_f08_wrappers_c(func, True)
    f = "%s/wrappers_c/f08_cdesc.c" % f08_dir
    dump_cdesc_c(f, G.out)
    f = "%s/wrappers_c/cdesc_proto.h" % f08_dir
    dump_cdesc_proto_h(f, G.decls)

    # f08ts.f90
    G.out = []
    for func in func_list:
        dump_f08_wrappers_f(func, False)
        if func['_need_large']:
            dump_f08_wrappers_f(func, True)
    f = "%s/wrappers_f/f08ts.f90" % f08_dir
    dump_f90_file(f, G.out)

    do_profiling = True
    if do_profiling:
        temp_out = []
        for l in G.out:
            temp_out.append(re.sub(r'(subroutine|function)\s+(MPIX?)_', r'\1 P\2R_', l, flags=re.IGNORECASE))
        f = "%s/wrappers_f/pf08ts.f90" % f08_dir
        dump_f90_file(f, temp_out)
        temp_out = None

    # mpi_c_interface_{cdesc,nobuf}.f90
    G.out = []
    dump_interface_module_open("mpi_c_interface_cdesc")
    for func in func_list:
        if need_cdesc(func):
            dump_mpi_c_interface_cdesc(func, False)
            if func['_need_large']:
                dump_mpi_c_interface_cdesc(func, True)
    f_sync_reg = {
        'name': "MPI_F_sync_reg",
        'parameters': [{
            'name': "buf",
            'kind': "BUFFER",
            't': '',
            'large_only': None,
            'param_direction': "in"
        }]
    }
    dump_mpi_c_interface_cdesc(f_sync_reg, False)
    dump_interface_module_close("mpi_c_interface_cdesc")
    f = "%s/mpi_c_interface_cdesc.f90" % f08_dir
    dump_f90_file(f, G.out)

    G.out = []
    dump_interface_module_open("mpi_c_interface_nobuf")
    for func in func_list:
        if not need_cdesc(func):
            dump_mpi_c_interface_nobuf(func, False)
            if func['_need_large']:
                dump_mpi_c_interface_nobuf(func, True)
    dump_interface_module_close("mpi_c_interface_nobuf")
    f = "%s/mpi_c_interface_nobuf.f90" % f08_dir
    dump_f90_file(f, G.out)

    # mpi_f08.f90 and pmpi_f08.f90
    G.out = []
    dump_F_module_open("mpi_f08")
    G.out.append("USE,intrinsic :: iso_c_binding, ONLY: c_ptr")
    G.out.append("USE :: pmpi_f08")
    G.out.append("USE :: mpi_f08_types")
    G.out.append("USE :: mpi_f08_compile_constants")
    G.out.append("USE :: mpi_f08_link_constants")
    G.out.append("USE :: mpi_f08_callbacks")
    G.out.append("")
    G.out.append("IMPLICIT NONE")
    for func in func_list:
        G.out.append("")
        func_name = get_function_name(func, False)
        G.out.append("INTERFACE %s" % func_name)
        G.out.append("INDENT")
        dump_mpi_f08(func, False)
        if func['_need_large']:
            G.out.append("")
            dump_mpi_f08(func, True)
        G.out.append("DEDENT")
        G.out.append("END INTERFACE %s" % func_name)
    G.out.append("")
    dump_F_module_close("mpi_f08")
    f = "%s/mpi_f08.f90" % f08_dir
    dump_f90_file(f, G.out)

    if do_profiling:
        temp_out = []
        for l in G.out:
            if l == "USE :: pmpi_f08":
                pass
            elif RE.match(r'((?:\s*end\s+)?module)\s+(\w+)', l, re.IGNORECASE):
                temp_out.append(RE.m.group(1) + ' p' + RE.m.group(2))
            elif RE.match(r'((?:\s*end\s+)?interface)\s+(\w+)', l, re.IGNORECASE):
                temp_out.append(RE.m.group(1) + ' P' + RE.m.group(2))
            else:
                temp_out.append(re.sub(r'(subroutine|function)\s+(MPIX?)_', r'\1 P\2R_', l, flags=re.IGNORECASE))
        f = "%s/pmpi_f08.f90" % f08_dir
        dump_f90_file(f, temp_out)
        temp_out = None

    # mpi_f08_types.f90
    G.out = []
    dump_mpi_f08_types()
    f = "%s/mpi_f08_types.f90" % f08_dir
    dump_f90_file(f, G.out)
def dump_f90_func(func):
    f90_mapping = get_kind_map('F90', False)

    f_param_list = []
    decl_list = []
    tkr_list = []  # variables that need IGNORE tkr (type-kind-rank) check
    uses = {}
    for p in func['parameters']:
        if re.search(r'suppress=.*f90_parameter', p['t']):
            continue
        if re.search(r'large_only', p['t']):
            continue
        f_param_list.append(p['name'])

        f_type = f90_mapping[p['kind']]
        if re.match(r'<type>', f_type, re.IGNORECASE):
            # TODO: configure it
            if False:
                # use assumed type
                f_type = 'TYPE(*), INTENT(IN)'
            else:
                f_type = 'REAL'
                tkr_list.append(p['name'])
        elif RE.match(r'.*(MPI_\w+_KIND)', f_type, re.IGNORECASE):
            int_kind = RE.m.group(1)
            if RE.match(r'MPI_Type_(lb|ub|extent|hvector|hindexed|struct)', func['name'], re.IGNORECASE):
                f_type = 'INTEGER'
            else:
                uses[int_kind] = 1

        if p['kind'] == 'STRING_ARRAY':
            decl = "%s :: %s(*)" % (f_type, p['name'])
        elif p['kind'] == 'STRING_2DARRAY':
            if re.match(r'mpi_comm_spawn_multiple', func['name'], re.IGNORECASE):
                decl = "%s :: %s(count, *)" % (f_type, p['name'])
            else:
                raise Exception("Unexpected")
        elif p['length'] is not None:
            if re.match(r'CHARACTER.*\*', f_type, re.IGNORECASE):
                decl = "%s :: %s" % (f_type, p['name'])
            elif isinstance(p['length'], list):
                # assume [n, 3] as ranges in MPI_Group_range_excl
                decl = "%s :: %s(%s, *)" % (f_type, p['name'], p['length'][1])
            elif p['length']:
                decl = "%s :: %s(%s)" % (f_type, p['name'], p['length'])
            else:
                decl = "%s :: %s(*)" % (f_type, p['name'])
        elif p['kind'] == 'STATUS':
            uses['MPI_STATUS_SIZE'] = 1
            decl = "INTEGER :: %s(MPI_STATUS_SIZE)" % (p['name'])
        else:
            decl = "%s :: %s" % (f_type, p['name'])
        decl_list.append(decl)

    def dump_uses():
        G.out.append("USE MPI_CONSTANTS, ONLY: %s" % ', '.join(uses.keys()))

    def dump_ignore_tkr():
        tkr_vars = ', '.join(tkr_list)
        if G.opts['ignore-tkr'] == 'gcc':
            # e.g. gfortran since 4.9
            G.out.append("!GCC$ ATTRIBUTES NO_ARG_CHECK :: " + tkr_vars)
        elif G.opts['ignore-tkr'] == 'dec':
            # e.g. ifort
            G.out.append("!DEC$ ATTRIBUTES NO_ARG_CHECK :: " + tkr_vars)
        elif G.opts['ignore-tkr'] == 'pragma':
            # e.g. sunfort
            G.out.append("!$PRAGMA IGNORE_TKR " + tkr_vars)
        elif G.opts['ignore-tkr'] == 'dir':
            # e.g. flang
            G.out.append("!DIR$ IGNORE_TKR " + tkr_vars)
        elif G.opts['ignore-tkr'] == 'ibm':
            # e.g. IBM
            G.out.append("!IBM* IGNORE_TKR " + tkr_vars)
        elif G.opts['ignore-tkr'] == 'assumed':
            # assumed type and rank
            G.out.append("TYPE(*), DIMENSION(..) :: " + tkr_vars)
        else:
            raise Exception("Unrecognized tkr option: %s" % G.opts['ignore-tkr'])

    if tkr_list and 'ignore-tkr' not in G.opts:
        # skip routines with choice buffers unless we can ignore TKR check
        return

    func_name = get_function_name(func)

    G.out.append("")
    if 'return' not in func:
        if not len(f_param_list) or not RE.match(r'ierr(or)?', f_param_list[-1]):
            f_param_list.append('ierr')
            decl_list.append("INTEGER :: ierr")
        dump_fortran_line("SUBROUTINE %s(%s)" % (func_name, ', '.join(f_param_list)))
    else:
        dump_fortran_line("FUNCTION %s(%s) result(res)" % (func_name, ', '.join(f_param_list)))
        ret_type = f90_mapping[func['return']]
        decl_list.append("%s :: res" % ret_type)

    G.out.append("INDENT")
    if uses:
        dump_uses()
    G.out.append("IMPLICIT NONE")
    if tkr_list:
        dump_ignore_tkr()
    G.out.extend(decl_list)
    G.out.append("DEDENT")

    if 'return' not in func:
        G.out.append("END SUBROUTINE %s" % func_name)
    else:
        G.out.append("END FUNCTION %s" % func_name)