def promote(inputfile, target_stmt, names, dimensions, targets, allocate, span):
    """Promote the scalar variables in *names* to arrays within the program
    units named by *targets*.

    Each element of *targets* is a comma-separated list of 'label:idxname'
    pairs: *label* selects the program unit via inputfile.get_stmt(), and
    *idxname* is the index suffix appended to promoted names in executable
    statements.  Declarations of promoted names are re-emitted with the
    dimension (or allocatable) spec, and an allocate() call is inserted just
    before the first executable statement when *allocate* is given.  The
    rewritten unit is re-parsed and spliced back over the original.
    """
    for target in targets:
        for promote_pair in target.split(','):
            label, idxname = promote_pair.split(':')
            promote_stmt = inputfile.get_stmt(label)
            if promote_stmt:
                lines = []
                in_exepart = False  # becomes True at the first executable statement
                for stmt, depth in walk(promote_stmt[0], -1):
                    if isinstance(stmt, TypeDeclarationStatement):
                        org_attrspec = stmt.attrspec
                        if any([name in stmt.entity_decls for name in names]):
                            # split the declaration: promoted entities vs the rest
                            entity_decls = []
                            name_decls = []
                            attrspec = stmt.attrspec
                            for entity in stmt.entity_decls:
                                if entity in names:
                                    name_decls.append(entity)
                                else:
                                    entity_decls.append(entity)
                            if len(stmt.entity_decls) > 0:
                                # emit the non-promoted part of the declaration first
                                stmt.entity_decls = entity_decls
                                lines.append(stmt.tooc())
                            # NOTE(review): the two branches below look swapped —
                            # when *allocate* is truthy the entities get the
                            # dimensions[0] suffix, and when it is falsy the code
                            # indexes allocate[0] (which would fail on an empty
                            # list). Confirm intent against the callers.
                            if allocate:
                                if 'allocatable' not in stmt.attrspec:
                                    stmt.attrspec.append('allocatable')
                                stmt.entity_decls = [name_decl + dimensions[0] for name_decl in name_decls]
                            else:
                                stmt.entity_decls = [name_decl + allocate[0] for name_decl in name_decls]
                            if len(stmt.entity_decls) > 0:
                                lines.append(stmt.tooc())
                            # restore the statement's original entity list
                            stmt.entity_decls = entity_decls
                        else:
                            if len(stmt.entity_decls) > 0:
                                lines.append(stmt.tooc())
                    elif not in_exepart and stmt.__class__ in execution_part:
                        # first executable statement: optionally emit allocate()
                        # calls, then rename promoted variables from here on
                        renames = []
                        for name in names:
                            for dim in dimensions:
                                if allocate:
                                    lines.append('allocate(%s)' % (name + allocate[0]))
                                renames.append([name, name + idxname])
                        lines.append(stmt.tooc(name_rename=renames))
                        in_exepart = True
                    elif in_exepart:
                        renames = []
                        for name in names:
                            for dim in dimensions:
                                renames.append([name, name + idxname])
                        lines.append(stmt.tooc(name_rename=renames))
                    else:
                        lines.append(stmt.tooc())
                # NOTE(review): the bare except below silently discards any
                # re-parse failure, leaving the tree unchanged without a warning.
                try:
                    parsed = parse('\n'.join(lines), analyze=False, ignore_comments=False)
                    if len(parsed.content) > 0:
                        for stmt, depth in walk(parsed, -1):
                            stmt.parse_f2003()
                        insert_content(promote_stmt[0], parsed.content, remove_olditem=True)
                except:
                    pass
def loop_interchange(outer_stmts, inner_stmts):
    """Interchange each (outer, inner) pair of Do loops.

    The outer/inner Do headers and their matching end-do statements are
    swapped while every other statement in between is re-emitted verbatim;
    the regenerated block is re-parsed and spliced over the outer loop.
    Non-Do statements are skipped with a warning.
    """
    for outer_stmt in outer_stmts:
        if not isinstance(outer_stmt, Do):
            Logger.warn('Outer statment is not Do type: %s' % outer_stmt.__class__)
            continue
        for inner_stmt in inner_stmts:
            if not isinstance(inner_stmt, Do):
                Logger.warn('Inner statment is not Do type: %s' % inner_stmt.__class__)
                continue
            # Re-emit the outer loop's subtree, substituting the four swapped
            # nodes (two Do headers, two end-do statements) as we go.
            swapped = []
            for node, _depth in walk(outer_stmt, -1):
                if node is outer_stmt:
                    swapped.append(inner_stmt.tooc())
                elif node is inner_stmt:
                    swapped.append(outer_stmt.tooc())
                elif node is inner_stmt.content[-1]:
                    swapped.append(outer_stmt.content[-1].tooc())
                elif node is outer_stmt.content[-1]:
                    swapped.append(inner_stmt.content[-1].tooc())
                else:
                    swapped.append(node.tooc())
            if not swapped:
                continue
            parsed = parse('\n'.join(swapped), analyze=False)
            if len(parsed.content) > 0:
                parsed.content[0].parent = outer_stmt.parent
                for node, _depth in walk(parsed, -1):
                    node.parse_f2003()
                insert_content(outer_stmt, parsed.content)
# NOTE(review): duplicate definition — loop_interchange is defined twice in
# this module; this later definition silently shadows the earlier, logically
# identical one. Confirm which copy is intended and remove the other.
def loop_interchange(outer_stmts, inner_stmts):
    """Interchange each (outer, inner) pair of Do loops by swapping their
    headers and matching end-do statements, then re-parsing the result and
    splicing it over the outer loop."""
    for outer_stmt in outer_stmts:
        if not isinstance(outer_stmt, Do):
            Logger.warn('Outer statment is not Do type: %s'%outer_stmt.__class__)
            continue
        for inner_stmt in inner_stmts:
            if not isinstance(inner_stmt, Do):
                Logger.warn('Inner statment is not Do type: %s'%inner_stmt.__class__)
                continue
            lines = []
            # walk the outer loop subtree, substituting the swapped nodes
            for stmt, depth in walk(outer_stmt, -1):
                if stmt is outer_stmt:
                    lines.append(inner_stmt.tooc())
                elif stmt is inner_stmt:
                    lines.append(outer_stmt.tooc())
                elif stmt is inner_stmt.content[-1]:
                    lines.append(outer_stmt.content[-1].tooc())
                elif stmt is outer_stmt.content[-1]:
                    lines.append(inner_stmt.content[-1].tooc())
                else:
                    lines.append(stmt.tooc())
            if lines:
                parsed = parse('\n'.join(lines), analyze=False)
                if len(parsed.content)>0:
                    parsed.content[0].parent = outer_stmt.parent
                    for stmt, depth in walk(parsed, -1):
                        stmt.parse_f2003()
                    insert_content(outer_stmt, parsed.content)
def loop_split(stmts, add_stmt, before=True):
    """Split the Do loop enclosing each statement in *stmts* into two loops.

    The first loop keeps the statements up to (and, when *before* is True,
    including) the split point; the second loop — optionally prefixed with
    *add_stmt* — takes the remainder with labels stripped to avoid duplicate
    Fortran labels.  Both loops are re-parsed and inserted in place of the
    original (the first is inserted without removing it, the second replaces
    it).
    """
    for stmt in stmts:
        parent = stmt.parent
        if not isinstance(parent, Do):
            Logger.warn('Parent of statment is not Do type: %s' % parent.__class__)
            continue
        doblk1 = []
        doblk2 = []
        #if add_stmt: doblk1.append(add_stmt[0])
        doblk1.append(parent.tooc())
        if add_stmt: doblk2.append(add_stmt[0])
        # second loop header with its label removed
        doblk2.append(parent.tooc(remove_label=True))
        enddo_stmt = parent.content[-1]
        doblk = doblk1          # currently-filling block; switches at the split point
        remove_label = False    # strip labels once we are in the second block
        for childstmt, depth in walk(parent, -1):
            if childstmt not in [parent, enddo_stmt]:
                # *before* controls whether the split statement itself lands in
                # the first block (append-after-switch) or the second
                # (append-before-switch)
                if not before:
                    doblk.append(childstmt.tooc(remove_label=remove_label))
                if childstmt == stmt:
                    doblk = doblk2
                    remove_label = True
                if before:
                    doblk.append(childstmt.tooc(remove_label=remove_label))
        doblk1.append(enddo_stmt.tooc())
        doblk2.append(enddo_stmt.tooc(remove_label=True))
        if doblk1:
            parsed = parse('\n'.join(doblk1), analyze=False, ignore_comments=False)
            if len(parsed.content) > 0:
                parsed.content[0].parent = parent.parent
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                # keep the original loop; the first half is inserted before it
                insert_content(parent, parsed.content, remove_olditem=False)
        if doblk2:
            parsed = parse('\n'.join(doblk2), analyze=False, ignore_comments=False)
            if len(parsed.content) > 0:
                parsed.content[0].parent = parent.parent
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                # the second half replaces the original loop
                insert_content(parent, parsed.content, remove_olditem=True)
# NOTE(review): duplicate definition — loop_split is defined twice in this
# module; this later copy shadows the earlier, logically identical one.
def loop_split(stmts, add_stmt, before=True):
    """Split the Do loop enclosing each statement in *stmts* into two loops,
    the second optionally prefixed with *add_stmt* and with labels stripped;
    *before* decides which side of the split the pivot statement lands on."""
    for stmt in stmts:
        parent = stmt.parent
        if not isinstance(parent, Do):
            Logger.warn('Parent of statment is not Do type: %s'%parent.__class__)
            continue
        doblk1 = []
        doblk2 = []
        #if add_stmt: doblk1.append(add_stmt[0])
        doblk1.append(parent.tooc())
        if add_stmt: doblk2.append(add_stmt[0])
        doblk2.append(parent.tooc(remove_label=True))
        enddo_stmt = parent.content[-1]
        doblk = doblk1          # block currently being filled
        remove_label = False    # strip labels in the second block
        for childstmt, depth in walk(parent, -1):
            if childstmt not in [ parent, enddo_stmt]:
                if not before:
                    doblk.append(childstmt.tooc(remove_label=remove_label))
                if childstmt==stmt:
                    doblk = doblk2
                    remove_label = True
                if before:
                    doblk.append(childstmt.tooc(remove_label=remove_label))
        doblk1.append(enddo_stmt.tooc())
        doblk2.append(enddo_stmt.tooc(remove_label=True))
        if doblk1:
            parsed = parse('\n'.join(doblk1), analyze=False, ignore_comments=False)
            if len(parsed.content)>0:
                parsed.content[0].parent = parent.parent
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                insert_content(parent, parsed.content, remove_olditem=False)
        if doblk2:
            parsed = parse('\n'.join(doblk2), analyze=False, ignore_comments=False)
            if len(parsed.content)>0:
                parsed.content[0].parent = parent.parent
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                insert_content(parent, parsed.content, remove_olditem=True)
def insert_stmt(inputfile, target_stmt, label, stmt_line, span):
    """Replace the statement located at *span* with the parsed *stmt_line*,
    assigning label[0] to each newly-parsed statement (on the node itself for
    comments, on .item otherwise)."""
    # Locate the statement whose source span matches the requested one.
    anchor = None
    for node, _depth in walk(inputfile.tree, -1):
        if node.item.span == span:
            anchor = node
            break
    if not stmt_line:
        return
    parsed = parse(stmt_line[0], analyze=False, ignore_comments=False)
    if len(parsed.content) > 0:
        labelno = int(label[0])
        for node, _depth in walk(parsed, 1):
            # comments carry the label attribute directly; real statements
            # carry it on their reader item
            if isinstance(node, Comment):
                node.label = labelno
            else:
                node.item.label = labelno
            node.parse_f2003()
        insert_content(anchor, parsed.content, remove_olditem=True)
def remove_stmt(inputfile, target_stmt, targets, span):
    """Disable each statement in *targets* by replacing it with a
    commented-out copy of itself ('!' prefix)."""
    for victim in targets:
        if not victim:
            continue
        # re-parse the statement text as a Fortran comment and swap it in
        commented = parse('!' + str(victim), analyze=False, ignore_comments=False)
        if len(commented.content) > 0:
            for node, _depth in walk(commented, 1):
                node.parse_f2003()
            insert_content(victim, commented.content, remove_olditem=True)
def stmt_by_label(self, label):
    """Return the first statement in self.tree carrying *label*, or None.

    Comment nodes keep the label on the node itself (when present at all);
    every other statement keeps it on its reader item.
    """
    for node, _depth in walk(self.tree, -1):
        if isinstance(node, Comment):
            matched = hasattr(node, 'label') and node.label == label
        else:
            matched = node.item.label == label
        if matched:
            return node
    return None
def openmp(inputfile, target_stmt, sentinel, directive, clauses, span):
    """Replace the statement at *span* with an OpenMP directive line built
    from *sentinel*, *directive* and optional *clauses* (each run through
    SrcFile.applymap)."""
    anchor = None
    line = ''
    for node, _depth in walk(inputfile.tree, -1):
        if node.item.span == span:
            anchor = node
            mapped_clauses = SrcFile.applymap(clauses[0]) if clauses else ''
            line = '%s %s %s' % (SrcFile.applymap(sentinel[0]),
                                 SrcFile.applymap(directive[0]),
                                 mapped_clauses)
            break
    if not line:
        return
    parsed = parse(line, analyze=False, ignore_comments=False)
    if len(parsed.content) > 0:
        for node, _depth in walk(parsed, -1):
            node.parse_f2003()
        insert_content(anchor, parsed.content, remove_olditem=True)
# NOTE(review): this module defines directive() twice (a second, logically
# identical copy appears later); the later one shadows this one.
def directive(inputfile, target_stmt, label, sentinel, directive, span):
    """Replace the statement at *span* with a '!<sentinel>$ <directive>'
    comment line, labelled with label[0]."""
    line = ''
    new_target_stmt = None
    for stmt, depth in walk(inputfile.tree, -1):
        if stmt.item.span==span:
            new_target_stmt = stmt
            line = '!%s$ %s'%(SrcFile.applymap(sentinel[0]), SrcFile.applymap(directive[0]))
            break
    if line:
        parsed = parse(line, analyze=False, ignore_comments=False)
        if len(parsed.content)>0:
            for stmt, depth in walk(parsed, 1):
                # comments hold the label directly; statements on .item
                if isinstance(stmt, Comment):
                    stmt.label = int(label[0])
                else:
                    stmt.item.label = int(label[0])
                stmt.parse_f2003()
            insert_content(new_target_stmt, parsed.content, remove_olditem=True)
def name_change(targets, switch, rename):
    """Apply 'old:new' switch/rename specs to each target statement, then
    re-parse the regenerated text and splice it over the original."""
    def _pairs(specs):
        # turn each non-empty 'old:new' spec into a trimmed 2-tuple
        result = []
        for spec in specs:
            if spec:
                parts = spec.split(':')
                result.append((parts[0].strip(), parts[1].strip()))
        return result

    for target_stmt in targets:
        regenerated = target_stmt.tooc(name_switch=_pairs(switch),
                                       name_rename=_pairs(rename))
        if not regenerated:
            continue
        parsed = parse(regenerated, analyze=False)
        if len(parsed.content) > 0:
            parsed.content[0].parent = target_stmt.parent
            for node, _depth in walk(parsed, -1):
                node.parse_f2003()
            insert_content(target_stmt, parsed.content, remove_olditem=True)
# NOTE(review): duplicate definition — directive() also appears earlier in the
# module with identical logic; this later copy is the one that takes effect.
def directive(inputfile, target_stmt, label, sentinel, directive, span):
    """Replace the statement at *span* with a '!<sentinel>$ <directive>'
    comment line, labelled with label[0]."""
    line = ''
    new_target_stmt = None
    for stmt, depth in walk(inputfile.tree, -1):
        if stmt.item.span == span:
            new_target_stmt = stmt
            line = '!%s$ %s' % (SrcFile.applymap(
                sentinel[0]), SrcFile.applymap(directive[0]))
            break
    if line:
        parsed = parse(line, analyze=False, ignore_comments=False)
        if len(parsed.content) > 0:
            for stmt, depth in walk(parsed, 1):
                # comments hold the label directly; statements on .item
                if isinstance(stmt, Comment):
                    stmt.label = int(label[0])
                else:
                    stmt.item.label = int(label[0])
                stmt.parse_f2003()
            insert_content(new_target_stmt, parsed.content, remove_olditem=True)
def stmt_by_name(self, name, cls=None, lineafter=-1):
    """Find the first non-comment statement containing an expression named
    *name*; return (stmt, expr) or (None, None).

    When *cls* is a list, only statements of those classes are considered;
    when *lineafter* is positive, statements ending at or before that line
    are skipped.
    """
    from statements import Comment
    for node, _depth in walk(self.tree, -1):
        # optional class filter
        if isinstance(cls, list) and node.__class__ not in cls:
            continue
        # optional "only after this line" filter
        if lineafter > 0 and node.item.span[1] <= lineafter:
            continue
        if isinstance(node, Comment):
            continue
        expr = node.expr_by_name(name, node.f2003)
        if expr:
            return node, expr
    return None, None
# NOTE(review): duplicate definition — name_change is defined twice in this
# module; this later copy shadows the earlier, logically identical one.
def name_change(targets, switch, rename):
    """Apply 'old:new' switch/rename specs to each target statement, then
    re-parse the regenerated text and splice it over the original."""
    for target_stmt in targets:
        # split each non-empty 'old:new' spec into trimmed tuples
        list_switch = [(pair.split(':')[0].strip(), pair.split(':')[1].strip())
                       for pair in switch if pair]
        list_rename = [(pair.split(':')[0].strip(), pair.split(':')[1].strip())
                       for pair in rename if pair]
        lines = target_stmt.tooc(name_switch=list_switch,
                                 name_rename=list_rename)
        if lines:
            parsed = parse(lines, analyze=False)
            if len(parsed.content) > 0:
                parsed.content[0].parent = target_stmt.parent
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                insert_content(target_stmt, parsed.content, remove_olditem=True)
# NOTE(review): a second, behaviorally different stmt_by_name exists in this
# file; in Python the later definition on the class wins — confirm which is
# intended.
def stmt_by_name(self, name, cls=None, lineafter=-1):
    """Find a statement containing an expression named *name*; return
    (stmt, expr) or (None, None)."""
    _stmt = None
    _expr = None
    for stmt, depth in walk(self.tree, -1):
        if isinstance(cls, list):
            if not stmt.__class__ in cls: continue
        if lineafter > 0:
            if stmt.item.span[1] <= lineafter: continue
        if isinstance(stmt, Comment): continue
        expr = stmt.expr_by_name(name, stmt.f2003)
        # NOTE(review): when lineafter > 0 this breaks on the FIRST statement
        # that survives the filters even if expr is None, returning
        # (stmt, None) — unlike the other stmt_by_name variant, which keeps
        # scanning until an expression is actually found. Verify this is
        # deliberate.
        if lineafter > 0 or expr is not None:
            _stmt = stmt
            _expr = expr
            break
    return _stmt, _expr
def reset_parsing(self):
    """Re-run fparser over self.prep and re-apply the f2003 pass.

    Rebuilds self.tree from the preprocessed lines, preserving the reader id,
    then restores the original (pre-preprocessing) line spans recorded in
    '!__OPENCASE_COMMENT#' marker comments before calling parse_f2003() on
    every statement.
    """
    # fparse
    readerid = self.tree.reader.id
    self.tree = parse('\n'.join(self.prep), ignore_comments=False, analyze=True,
                      isfree=True, isstrict=False,
                      include_dirs=None, source_only=None)
    self.tree.reader.id = readerid

    # f2003 parse
    # BUGFIX: lineno/linediff were previously referenced before assignment
    # whenever the first walked statement was not an OPENCASE marker comment
    # (NameError). Initialize them up front, matching __init__.
    lineno = 0
    linediff = 0
    for stmt, depth in walk(self.tree, -1):
        if isinstance(stmt, Comment) and stmt.item.comment.startswith(
                '!__OPENCASE_COMMENT#'):
            # marker comment records the original line number of the next
            # statement; zero out the marker's own span
            comment_split = stmt.item.comment.split(' ')
            lineno = int(comment_split[1])
            stmt.item.span = (0, 0)
        else:
            if lineno > 0:
                # first statement after a marker: compute the shift once
                linediff = stmt.item.span[0] - lineno
                lineno = 0
            stmt.item.span = (stmt.item.span[0] - linediff,
                              stmt.item.span[1] - linediff)
        stmt.parse_f2003()
def __init__(self, srcpath):
    """Read, preprocess and parse a Fortran source file, collecting
    '$opencase' directives embedded in its comments.

    Pipeline: run fpp/cpp with the configured include paths and macros,
    protect remaining '#' lines as '!__OPENCASE_COMMENT' comments so fparser
    accepts them, fparse the result, restore original line spans from the
    marker comments, extract opencase directives (including those pulled in
    via 'include(...)' case files) into self.direct / State.direct, and run
    parse_f2003 on every statement.
    """
    import re
    import os.path
    from oc_utils import exec_cmd

    # set default values
    self.prep = None
    self.tree = None
    self.srcpath = srcpath
    self.filename = os.path.basename(self.srcpath)
    self.abspath = os.path.abspath(self.srcpath)
    self.relpath = os.path.relpath(self.abspath, Config.path['refdir'])
    self.searchtree = []
    self.direct = {}

    # prepare include paths and macro definitions
    path_src = []
    macros_src = ''
    if Config.include['file'].has_key(self.abspath):
        path_src = Config.include['file'][self.abspath]['path']
        macros_src = ' '.join([
            '-D%s=%s' % (k, v)
            for k, v in Config.include['file'][self.abspath]['macro'].iteritems()
        ])
    includes = '-I' + ' -I'.join(Config.include['path'] + path_src + ['.'])
    macros = ' '.join([
        '-D%s=%s' % (k, v) for k, v in Config.include['macro'].iteritems()
    ]) + ' ' + macros_src

    # execute preprocessing
    Logger.info('Reading %s' % self.srcpath, stdout=True)
    prep = Config.bin['pp']
    if prep.endswith('fpp'):
        flags = Config.bin['fpp_flags']
    elif prep.endswith('cpp'):
        flags = Config.bin['cpp_flags']
    else:
        raise UserException('Preprocessor is not either fpp or cpp')
    output = exec_cmd('%s %s %s %s %s' %
                      (prep, flags, includes, macros, self.abspath))

    # convert the preprocessed output for fparser: hide residual cpp '#'
    # lines behind a recognizable comment prefix
    self.prep = map(
        lambda l: '!__OPENCASE_COMMENT' + l if l.startswith('#') else l,
        output.split('\n'))

    # fparse
    self.tree = parse('\n'.join(self.prep), ignore_comments=False, analyze=True,
                      isfree=True, isstrict=False, \
                      include_dirs=None, source_only=None )

    # parse f2003, restoring original line spans from marker comments
    lineno = 0
    linediff = 0
    pending_directs = []
    for stmt, depth in walk(self.tree, -1):
        if isinstance(stmt, Comment) and stmt.item.comment.startswith(
                '!__OPENCASE_COMMENT#'):
            # marker records the original line number of the next statement
            comment_split = stmt.item.comment.split(' ')
            lineno = int(comment_split[1])
            stmt.item.span = (0, 0)
        else:
            if lineno > 0:
                linediff = stmt.item.span[0] - lineno
                lineno = 0
            stmt.item.span = (stmt.item.span[0] - linediff,
                              stmt.item.span[1] - linediff)
        if isinstance(stmt, Comment):
            # match '$opencase NAME(VALUE)' style directives (any bracket kind)
            match = re.match(
                r'\$opencase\s*(\w+)\s*([\(\{\[\<])(.+)([\)\}\]\>]\s*\*?\+?\d?)',
                stmt.content, re.I)
            if match:
                name = match.group(1).lower()
                value = match.group(3)
                if name == 'include':
                    # pull directives out of an external case file
                    if value:
                        casefile = value.strip()
                        if casefile[0] == '/':
                            inc_path = os.path.abspath(casefile)
                        else:
                            inc_path = os.path.join(
                                os.path.dirname(self.abspath), value)
                        if os.path.exists(inc_path):
                            finc = open(inc_path, 'r')
                            inc_directs = re.findall(
                                r'(\!?)\s*(\w+)\s*([\(\{\[\<])(.+)([\)\}\]\>]\s*\*?\+?\d?)\s*\n',
                                finc.read(), re.I)
                            finc.close()
                            for direct in inc_directs:
                                # a leading '!' comments out the directive
                                if direct[0]: continue
                                direct_line = ''.join(direct)
                                direct_name = direct[1].lower()
                                direct_tree = generate_searchtree(
                                    self._strmap(direct_line))
                                assert len(
                                    direct_tree
                                ) == 1, 'Only one element is allowed in direct_tree'
                                self.searchtree.extend(direct_tree)
                                if direct_name in global_directs:
                                    if not State.direct.has_key(direct_name):
                                        State.direct[direct_name] = []
                                    State.direct[direct_name].append(
                                        (direct_tree[0], stmt, stmt.item.span))
                                elif dire_name_guard_never := False:
                                    pass
                                elif direct_name in local_directs:
                                    if not self.direct.has_key(direct_name):
                                        self.direct[direct_name] = []
                                    self.direct[direct_name].append(
                                        (direct_tree[0], stmt, stmt.item.span))
                        else:
                            # NOTE(review): 'caes' typo in the message below —
                            # runtime string left untouched here.
                            raise UserException(
                                'Can not find caes file: %s' % inc_path)
                else:
                    # inline directive: strip the '!$opencase' prefix (10 chars)
                    direct_line = match.group(0)
                    direct_tree = generate_searchtree(
                        self._strmap(direct_line[10:]))
                    self.searchtree.extend(direct_tree)
                    if name in global_directs:
                        if not State.direct.has_key(name):
                            State.direct[name] = []
                        State.direct[name].append(
                            (direct_tree[0], stmt, stmt.item.span))
                    elif name in local_directs:
                        if not self.direct.has_key(name):
                            self.direct[name] = []
                        self.direct[name].append(
                            (direct_tree[0], stmt, stmt.item.span))
        stmt.parse_f2003()

    # rename reader.id
    self.tree.reader.id = self.abspath
def __init__(self, srcpath, preprocess=True):
    """Read and parse a Fortran source file for KGen.

    Determines free/fixed form and strictness from Config, optionally runs
    fpp/cpp with the configured include paths and macros (hiding residual
    '#' lines behind '!KGEN' comments), fparses the result, runs
    parse_f2003 on every statement, and registers the file's modules and
    program units in the global State.
    """
    import os.path
    from kgen_utils import run_shcmd
    from statements import Comment
    from block_statements import Module, Program

    # set default values
    self.tree = None
    self.srcpath = srcpath
    self.abspath = os.path.abspath(self.srcpath)

    # set source file format: per-file override first, then global setting
    isfree = True
    isstrict = False
    if self.abspath in Config.source['file'].keys():
        if Config.source['file'][self.abspath].has_key('isfree'):
            isfree = Config.source['file'][self.abspath]['isfree']
        if Config.source['file'][self.abspath].has_key('isstrict'):
            isstrict = Config.source['file'][self.abspath]['isstrict']
    else:
        if Config.source['isstrict']: isstrict = Config.source['isstrict']
        if Config.source['isfree']: isfree = Config.source['isfree']

    # prepare include paths and macro definitions
    path_src = []
    macros_src = []
    if Config.include['file'].has_key(self.abspath):
        path_src = Config.include['file'][self.abspath]['path'] + [
            os.path.dirname(self.abspath)
        ]
        for k, v in Config.include['file'][self.abspath]['macro'].iteritems():
            if v:
                macros_src.append('-D%s=%s' % (k, v))
            else:
                macros_src.append('-D%s' % k)
    includes = '-I' + ' -I'.join(Config.include['path'] + path_src)
    macros_common = []
    for k, v in Config.include['macro'].iteritems():
        if v:
            macros_common.append('-D%s=%s' % (k, v))
        else:
            macros_common.append('-D%s' % k)
    macros = ' '.join(macros_common + macros_src)

    # execute preprocessing
    Logger.info('Reading %s' % self.srcpath, stdout=True)
    new_lines = []
    with open(self.abspath, 'r') as f:
        if preprocess:
            pp = Config.bin['pp']
            if pp.endswith('fpp'):
                # fpp needs an explicit source-form flag
                if isfree: srcfmt = ' -free'
                else: srcfmt = ' -fixed'
                flags = Config.bin['fpp_flags'] + srcfmt
            elif pp.endswith('cpp'):
                flags = Config.bin['cpp_flags']
            else:
                raise UserException(
                    'Preprocessor is not either fpp or cpp')
            output, err, retcode = run_shcmd('%s %s %s %s' %
                                             (pp, flags, includes, macros),
                                             input=f.read())
            # hide residual cpp '#' lines behind '!KGEN' comment markers
            prep = map(lambda l: '!KGEN' + l if l.startswith('#') else l,
                       output.split('\n'))
            new_lines = self.handle_include(prep)
        else:
            new_lines = f.read().split('\n')

    # add include paths
    if Config.include['file'].has_key(
            self.abspath) and Config.include['file'][self.abspath].has_key(
                'path'):
        include_dirs = Config.include['file'][self.abspath]['path'] + [
            os.path.dirname(self.abspath)
        ]
    else:
        include_dirs = None

    # fparse
    self.tree = parse('\n'.join(new_lines), ignore_comments=False, analyze=True, isfree=isfree, \
                      isstrict=isstrict, include_dirs=include_dirs, source_only=None )
    self.tree.prep = new_lines
    self.tree.used4genstate = False

    # parse f2003
    # NOTE(review): lineno/linediff are initialized but never used here —
    # presumably left over from the span-restoring loop used elsewhere.
    lineno = 0
    linediff = 0
    for stmt, depth in walk(self.tree, -1):
        stmt.parse_f2003()

    # rename reader.id
    self.tree.reader.id = self.abspath

    # collect module information into the global State
    for mod_name, mod_stmt in self.tree.a.module.iteritems():
        if not State.modules.has_key(mod_name):
            State.modules[mod_name] = OrderedDict()
            State.modules[mod_name]['stmt'] = mod_stmt
            State.modules[mod_name]['file'] = self
            State.modules[mod_name]['path'] = self.abspath

    # collect program unit information (anything that is not a module,
    # comment, or program header)
    for item in self.tree.content:
        if item.__class__ not in [Module, Comment, Program]:
            if item.reader.id not in State.program_units.keys():
                State.program_units[item.reader.id] = []
            State.program_units[item.reader.id].append(item)

    # create a tuple for file dependency
    State.srcfiles[self.abspath] = (self, [], [])
def stmt_by_lineno(self, lineno):
    """Return the statement whose span starts at *lineno*, or None."""
    return next((node for node, _depth in walk(self.tree, -1)
                 if node.item.span[0] == lineno), None)
def locate_callsite(cs_tree):
    """Locate the kernel callsite statements in *cs_tree* and record them in
    State.callsite['stmts'].

    The callsite is identified either by '!$kgen callsite' /
    '!$kgen begin_callsite ... end_callsite' directives in comments, or —
    when Config.callsite['namepath'] is set — by matching the name path
    against names appearing in executable constructs.  Raises UserException
    if no callsite can be found.
    """
    from statements import Comment
    from block_statements import executable_construct
    import re

    def get_next_non_comment(stmt):
        # first non-comment sibling at or after *stmt* in its parent's content
        if not stmt: return
        if not hasattr(stmt, 'parent'): return
        started = False
        for s in stmt.parent.content:
            if s==stmt:
                if not isinstance(s, Comment): return s
                started = True
            elif started:
                if not isinstance(s, Comment): return s

    def get_names(node, bag, depth):
        # traverse() visitor: collect unique Name strings
        from Fortran2003 import Name
        if isinstance(node, Name) and not node.string in bag:
            bag.append(node.string)

    # collect directives
    directs = []  # stack of currently-open begin_*/end_* directive names
    for stmt, depth in walk(cs_tree):
        if isinstance(stmt, Comment):
            line = stmt.item.comment.strip()
            match = re.match(r'^[c!*]\$kgen\s+(.+)$', line, re.IGNORECASE)
            if match:
                dsplit = match.group(1).split(' ', 1)
                dname = dsplit[0].strip()
                if len(dsplit)>1:
                    clause = dsplit[1].strip()
                else:
                    clause = None
                if dname.startswith('begin_'):
                    sname = dname[6:]
                    directs.append(sname)
                    State.kernel['name'] = clause
                elif dname.startswith('end_'):
                    ename = dname[4:]
                    if directs[-1]==ename:
                        directs.pop()
                        if ename=='callsite':
                            pass
                        else:
                            raise UserException('WARNING: Not supported KGEN directive: %s'%ename)
                    else:
                        # NOTE(review): dname_stack is not defined anywhere in
                        # this function — reaching this branch raises NameError
                        # instead of the intended UserException. Probably meant
                        # `directs[-1]`.
                        raise UserException('Directive name mismatch: %s, %s'%(dname_stack[-1], ename))
                elif dname=='callsite':
                    # single-statement callsite: the next non-comment statement
                    next_fort_stmt = get_next_non_comment(stmt)
                    if next_fort_stmt:
                        State.kernel['name'] = clause
                        State.callsite['stmts'].append(next_fort_stmt)
                    else:
                        raise UserException('WARNING: callsite is not found')
            elif 'callsite' in directs:
                State.callsite['stmts'].append(stmt)
        elif 'callsite' in directs:
            State.callsite['stmts'].append(stmt)
        else:
            if Config.callsite['namepath'] and stmt.__class__ in executable_construct:
                names = []
                traverse(stmt.f2003, get_names, names)
                for name in names:
                    if match_namepath(Config.callsite['namepath'], pack_exnamepath(stmt, name), internal=False):
                        State.kernel['name'] = name
                        for _s, _d in walk(stmt):
                            State.callsite['stmts'].append(_s)
                        return
            # NOTE(review): this branch can never fire — directs[-1]=='callsite'
            # implies 'callsite' in directs, which is already handled by the
            # outer elif above.
            elif len(directs)>0 and directs[-1]=='callsite':
                State.callsite['stmts'].append(stmt)

    if len(State.callsite['stmts'])==0:
        raise UserException('Can not find callsite')
def main():
    """Entry point of simplify_ifcond.

    Walks the target source trees given on the command line, parses each
    Fortran file, finds IF conditions whose identifier count exceeds the
    --threshold option, rewrites them via add_specstmts()/simplify(), and
    writes the modified sources under <outdir>/src.  Returns 0 on success,
    -1 on error.
    """
    version = [0, 1, '0']
    outdir = '.'
    retval = 0
    varid = 0

    Logger.info('Starting simplify_ifcond', stdout=True)

    try:
        # option parser
        parser = optparse.OptionParser(
            version='simplify_ifcond version %d.%d.%s' % tuple(version))

        # common options
        parser.add_option("--outdir",
                          dest="outdir",
                          action='store',
                          type='string',
                          default='output',
                          help="path to create outputs")
        parser.add_option("--add-ext",
                          dest="ext",
                          action='store',
                          type='string',
                          default=None,
                          help="File extensions to parse")
        parser.add_option(
            "-t",
            "--threshold",
            dest="threshold",
            action='store',
            type='int',
            default=20,
            help="Max number of identifiers if condition before simplifying.")

        opts, args = parser.parse_args()
        if len(args) < 1:
            print 'ERROR: Target source folders are not provided.'
            print 'Usage: simplify_ifcond [options] <target folder path[, target folder path, ...]>'
            sys.exit(-1)

        # create output directory
        outpath = os.path.abspath(opts.outdir)
        if not os.path.exists(outpath):
            os.makedirs(outpath)
        outsrcpath = '%s/src' % outpath
        # NOTE(review): rmtree raises if <outdir>/src does not exist yet —
        # presumably the directory is expected from a previous run; confirm.
        shutil.rmtree(outsrcpath)
        os.makedirs(outsrcpath)

        # walk through source directory tree
        for srcdir in args:
            abssrcpath = os.path.abspath(srcdir)
            for dirName, subdirList, fileList in os.walk(abssrcpath):
                relpath = os.path.relpath(dirName, start=abssrcpath)
                outfilepath = '%s/%s' % (outsrcpath, relpath.replace('.', ''))
                if not os.path.exists(outfilepath):
                    os.makedirs(outfilepath)
                for srcfile in fileList:
                    if any(srcfile.endswith(ext) for ext in file_exts):
                        try:
                            # read source file
                            parsed = SrcFile(os.path.join(dirName, srcfile),
                                             preprocess=False)

                            # create analysis container
                            parstmts = []

                            # anlyze: collect IF statements whose condition
                            # exceeds the identifier threshold, grouped by
                            # their top-level ancestor
                            last_span = (1, 1)
                            for stmt, depth in api.walk(parsed.tree):
                                if isinstance(stmt, Comment):
                                    # drop comments nested inside the last
                                    # recorded statement span
                                    if stmt.item.span[0] >= last_span[
                                            0] and stmt.item.span[
                                                1] <= last_span[1]:
                                        stmt.ignore = True
                                elif isinstance(stmt, (IfThen, If, ElseIf)):
                                    if ifcondcheck(stmt.f2003.items[0],
                                                   opts.threshold):
                                        p = stmt.ancestors()[-1]
                                        if p not in parstmts:
                                            parstmts.append(p)
                                        if not hasattr(p, 'simplify_ifstmts'):
                                            p.simplify_ifstmts = []
                                        if stmt not in p.simplify_ifstmts:
                                            p.simplify_ifstmts.append(stmt)
                                    last_span = stmt.item.span

                            # modify
                            for parstmt in parstmts:
                                # add openmp directive
                                add_specstmts(parstmt,
                                              len(parstmt.simplify_ifstmts))
                                # simplify
                                for i, ifstmt in enumerate(
                                        parstmt.simplify_ifstmts):
                                    simplify(ifstmt, ifstmt.f2003.items[0], i)

                            # generate modified source files
                            if len(parstmts) > 0:
                                lines = []
                                for stmt, depth in api.walk(parsed.tree):
                                    if hasattr(stmt, 'forced_lines'):
                                        lines.extend(stmt.forced_lines)
                                    elif not stmt.ignore:
                                        start = stmt.item.span[0] - 1
                                        end = stmt.item.span[1]
                                        for line in stmt.top.prep[start:end]:
                                            # skip '!KGEN# <lineno>' markers
                                            split = line.split()
                                            if len(split) > 2 and split[
                                                    0].startswith(
                                                        '!KGEN#'
                                                    ) and split[1].isdigit():
                                                continue
                                            lines.append(line)
                                with open(os.path.join(outfilepath, srcfile),
                                          'w') as f:
                                    print 'Generating %s\n' % outfilepath
                                    f.write('\n'.join(lines))
                                    f.write('\n')
                        except Exception as e:
                            # NOTE(review): the re-raise makes the 'pass' dead
                            # code and defeats the per-file error isolation
                            # this handler appears intended to provide.
                            raise
                            pass

        # switch source files if directed
    except UserException as e:
        print 'ERROR: %s' % str(e)
        Logger.info(e)
        retval = -1
    except ProgramException as e:
        Logger.critical(e)
        retval = -1
    except Exception as e:
        Logger.critical(e)
        retval = -1
    finally:
        pass

    Logger.info('simplify_ifcond is finished.', stdout=True)
    return retval
def check_mode():
    """Check how much of the configured source tree KGen can parse.

    Collects Fortran files from Config.check_mode paths, preprocesses and
    parses each one, attempting parse_f2003 + unknown-name search on every
    statement, and prints a summary of statements that failed to parse or
    use unsupported Fortran constructs (accumulated in the module-level
    not_parsed / not_supported dicts).
    """
    from kgen_utils import Config, exec_cmd
    from utils import module_file_extensions
    from api import parse, walk
    from statements import Comment
    from kgen_search import f2003_search_unknowns, SearchException
    import logging
    logger = logging.getLogger('kgen')  # KGEN addition
    logger.setLevel(logging.WARNING)

    files = []

    # collect source files
    for path in Config.check_mode:
        if os.path.basename(path).startswith('.'): continue
        if os.path.isdir(path):
            for root, dirnames, filenames in os.walk(os.path.abspath(path)):
                for filename in filenames:
                    if os.path.basename(filename).startswith('.'): continue
                    fname, fext = os.path.splitext(filename)
                    if len(fext) > 1 and fext.lower(
                    ) in module_file_extensions:
                        files.append(os.path.join(root, filename))
        elif os.path.isfile(path):
            # NOTE(review): redundant isfile re-check inside the elif branch.
            if os.path.isfile(path):
                files.append(os.path.abspath(path))
        else:
            # NOTE(review): raising a plain string is invalid in modern Python
            # (TypeError); also 'direcotory' typo in the runtime string —
            # both left untouched in this documentation pass.
            raise '%s is not a direcotory nor a file' % path

    # TODO: support #include cpp directive

    # parse source files
    for n, file in enumerate(files):
        print 'Reading(%d/%d): ' % (n + 1, len(files)), file

        # prepare include paths and macro definitions
        # NOTE(review): this is a module-level function — there is no `self`,
        # so every `self.abspath` below raises NameError at runtime. It was
        # presumably copied from SrcFile.__init__ and should use `file`
        # (or its abspath) instead; confirm before fixing.
        path_src = []
        macros_src = []
        if Config.include['file'].has_key(self.abspath):
            path_src = Config.include['file'][self.abspath]['path'] + [
                os.path.dirname(self.abspath)
            ]
            for k, v in Config.include['file'][self.abspath]['macro'].iteritems():
                if v:
                    macros_src.append('-D%s=%s' % (k, v))
                else:
                    macros_src.append('-D%s' % k)
        includes = '-I' + ' -I'.join(Config.include['path'] + path_src)
        macros_common = []
        for k, v in Config.include['macro'].iteritems():
            if v:
                macros_common.append('-D%s=%s' % (k, v))
            else:
                macros_common.append('-D%s' % k)
        macros = ' '.join(macros_common + macros_src)

        # execute preprocessing
        prep = Config.bin['pp']
        if prep.endswith('fpp'):
            flags = Config.bin['fpp_flags']
        elif prep.endswith('cpp'):
            flags = Config.bin['cpp_flags']
        else:
            raise UserException('Preprocessor is not either fpp or cpp')
        output = exec_cmd('%s %s %s %s %s' % (prep, flags, includes,
                                              macros, file))

        # convert the preprocessed output for fparser: hide residual '#'
        # lines behind '!KGEN' comment markers
        prep = map(lambda l: '!KGEN' + l if l.startswith('#') else l,
                   output.split('\n'))

        # fparse
        tree = parse('\n'.join(prep), ignore_comments=False, analyze=False, isfree=True, isstrict=False, \
                     include_dirs=None, source_only=None )

        # parse f2003, restoring original line spans from '!KGEN#' markers
        Config.search['promote_exception'] = True
        lineno = 0
        linediff = 0
        for stmt, depth in walk(tree, -1):
            try:
                if isinstance(
                        stmt,
                        Comment) and stmt.item.comment.startswith('!KGEN#'):
                    comment_split = stmt.item.comment.split(' ')
                    lineno = int(comment_split[1])
                    stmt.item.span = (0, 0)
                else:
                    if lineno > 0:
                        linediff = stmt.item.span[0] - lineno
                        lineno = 0
                    stmt.item.span = (stmt.item.span[0] - linediff,
                                      stmt.item.span[1] - linediff)
                stmt.parse_f2003()
                if stmt.f2003.__class__ not in exclude_list:
                    f2003_search_unknowns(stmt,
                                          stmt.f2003,
                                          gentype=KGGenType.KERNEL)
            except (NoMatchError, AttributeError) as e:
                # statement text that fparser/f2003 could not parse
                if file not in not_parsed:
                    not_parsed[file] = []
                not_parsed[file].append(stmt)
            except NameError as e:
                # a missing search_<Class> handler signals an unsupported
                # Fortran construct; extract the class name from the message
                errmsg = str(e)
                pos = errmsg.find('search_')
                if len(errmsg) > 7 and pos > 0:
                    clsname = errmsg[pos + 7:-16]
                    if file not in not_supported:
                        not_supported[file] = []
                    not_supported[file].append((clsname, stmt.item.span[0]))
            except Exception as e:
                print 'WARNING: Following statement is not correctly parsed'
                print stmt
                print ''

    print ''
    print '********************'
    print '*** CHECK RESULT ***'
    print '********************'
    print ''
    print 'NOTE: KGEN may be able to extract kernel even though not all source code lines are parsed or supported.'
    print ''

    print '*** KGEN Parsing Error(s) ***'
    print ''
    for file, stmts in not_parsed.iteritems():
        print file
        lines = []
        for stmt in stmts:
            if hasattr(stmt, 'item'):
                lines.append('Near line # %d:' % stmt.item.span[0])
                lines.append(stmt.tokgen() + '\n')
            else:
                lines.append(str(stmt) + '\n')
        print '\n'.join(lines), '\n'

    print '*** Not Supported Fortran Statement(s) ***'
    print ''
    for file, clsnames in not_supported.iteritems():
        print file
        lines = []
        for clsname, lineno in clsnames:
            lines.append("'%s' Fortran statment near line # %d" %
                         (clsname, lineno))
        print '\n'.join(lines), '\n'

    if len(not_parsed) == 0 and len(not_supported) == 0:
        print 'Current KGEN version can support all source code lines.'
def loop_unroll(targets, factor, method):
    """Unroll each Do loop in *targets*.

    *factor* is either 'full' (unroll completely when the trip count can be
    determined statically) or a digit string N (unroll by N, emitting a main
    loop with adjusted end/step plus a remainder loop when the trip count is
    unknown or differs from N).  *method* is forwarded to _unroll().
    """
    for target_stmt in targets:
        if not isinstance(target_stmt, Do):
            Logger.warn("Target statment is not Do type: %s" %
                        target_stmt.__class__)
            continue

        # collect loop control (var, start, end, step) from the f2003 node
        target_f2003 = target_stmt.f2003
        if isinstance(target_f2003, Nonlabel_Do_Stmt):
            loop_control = target_f2003.items[1]
            loop_var = loop_control.items[0].string.lower()
            start_idx = loop_control.items[1][0]
            end_idx = loop_control.items[1][1]
            # NOTE(review): defaulting step to 1 when THREE control items are
            # present (and reading items[1][2] otherwise) looks inverted —
            # verify against the fparser loop-control layout.
            if len(loop_control.items[1]) == 3:
                step = Int_Literal_Constant(str(1))
            else:
                step = loop_control.items[1][2]
        else:
            # NOTE(review): f2003obj is undefined here — this raise would
            # itself fail with NameError; probably meant target_f2003.
            raise ProgramException("Not supported type: %s" %
                                   f2003obj.__class__)

        # collect loop controls through static analysis
        start_num = target_stmt.get_param(start_idx)
        end_num = target_stmt.get_param(end_idx)
        step_num = target_stmt.get_param(step)
        try:
            loop_indices = range(start_num, end_num + 1, step_num)
        except:
            # bounds not statically known
            loop_indices = None

        # TODO: modify analysis if required

        lines = []
        if factor == "full":
            if loop_indices is not None:
                lines = _unroll(target_stmt.content,
                                loop_var,
                                len(loop_indices),
                                method,
                                start_index=start_num)
            else:
                Logger.warn("Loopcontrol is not collected")
        # save in tree
        elif factor.isdigit():
            factor_num = int(factor)
            if loop_indices is not None and len(loop_indices) == factor_num:
                # trip count equals the factor: unroll completely
                lines = _unroll(target_stmt.content,
                                loop_var,
                                factor_num,
                                method,
                                start_index=start_num)
            else:
                # replace end and step: main unrolled loop
                newstep = "%s*%s" % (step.tofortran(), factor)
                newend = "%s-%s" % (end_idx.tofortran(), newstep)
                lines.append(target_stmt.tooc(do_end=newend, do_step=newstep))
                lines.extend(
                    _unroll(target_stmt.content, loop_var, factor_num, method))
                lines.append(target_stmt.content[-1].tooc())
                # replace start: remainder loop picks up where the main loop
                # left the loop variable
                newstart = loop_var
                lines.append(
                    target_stmt.tooc(do_start=newstart, remove_label=True))
                lines.extend(_unroll(target_stmt.content, loop_var, 1, method))
                lines.append(target_stmt.content[-1].tooc(remove_label=True))
        else:
            raise UserException("Unknown unroll factor: %s" % factor)

        if lines:
            parsed = parse("\n".join(lines), analyze=False)
            if len(parsed.content) > 0:
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                insert_content(target_stmt, parsed.content)
# NOTE(review): this is a second, near-identical definition of loop_unroll;
# as the later definition it shadows the earlier one at import time.
# Consider removing one of the two copies.
def loop_unroll(targets, factor, method):
    """Unroll the Do loops in *targets* and splice the result into the tree.

    targets: iterable of fparser ``Do`` statements to unroll.
    factor : the string ``'full'`` for complete unrolling, or a decimal
             string giving the unroll factor.
    method : passed through unchanged to ``_unroll`` (unroll strategy).

    Raises ``UserException`` for an unrecognized factor and
    ``ProgramException`` for an unsupported do-statement form.
    """
    for target_stmt in targets:
        if not isinstance(target_stmt, Do):
            Logger.warn('Target statment is not Do type: %s' % target_stmt.__class__)
            continue

        # collect loop control (variable, start, end, optional stride)
        target_f2003 = target_stmt.f2003
        if isinstance(target_f2003, Nonlabel_Do_Stmt):
            loop_control = target_f2003.items[1]
            loop_var = loop_control.items[0].string.lower()
            start_idx = loop_control.items[1][0]
            end_idx = loop_control.items[1][1]
            # FIX: branches were swapped -- a length-3 control list carries an
            # explicit stride at index 2; the default step applies otherwise.
            if len(loop_control.items[1]) == 3:
                step = loop_control.items[1][2]
            else:
                step = Int_Literal_Constant(str(1))
        else:
            # FIX: the original referenced the undefined name 'f2003obj' here.
            raise ProgramException('Not supported type: %s' % target_f2003.__class__)

        # collect loop controls through static analysis
        start_num = target_stmt.get_param(start_idx)
        end_num = target_stmt.get_param(end_idx)
        step_num = target_stmt.get_param(step)
        try:
            loop_indices = range(start_num, end_num + 1, step_num)
        except Exception:  # bounds could not be resolved to integers
            loop_indices = None

        # TODO: modify analysis if required

        lines = []
        if factor == 'full':
            # full unroll requires statically known loop bounds
            if loop_indices is not None:
                lines = _unroll(target_stmt.content, loop_var, len(loop_indices), method, start_index=start_num)
            else:
                Logger.warn('Loopcontrol is not collected')
        elif factor.isdigit():
            factor_num = int(factor)
            if loop_indices is not None and len(loop_indices) == factor_num:
                # trip count equals the factor: equivalent to a full unroll
                lines = _unroll(target_stmt.content, loop_var, factor_num, method, start_index=start_num)
            else:
                # partial unroll: main loop stepping by step*factor, then a
                # remainder loop for leftover iterations
                # replace end and step
                newstep = '%s*%s' % (step.tofortran(), factor)
                newend = '%s-%s' % (end_idx.tofortran(), newstep)
                lines.append(target_stmt.tooc(do_end=newend, do_step=newstep))
                lines.extend(
                    _unroll(target_stmt.content, loop_var, factor_num, method))
                lines.append(target_stmt.content[-1].tooc())
                # replace start
                newstart = loop_var
                lines.append(
                    target_stmt.tooc(do_start=newstart, remove_label=True))
                lines.extend(_unroll(target_stmt.content, loop_var, 1, method))
                lines.append(target_stmt.content[-1].tooc(remove_label=True))
        else:
            raise UserException('Unknown unroll factor: %s' % factor)

        # save in tree
        if lines:
            parsed = parse('\n'.join(lines), analyze=False)
            if len(parsed.content) > 0:
                for stmt, depth in walk(parsed, -1):
                    stmt.parse_f2003()
                insert_content(target_stmt, parsed.content)
def _collect_mpi_params(self):
    """Locate mpif.h and harvest MPI parameter values into Config.mpi.

    Does nothing when MPI support is disabled.  Searches the configured
    header path (absolute or relative to the include paths), parses the
    header with fparser, then fills any unset Config.mpi entries
    (comm, logical, status_size, any_source, source) from the parsed tree.

    Raises UserException when the header or a required parameter cannot
    be found, or when reading/parsing fails.
    """
    from api import parse, walk
    if Config.mpi['enabled']:
        # get path of mpif.h: an absolute configured path is used as-is;
        # otherwise search the global and per-file include paths in order
        mpifpath = ''
        if os.path.isabs(Config.mpi['header']):
            if os.path.exists(Config.mpi['header']):
                mpifpath = Config.mpi['header']
            else:
                raise UserException('Can not find %s' % Config.mpi['header'])
        else:
            for p in Config.include['path']:
                fp = os.path.join(p, Config.mpi['header'])
                if os.path.exists(fp):
                    mpifpath = fp
                    break
            if not mpifpath:
                # fall back to per-source-file include paths
                # (incpath itself is unused; only the path lists matter)
                for incpath, incdict in Config.include['file'].items():
                    for p in incdict['path']:
                        fp = os.path.join(p, Config.mpi['header'])
                        if os.path.exists(fp):
                            mpifpath = fp
                            break
                    if mpifpath:
                        break
        # collect required information
        if mpifpath:
            try:
                with open(mpifpath, 'r') as f:
                    filelines = f.read().split('\n')
                    # resolve nested INCLUDE lines before parsing
                    lines = '\n'.join(
                        handle_include(os.path.dirname(mpifpath), filelines))
                    #reader = FortranStringReader(lines)
                tree = parse(lines, ignore_comments=True, analyze=False,
                             isfree=True, isstrict=False, include_dirs=None,
                             source_only=None)
                for stmt, depth in walk(tree, -1):
                    stmt.parse_f2003()
                #import pdb; pdb.set_trace()
                #spec = Specification_Part(reader)
                bag = {}
                config_name_mapping = [
                    ('comm', 'MPI_COMM_WORLD'),
                    ('logical', 'MPI_LOGICAL'),
                    ('status_size', 'MPI_STATUS_SIZE'),
                    ('any_source', 'MPI_ANY_SOURCE'),
                    ('source', 'MPI_SOURCE'),
                ]
                for config_key, name in config_name_mapping:
                    # only fill values that were not configured explicitly
                    if not Config.mpi.has_key(
                            config_key) or Config.mpi[config_key] is None:
                        for stmt, depth in walk(tree, -1):
                            # bag is reset per statement; get_MPI_PARAM
                            # appends any match for bag['key'] to bag[name]
                            bag['key'] = name
                            bag[name] = []
                            if hasattr(stmt, 'f2003'):
                                traverse(stmt.f2003, get_MPI_PARAM, bag,
                                         subnode='content')
                                if len(bag[name]) > 0:
                                    # take the last occurrence and stop
                                    Config.mpi[config_key] = bag[name][-1]
                                    break
                for config_key, name in config_name_mapping:
                    # every required parameter must be resolved by now
                    if not Config.mpi.has_key(
                            config_key) or Config.mpi[config_key] is None:
                        raise UserException(
                            'Can not find {name} in mpif.h'.format(name=name))
            except UserException:
                raise  # Reraise this exception rather than catching it below
            except Exception as e:
                # NOTE(review): the original exception is swallowed here;
                # only the file name is reported
                raise UserException('Error occurred during reading %s.'
                                    % mpifpath)
        else:
            raise UserException(
                'Can not find mpif.h. Please provide a path to the file')
def process_directive(self):
    """Scan self.tree for ``!$kgen`` directives and act on them.

    Recognized directives: ``begin_*``/``end_*`` region markers,
    ``callsite`` (records the kernel call site statements in
    Config.callsite['stmts'] and the kernel name), ``write``, ``exclude``
    and ``coverage`` (tag the following non-comment statement).  When no
    explicit callsite directive applies, falls back to matching
    Config.callsite['namepath'] against names in executable constructs.

    Raises UserException on malformed or unsupported directives.
    """
    from kgsearch import f2003_search_unknowns
    from statements import Comment
    from block_statements import executable_construct
    import re

    def get_next_non_comment(stmt):
        # return the first non-comment statement at or after stmt within
        # its parent's content; None when there is none
        if not stmt:
            return
        if not hasattr(stmt, 'parent'):
            return
        started = False
        for s in stmt.parent.content:
            if s == stmt:
                if not isinstance(s, Comment):
                    return s
                started = True
            elif started:
                if not isinstance(s, Comment):
                    return s

    def get_names(node, bag, depth):
        # traverse() visitor: collect distinct Name strings into bag
        from Fortran2003 import Name
        if isinstance(node, Name) and not node.string in bag:
            bag.append(node.string)

    # collect directives
    directs = []
    for stmt, depth in api.walk(self.tree):
        if isinstance(stmt, Comment):
            line = stmt.item.comment.strip()
            match = re.match(r'^[c!*]\$kgen\s+(.+)$', line, re.IGNORECASE)
            if match:
                # split into directive name and optional clause
                dsplit = match.group(1).split(' ', 1)
                dname = dsplit[0].strip()
                if len(dsplit) > 1:
                    clause = dsplit[1].strip()
                else:
                    clause = None
                if dname.startswith('begin_'):
                    sname = dname[6:]
                    directs.append(sname)
                    Config.kernel['name'] = clause
                elif dname.startswith('end_'):
                    ename = dname[4:]
                    if directs[-1] == ename:
                        directs.pop()
                        if ename == 'callsite':
                            # drop trailing comments from the call site
                            while isinstance(Config.callsite['stmts'][-1],
                                             Comment):
                                Config.callsite['stmts'].pop()
                        else:
                            raise UserException(
                                'WARNING: Not supported KGEN directive: %s'
                                % ename)
                    else:
                        # FIX: the original formatted this message with the
                        # undefined name "dname_stack" (NameError).
                        raise UserException(
                            'Directive name mismatch: %s, %s'
                            % (directs[-1], ename))
                elif dname == 'callsite':
                    next_fort_stmt = get_next_non_comment(stmt)
                    if next_fort_stmt:
                        Config.kernel['name'] = clause
                        Config.callsite['stmts'].append(next_fort_stmt)
                    else:
                        raise UserException(
                            'WARNING: callsite is not found')
                elif dname == 'write':
                    if clause:
                        stmt.write_state = tuple(
                            c.strip() for c in clause.split(','))
                    # make sure the statement's unknowns are resolved
                    if not hasattr(stmt, 'unknowns'):
                        f2003_search_unknowns(stmt, stmt.f2003)
                    if hasattr(stmt, 'unknowns'):
                        for unk, req in stmt.unknowns.iteritems():
                            if req.state != ResState.RESOLVED:
                                stmt.resolve(req)
                elif dname == 'exclude':
                    next_fort_stmt = get_next_non_comment(stmt)
                    if next_fort_stmt:
                        next_fort_stmt.f2003.skip_search = True
                        next_fort_stmt.f2003.after_exclude = True
                    else:
                        raise UserException(
                            'WARNING: exclude target is not found')
                elif dname == 'coverage':
                    next_fort_stmt = get_next_non_comment(stmt)
                    if next_fort_stmt:
                        next_fort_stmt.f2003.after_coverage = True
                        next_fort_stmt.f2003.coverage_name = clause
                    else:
                        raise UserException(
                            'WARNING: coverage target is not found')
            elif 'callsite' in directs:
                # if not match and within callsite
                if Config.callsite['stmts'] or not isinstance(stmt, Comment):
                    Config.callsite['stmts'].append(stmt)
        elif 'callsite' in directs:  # if not Comment
            Config.callsite['stmts'].append(stmt)
        else:
            # not in callsite: fall back to namepath matching
            if Config.callsite[
                    'namepath'] and stmt.__class__ in executable_construct:
                names = []
                kgutils.traverse(stmt.f2003, get_names, names)
                for name in names:
                    if kgutils.match_namepath(Config.callsite['namepath'],
                                              kgutils.pack_exnamepath(
                                                  stmt, name),
                                              internal=False):
                        Config.kernel['name'] = name
                        for _s, _d in api.walk(stmt):
                            Config.callsite['stmts'].append(_s)
                        return
            elif len(directs) > 0 and directs[-1] == 'callsite':
                Config.callsite['stmts'].append(stmt)
def __init__(self, srcpath, preprocess=True):
    """Read, optionally preprocess, and parse one Fortran source file.

    srcpath   : path to the source file.
    preprocess: when True, run fpp/cpp (per Config.bin) over the file and
                tag preprocessor line markers with a '!KGEN' prefix.

    Side effects: populates self.tree (the fparser parse tree), registers
    the file's modules in State.modules, its non-module program units in
    State.program_units, and itself in State.srcfiles.
    """
    import os.path
    from kgen_utils import run_shcmd
    from statements import Comment
    from block_statements import Module, Program

    # set default values
    self.tree = None
    self.srcpath = srcpath
    self.abspath = os.path.abspath(self.srcpath)

    # set source file format: per-file override first, global setting
    # otherwise (defaults: free form, not strict)
    isfree = True
    isstrict = False
    if self.abspath in Config.source["file"].keys():
        if Config.source["file"][self.abspath].has_key("isfree"):
            isfree = Config.source["file"][self.abspath]["isfree"]
        if Config.source["file"][self.abspath].has_key("isstrict"):
            isstrict = Config.source["file"][self.abspath]["isstrict"]
    else:
        if Config.source["isstrict"]:
            isstrict = Config.source["isstrict"]
        if Config.source["isfree"]:
            isfree = Config.source["isfree"]

    # prepare include paths and macro definitions (per-file + common)
    path_src = []
    macros_src = []
    if Config.include["file"].has_key(self.abspath):
        path_src = Config.include["file"][self.abspath]["path"] + [os.path.dirname(self.abspath)]
        for k, v in Config.include["file"][self.abspath]["macro"].iteritems():
            if v:
                macros_src.append("-D%s=%s" % (k, v))
            else:
                macros_src.append("-D%s" % k)
    includes = "-I" + " -I".join(Config.include["path"] + path_src)
    macros_common = []
    for k, v in Config.include["macro"].iteritems():
        if v:
            macros_common.append("-D%s=%s" % (k, v))
        else:
            macros_common.append("-D%s" % k)
    macros = " ".join(macros_common + macros_src)

    # execute preprocessing
    Logger.info("Reading %s" % self.srcpath, stdout=True)
    new_lines = []
    with open(self.abspath, "r") as f:
        if preprocess:
            pp = Config.bin["pp"]
            if pp.endswith("fpp"):
                # fpp needs an explicit source-form flag
                if isfree:
                    srcfmt = " -free"
                else:
                    srcfmt = " -fixed"
                flags = Config.bin["fpp_flags"] + srcfmt
            elif pp.endswith("cpp"):
                flags = Config.bin["cpp_flags"]
            else:
                raise UserException("Preprocessor is not either fpp or cpp")
            output, err, retcode = run_shcmd("%s %s %s %s" % (pp, flags, includes, macros), input=f.read())
            # keep preprocessor line markers as '!KGEN#...' comments so the
            # original line numbers survive parsing
            prep = map(lambda l: "!KGEN" + l if l.startswith("#") else l, output.split("\n"))
            new_lines = self.handle_include(prep)
        else:
            new_lines = f.read().split("\n")

    # add include paths
    if Config.include["file"].has_key(self.abspath) and Config.include["file"][self.abspath].has_key("path"):
        include_dirs = Config.include["file"][self.abspath]["path"] + [os.path.dirname(self.abspath)]
    else:
        include_dirs = None

    # fparse
    self.tree = parse(
        "\n".join(new_lines),
        ignore_comments=False,
        analyze=True,
        isfree=isfree,
        isstrict=isstrict,
        include_dirs=include_dirs,
        source_only=None,
    )
    self.tree.prep = new_lines
    self.tree.used4genstate = False

    # parse f2003
    # (lineno/linediff appear unused here -- NOTE(review): likely leftovers
    # from the span-fixing logic used elsewhere in this codebase)
    lineno = 0
    linediff = 0
    for stmt, depth in walk(self.tree, -1):
        stmt.parse_f2003()

    # rename reader.id so diagnostics refer to the real file path
    self.tree.reader.id = self.abspath

    # collect module information
    for mod_name, mod_stmt in self.tree.a.module.iteritems():
        if not State.modules.has_key(mod_name):
            State.modules[mod_name] = OrderedDict()
            State.modules[mod_name]["stmt"] = mod_stmt
            State.modules[mod_name]["file"] = self
            State.modules[mod_name]["path"] = self.abspath

    # collect program unit information (everything except modules/comments/
    # programs), grouped by reader id
    for item in self.tree.content:
        if item.__class__ not in [Module, Comment, Program]:
            if item.reader.id not in State.program_units.keys():
                State.program_units[item.reader.id] = []
            State.program_units[item.reader.id].append(item)

    # create a tuple for file dependency
    State.srcfiles[self.abspath] = (self, [], [])
moddict={} fsdict={} ### find subroutine and function definitions ############################ for ffile in argset: #print '---- '+ffile+' start -----' reader = FortranFileReader(ffile) fform=False if(re.search('.F90',ffile)): fform=True reader.set_mode(isfree=fform,isstrict=False) parser=FortranParser(reader,ignore_comments=False) parser.parse() sstack=[] deptho=-1 inso=None for c in walk(parser.block): ins=c[0] depth=c[1] if(isinstance(ins, classes.Comment)): continue if(depth>deptho): sstack.append(inso) while len(sstack)>depth: sstack.pop() # print len(sstack),depth, type(ins), item.span, item.strline, item.label #, item.name # deptho=depth # inso=ins # continue frame2x= [x.__class__.__name__+':'+x.name for x in sstack[1:] if x.__class__.__name__ in ranktag] frame2='@'.join(frame2x) # if(isinstance(ins,classes.Type)): # print ins.__class__.__name__ #,item.strline # continue if( "item" in dir(ins)) :
def main():
    """Entry point of the simplify_ifcond tool.

    Parses options, walks the given source directories, simplifies IF
    conditions that exceed the identifier threshold, and writes modified
    source files under <outdir>/src.  Returns 0 on success, -1 on error.
    """
    version = [ 0, 1, '0' ]
    outdir = '.'   # NOTE(review): unused -- opts.outdir is used instead
    retval = 0
    varid = 0      # NOTE(review): unused in this function

    Logger.info('Starting simplify_ifcond', stdout=True)
    try:
        # option parser
        parser = optparse.OptionParser(version='simplify_ifcond version %d.%d.%s'%tuple(version))
        # common options
        parser.add_option("--outdir", dest="outdir", action='store', type='string', default='output', help="path to create outputs")
        parser.add_option("--add-ext", dest="ext", action='store', type='string', default=None, help="File extensions to parse")
        parser.add_option("-t", "--threshold", dest="threshold", action='store', type='int', default=20, help="Max number of identifiers if condition before simplifying.")
        opts, args = parser.parse_args()
        if len(args)<1:
            print 'ERROR: Target source folders are not provided.'
            print 'Usage: simplify_ifcond [options] <target folder path[, target folder path, ...]>'
            sys.exit(-1)

        # create output directory
        outpath = os.path.abspath(opts.outdir)
        if not os.path.exists(outpath):
            os.makedirs(outpath)
        outsrcpath = '%s/src'%outpath
        # NOTE(review): rmtree raises OSError when outsrcpath does not yet
        # exist (e.g. on the very first run) -- consider guarding it
        shutil.rmtree(outsrcpath)
        os.makedirs(outsrcpath)

        # walk through source directory tree
        for srcdir in args:
            abssrcpath = os.path.abspath(srcdir)
            for dirName, subdirList, fileList in os.walk(abssrcpath):
                # mirror the relative directory layout under outsrcpath
                relpath = os.path.relpath(dirName, start=abssrcpath)
                outfilepath = '%s/%s'%(outsrcpath, relpath.replace('.', ''))
                if not os.path.exists(outfilepath):
                    os.makedirs(outfilepath)
                for srcfile in fileList:
                    if any(srcfile.endswith(ext) for ext in file_exts):
                        try:
                            # read source file
                            parsed = SrcFile(os.path.join(dirName, srcfile), preprocess=False)

                            # create analysis container
                            parstmts = []

                            # anlyze: find IF conditions over the threshold
                            # and remember their top-level ancestors
                            last_span = ( 1, 1 )
                            for stmt, depth in api.walk(parsed.tree):
                                if isinstance(stmt, Comment):
                                    # skip comments inside the span of the
                                    # last processed statement
                                    if stmt.item.span[0] >= last_span[0] and stmt.item.span[1] <= last_span[1]:
                                        stmt.ignore = True
                                elif isinstance(stmt, (IfThen, If, ElseIf)):
                                    if ifcondcheck(stmt.f2003.items[0], opts.threshold):
                                        p = stmt.ancestors()[-1]
                                        if p not in parstmts:
                                            parstmts.append(p)
                                        if not hasattr(p, 'simplify_ifstmts'):
                                            p.simplify_ifstmts = []
                                        if stmt not in p.simplify_ifstmts:
                                            p.simplify_ifstmts.append(stmt)
                                    last_span = stmt.item.span

                            # modify
                            for parstmt in parstmts:
                                # add openmp directive
                                add_specstmts(parstmt, len(parstmt.simplify_ifstmts))
                                # simplify
                                for i, ifstmt in enumerate(parstmt.simplify_ifstmts):
                                    simplify(ifstmt, ifstmt.f2003.items[0], i)

                            # generate modified source files
                            if len(parstmts) > 0:
                                lines = []
                                for stmt, depth in api.walk(parsed.tree):
                                    if hasattr(stmt, 'forced_lines'):
                                        lines.extend(stmt.forced_lines)
                                    # NOTE(review): assumes every statement
                                    # carries an 'ignore' attribute -- TODO
                                    # confirm it is a class-level default
                                    elif not stmt.ignore:
                                        start = stmt.item.span[0]-1
                                        end = stmt.item.span[1]
                                        for line in stmt.top.prep[start:end]:
                                            # drop '!KGEN# <lineno>' markers
                                            split = line.split()
                                            if len(split) > 2 and split[0].startswith('!KGEN#') and split[1].isdigit():
                                                continue
                                            lines.append(line)
                                with open(os.path.join(outfilepath, srcfile), 'w') as f:
                                    print 'Generating %s\n'%outfilepath
                                    f.write('\n'.join(lines))
                                    f.write('\n')
                        except Exception as e:
                            #import pdb; pdb.set_trace()
                            # NOTE(review): 'pass' after 'raise' is dead code
                            raise
                            pass

        # switch source files if directed
    except UserException as e:
        print 'ERROR: %s'%str(e)
        Logger.info(e)
        #Logger.critical(e)
        retval = -1
    except ProgramException as e:
        Logger.critical(e)
        retval = -1
    except Exception as e:
        Logger.critical(e)
        retval = -1
    finally:
        pass

    Logger.info('simplify_ifcond is finished.', stdout=True)
    return retval
def locate_callsite(cs_tree): from statements import Comment from block_statements import executable_construct import re def get_next_non_comment(stmt): if not stmt: return if not hasattr(stmt, 'parent'): return started = False for s in stmt.parent.content: if s == stmt: if not isinstance(s, Comment): return s started = True elif started: if not isinstance(s, Comment): return s def get_names(node, bag, depth): from Fortran2003 import Name if isinstance(node, Name) and not node.string in bag: bag.append(node.string) # collect directives directs = [] for stmt, depth in walk(cs_tree): if isinstance(stmt, Comment): line = stmt.item.comment.strip() match = re.match(r'^[c!*]\$kgen\s+(.+)$', line, re.IGNORECASE) if match: dsplit = match.group(1).split(' ', 1) dname = dsplit[0].strip() if len(dsplit) > 1: clause = dsplit[1].strip() else: clause = None if dname.startswith('begin_'): sname = dname[6:] directs.append(sname) State.kernel['name'] = clause elif dname.startswith('end_'): ename = dname[4:] if directs[-1] == ename: directs.pop() if ename == 'callsite': pass else: raise UserException( 'WARNING: Not supported KGEN directive: %s' % ename) else: raise UserException('Directive name mismatch: %s, %s' % (dname_stack[-1], ename)) elif dname == 'callsite': next_fort_stmt = get_next_non_comment(stmt) if next_fort_stmt: State.kernel['name'] = clause State.callsite['stmts'].append(next_fort_stmt) else: raise UserException('WARNING: callsite is not found') else: if Config.callsite[ 'namepath'] and stmt.__class__ in executable_construct: names = [] traverse(stmt.f2003, get_names, names) for name in names: if match_namepath(Config.callsite['namepath'], pack_exnamepath(stmt, name), internal=False): State.kernel['name'] = name for _s, _d in walk(stmt): State.callsite['stmts'].append(_s) return elif len(directs) > 0 and directs[-1] == 'callsite': State.callsite['stmts'].append(stmt) if len(State.callsite['stmts']) == 0: raise UserException('Can not find callsite')
def __init__(self, srcpath, preprocess=True):
    """Read, optionally preprocess, and parse one Fortran source file.

    srcpath   : path to the source file.
    preprocess: when True, run fpp/cpp (per Config.bin) over the file and
                tag preprocessor line markers with a '!KGEN' prefix.

    Side effects: populates self.tree, registers modules in Config.modules,
    non-module program units in Config.program_units, the file itself in
    Config.srcfiles, and finally processes KGEN directives.
    """
    # set default values
    self.tree = None
    self.srcpath = srcpath
    self.realpath = os.path.realpath(self.srcpath)

    # set source file format: per-file override first, global setting
    # otherwise (None lets downstream defaults apply)
    isfree = None
    isstrict = None
    if self.realpath in Config.source['file'].keys():
        if Config.source['file'][self.realpath].has_key('isfree'):
            isfree = Config.source['file'][self.realpath]['isfree']
        if Config.source['file'][self.realpath].has_key('isstrict'):
            isstrict = Config.source['file'][self.realpath]['isstrict']
    else:
        isstrict = Config.source['isstrict']
        isfree = Config.source['isfree']

    # prepare include paths and macro definitions (per-file + common)
    path_src = []
    macros_src = []
    if Config.include['file'].has_key(self.realpath):
        path_src = Config.include['file'][self.realpath]['path'] + [
            os.path.dirname(self.realpath)
        ]
        # drop empty path entries
        path_src = [path for path in path_src if len(path) > 0]
        for k, v in Config.include['file'][
                self.realpath]['macro'].iteritems():
            if v is not None:
                macros_src.append('-D%s=%s' % (k, v))
            else:
                macros_src.append('-D%s' % k)
    # put the mpif.h directory first when an MPI header is configured
    if os.path.isfile(Config.mpi['header']):
        includes = [
            '-I %s' % incpath
            for incpath in [os.path.dirname(Config.mpi['header'])] +
            Config.include['path'] + path_src
        ]
    else:
        includes = [
            '-I %s' % incpath
            for incpath in Config.include['path'] + path_src
        ]
    macros_common = []
    for k, v in Config.include['macro'].iteritems():
        if v:
            macros_common.append('-D%s=%s' % (k, v))
        else:
            macros_common.append('-D%s' % k)
    macros = ' '.join(macros_common + macros_src)

    # execute preprocessing
    logger.info('Reading %s' % self.srcpath)
    new_lines = []
    with open(self.realpath, 'r') as f:
        if preprocess:
            pp = Config.bin['pp']
            if pp.endswith('fpp'):
                # fpp needs an explicit source-form flag; default to free
                if isfree is None or isfree:
                    srcfmt = ' -free'
                else:
                    srcfmt = ' -fixed'
                flags = Config.bin['fpp_flags'] + srcfmt
            elif pp.endswith('cpp'):
                flags = Config.bin['cpp_flags']
            else:
                raise UserException(
                    'Preprocessor is not either fpp or cpp')
            output, err, retcode = kgutils.run_shcmd(
                '%s %s %s %s' % (pp, flags, ' '.join(includes), macros),
                input=f.read())
            # keep preprocessor line markers as '!KGEN#...' comments so the
            # original line numbers survive parsing
            prep = map(lambda l: '!KGEN' + l if l.startswith('#') else l,
                       output.split('\n'))
            new_lines = self.handle_include(prep)
        else:
            new_lines = f.read().split('\n')

    # add include paths
    include_dirs = Config.include['path'][:]
    if Config.include['file'].has_key(
            self.realpath) and Config.include['file'][
                self.realpath].has_key('path'):
        include_dirs.extend(Config.include['file'][self.realpath]['path'])
        include_dirs.append(os.path.dirname(self.realpath))

    # fparse
    self.tree = api.parse('\n'.join(new_lines), ignore_comments=False, analyze=True, isfree=isfree, \
        isstrict=isstrict, include_dirs=include_dirs, source_only=None )
    self.tree.prep = new_lines

    # parse f2003
    # (lineno/linediff appear unused here -- NOTE(review): likely leftovers
    # from the span-fixing logic used elsewhere in this codebase)
    lineno = 0
    linediff = 0
    for stmt, depth in api.walk(self.tree, -1):
        stmt.parse_f2003()

    # rename reader.id so diagnostics refer to the real file path
    self.tree.reader.id = self.realpath

    # collect module information
    for mod_name, mod_stmt in self.tree.a.module.iteritems():
        if not Config.modules.has_key(mod_name):
            Config.modules[mod_name] = collections.OrderedDict()
            Config.modules[mod_name]['stmt'] = mod_stmt
            Config.modules[mod_name]['file'] = self
            Config.modules[mod_name]['path'] = self.realpath

    # collect program unit information (everything except modules/comments/
    # programs), grouped by reader id
    for item in self.tree.content:
        if item.__class__ not in [Module, Comment, Program]:
            if item.reader.id not in Config.program_units.keys():
                Config.program_units[item.reader.id] = []
            Config.program_units[item.reader.id].append(item)

    # create a tuple for file dependency
    Config.srcfiles[self.realpath] = (self, [], [])

    self.process_directive()
srcfiles = set(argset) ranktag = ["Program", "Subroutine", "Function", "Module", "Type", "Interface"] moddict = {} fsdict = {} ### find subroutine and function definitions ############################ for ffile in argset: # print '---- '+ffile+' start -----' reader = FortranFileReader(ffile) reader.set_mode(isfree=False, isstrict=False) parser = FortranParser(reader, ignore_comments=False) parser.parse() sstack = [] deptho = -1 inso = None for c in walk(parser.block): ins = c[0] depth = c[1] if (isinstance(ins, classes.Comment)): continue if (depth > deptho): sstack.append(inso) while len(sstack) > depth: sstack.pop() # print len(sstack),depth, type(ins), ins.item.span, ins.item.strline, ins.item.label #, ins.item.name # deptho=depth # inso=ins # continue frame2x = [ x.__class__.__name__ + ':' + x.name for x in sstack[1:] if x.__class__.__name__ in ranktag ] frame2 = '@'.join(frame2x)
def check_mode(): from kgen_utils import Config, run_shcmd from utils import module_file_extensions from api import parse, walk from statements import Comment from kgen_search import f2003_search_unknowns, SearchException import logging logger = logging.getLogger('kgen') # KGEN addition logger.setLevel(logging.WARNING) files = [] # collect source files for path in Config.check_mode: if os.path.basename(path).startswith('.'): continue if os.path.isdir(path): for root, dirnames, filenames in os.walk(os.path.abspath(path)): for filename in filenames: if os.path.basename(filename).startswith('.'): continue fname, fext = os.path.splitext(filename) if len(fext)>1 and fext.lower() in module_file_extensions: files.append(os.path.join(root, filename)) elif os.path.isfile(path): if os.path.isfile(path): files.append(os.path.abspath(path)) else: raise '%s is not a direcotory nor a file'%path # TODO: support #include cpp directive # parse source files for n, file in enumerate(files): print 'Reading(%d/%d): '%(n+1, len(files)), file # fsrc = open(file, 'rb') # prepare include paths and macro definitions path_src = [] macros_src = [] if Config.include['file'].has_key(self.abspath): path_src = Config.include['file'][self.abspath]['path']+[os.path.dirname(self.abspath)] for k, v in Config.include['file'][self.abspath]['macro'].iteritems(): if v: macros_src.append('-D%s=%s'%(k,v)) else: macros_src.append('-D%s'%k) includes = '-I'+' -I'.join(Config.include['path']+path_src) macros_common = [] for k, v in Config.include['macro'].iteritems(): if v: macros_common.append('-D%s=%s'%(k,v)) else: macros_common.append('-D%s'%k) macros = ' '.join(macros_common + macros_src) # execute preprocessing prep = Config.bin['pp'] if prep.endswith('fpp'): flags = Config.bin['fpp_flags'] elif prep.endswith('cpp'): flags = Config.bin['cpp_flags'] else: raise UserException('Preprocessor is not either fpp or cpp') output, err, retcode = run_shcmd('%s %s %s %s %s' % (prep, flags, includes, macros, file)) # 
convert the preprocessed for fparser prep = map(lambda l: '!KGEN'+l if l.startswith('#') else l, output.split('\n')) # fparse tree = parse('\n'.join(prep), ignore_comments=False, analyze=False, isfree=True, isstrict=False, \ include_dirs=None, source_only=None ) # parse f2003 Config.search['promote_exception'] = True lineno = 0 linediff = 0 for stmt, depth in walk(tree, -1): try: if isinstance(stmt, Comment) and stmt.item.comment.startswith('!KGEN#'): comment_split = stmt.item.comment.split(' ') lineno = int(comment_split[1]) stmt.item.span = ( 0, 0 ) else: if lineno>0: linediff = stmt.item.span[0] - lineno lineno = 0 stmt.item.span = ( stmt.item.span[0]-linediff, stmt.item.span[1]-linediff ) stmt.parse_f2003() if stmt.f2003.__class__ not in exclude_list: f2003_search_unknowns(stmt, stmt.f2003, gentype=KGGenType.KERNEL) except (NoMatchError, AttributeError) as e: if file not in not_parsed: not_parsed[file] = [] not_parsed[file].append(stmt) except NameError as e: errmsg = str(e) pos = errmsg.find('search_') if len(errmsg)>7 and pos>0: clsname = errmsg[pos+7:-16] #print "NOT SUPPORTED: '%s' Fortran statement is not supported yet"%clsname if file not in not_supported: not_supported[file] = [] not_supported[file].append((clsname, stmt.item.span[0])) except Exception as e: print 'WARNING: Following statement is not correctly parsed' print stmt print '' print '' print '********************' print '*** CHECK RESULT ***' print '********************' print '' print 'NOTE: KGEN may be able to extract kernel even though not all source code lines are parsed or supported.' 
print '' print '*** KGEN Parsing Error(s) ***' print '' for file, stmts in not_parsed.iteritems(): print file lines = [] for stmt in stmts: if hasattr(stmt, 'item'): lines.append('Near line # %d:'%stmt.item.span[0]) lines.append(stmt.tokgen()+'\n') else: lines.append(str(stmt)+'\n') print '\n'.join(lines), '\n' print '*** Not Supported Fortran Statement(s) ***' print '' for file, clsnames in not_supported.iteritems(): print file lines = [] for clsname, lineno in clsnames: lines.append("'%s' Fortran statment near line # %d"%(clsname, lineno)) print '\n'.join(lines), '\n' if len(not_parsed)==0 and len(not_supported)==0: print 'Current KGEN version can support all source code lines.'