def analyze_callsite():
    from block_statements import EndStatement, Subroutine, Function, Interface
    from statements import SpecificBinding
    from kgen_search import f2003_search_unknowns

    # read source file that contains callsite stmt
    cs_file = SrcFile(Config.callsite['filepath'])
    #process_directive(cs_file.tree)

    if len(State.callsite['stmts']) == 0:
        raise UserException('Can not find callsite')

    # ancestors of callsite stmt
    ancs = State.callsite['stmts'][0].ancestors()

    # add geninfo for ancestors
    prevstmt = State.callsite['stmts'][0]
    prevname = None
    for anc in reversed(ancs):
        if not hasattr(anc, 'geninfo'):
            anc.geninfo = OrderedDict()
        if len(anc.content) > 0 and isinstance(anc.content[-1], EndStatement) and \
            not hasattr(anc.content[-1], 'geninfo'):
            anc.content[-1].geninfo = OrderedDict()

        if prevname:
            dummy_req = ResState(KGGenType.STATE_IN, KGName(prevname), None, [anc])
            dummy_req.res_stmts = [prevstmt]
            anc.check_spec_stmts(dummy_req.uname, dummy_req)

        if hasattr(anc, 'name'):
            prevname = anc.name
        else:
            prevname = None
        prevstmt = anc

    # populate parent block parameters
    State.parentblock['stmt'] = ancs[-1]

    # populate top block parameters
    State.topblock['stmt'] = ancs[0]

    for cs_stmt in State.callsite['stmts']:
        # resolve cs_stmt
        f2003_search_unknowns(cs_stmt, cs_stmt.f2003)
        for uname, req in cs_stmt.unknowns.iteritems():
            cs_stmt.resolve(req)
            if not req.res_stmts:
                raise ProgramException('Resolution fail.')

    # update state info of callsite and its upper blocks
    update_state_info(State.parentblock['stmt'])

    # update state info of modules
    for modname, moddict in State.modules.iteritems():
        modstmt = moddict['stmt']
        if modstmt != State.topblock['stmt']:
            update_state_info(moddict['stmt'])
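
# The loop in analyze_callsite() walks the callsite's ancestor blocks from the
# innermost parent up to the top-level program unit and attaches a 'geninfo'
# OrderedDict to any block that does not carry one yet. The sketch below is a
# minimal, KGen-independent illustration of that pattern; the Block class and
# the sample tree are hypothetical and are not used anywhere in KGen.
def _example_tag_ancestors():
    from collections import OrderedDict

    class Block(object):
        # minimal stand-in for a parsed program unit with a parent link
        def __init__(self, name, parent=None):
            self.name = name
            self.parent = parent

        def ancestors(self):
            # outermost ancestor first, immediate parent last
            # (mirrors ancs[0] == top block, ancs[-1] == parent block above)
            chain = []
            node = self.parent
            while node is not None:
                chain.append(node)
                node = node.parent
            return list(reversed(chain))

    top = Block('top_module')
    parent = Block('parent_subroutine', top)
    callsite = Block('callsite_stmt', parent)

    # walk from the immediate parent up to the top block, tagging each ancestor once
    for anc in reversed(callsite.ancestors()):
        if not hasattr(anc, 'geninfo'):
            anc.geninfo = OrderedDict()

    return [(anc.name, anc.geninfo) for anc in callsite.ancestors()]
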
def check_mode():
    from kgen_utils import Config, exec_cmd
    from utils import module_file_extensions
    from api import parse, walk
    from statements import Comment
    from kgen_search import f2003_search_unknowns, SearchException
    import logging
    logger = logging.getLogger('kgen')  # KGEN addition
    logger.setLevel(logging.WARNING)

    files = []

    # collect source files
    for path in Config.check_mode:
        if os.path.basename(path).startswith('.'):
            continue
        if os.path.isdir(path):
            for root, dirnames, filenames in os.walk(os.path.abspath(path)):
                for filename in filenames:
                    if os.path.basename(filename).startswith('.'):
                        continue
                    fname, fext = os.path.splitext(filename)
                    if len(fext) > 1 and fext.lower() in module_file_extensions:
                        files.append(os.path.join(root, filename))
        elif os.path.isfile(path):
            files.append(os.path.abspath(path))
        else:
            raise UserException('%s is not a directory nor a file' % path)

    # TODO: support #include cpp directive

    # statements that could not be parsed or are not supported, keyed by file
    not_parsed = {}
    not_supported = {}

    # parse source files
    for n, file in enumerate(files):
        print 'Reading(%d/%d): ' % (n + 1, len(files)), file
        # fsrc = open(file, 'rb')

        # prepare include paths and macro definitions
        abspath = os.path.abspath(file)
        path_src = []
        macros_src = []
        if Config.include['file'].has_key(abspath):
            path_src = Config.include['file'][abspath]['path'] + [os.path.dirname(abspath)]
            for k, v in Config.include['file'][abspath]['macro'].iteritems():
                if v:
                    macros_src.append('-D%s=%s' % (k, v))
                else:
                    macros_src.append('-D%s' % k)
        includes = '-I' + ' -I'.join(Config.include['path'] + path_src)

        macros_common = []
        for k, v in Config.include['macro'].iteritems():
            if v:
                macros_common.append('-D%s=%s' % (k, v))
            else:
                macros_common.append('-D%s' % k)
        macros = ' '.join(macros_common + macros_src)

        # execute preprocessing
        prep = Config.bin['pp']
        if prep.endswith('fpp'):
            flags = Config.bin['fpp_flags']
        elif prep.endswith('cpp'):
            flags = Config.bin['cpp_flags']
        else:
            raise UserException('Preprocessor is not either fpp or cpp')
        output = exec_cmd('%s %s %s %s %s' % (prep, flags, includes, macros, file))

        # convert the preprocessed output for fparser:
        # hide cpp directives (e.g. line markers) as '!KGEN' Fortran comments
        prep = map(lambda l: '!KGEN' + l if l.startswith('#') else l, output.split('\n'))

        # fparse
        tree = parse('\n'.join(prep), ignore_comments=False, analyze=False, isfree=True,
                     isstrict=False, include_dirs=None, source_only=None)

        # parse f2003
        Config.search['promote_exception'] = True
        lineno = 0
        linediff = 0
        for stmt, depth in walk(tree, -1):
            try:
                if isinstance(stmt, Comment) and stmt.item.comment.startswith('!KGEN#'):
                    comment_split = stmt.item.comment.split(' ')
                    lineno = int(comment_split[1])
                    stmt.item.span = (0, 0)
                else:
                    if lineno > 0:
                        linediff = stmt.item.span[0] - lineno
                        lineno = 0
                    stmt.item.span = (stmt.item.span[0] - linediff, stmt.item.span[1] - linediff)

                stmt.parse_f2003()
                if stmt.f2003.__class__ not in exclude_list:
                    f2003_search_unknowns(stmt, stmt.f2003, gentype=KGGenType.KERNEL)
            except (NoMatchError, AttributeError) as e:
                if file not in not_parsed:
                    not_parsed[file] = []
                not_parsed[file].append(stmt)
            except NameError as e:
                errmsg = str(e)
                pos = errmsg.find('search_')
                if len(errmsg) > 7 and pos > 0:
                    clsname = errmsg[pos + 7:-16]
                    #print "NOT SUPPORTED: '%s' Fortran statement is not supported yet"%clsname
                    if file not in not_supported:
                        not_supported[file] = []
                    not_supported[file].append((clsname, stmt.item.span[0]))
            except Exception as e:
                print 'WARNING: Following statement is not correctly parsed'
                print stmt
                print ''

    print ''
    print '********************'
    print '*** CHECK RESULT ***'
    print '********************'
    print ''
    print 'NOTE: KGEN may be able to extract kernel even though not all source code lines are parsed or supported.'
    print ''

    print '*** KGEN Parsing Error(s) ***'
    print ''
    for file, stmts in not_parsed.iteritems():
        print file
        lines = []
        for stmt in stmts:
            if hasattr(stmt, 'item'):
                lines.append('Near line # %d:' % stmt.item.span[0])
                lines.append(stmt.tokgen() + '\n')
            else:
                lines.append(str(stmt) + '\n')
        print '\n'.join(lines), '\n'

    print '*** Not Supported Fortran Statement(s) ***'
    print ''
    for file, clsnames in not_supported.iteritems():
        print file
        lines = []
        for clsname, lineno in clsnames:
            lines.append("'%s' Fortran statement near line # %d" % (clsname, lineno))
        print '\n'.join(lines), '\n'

    if len(not_parsed) == 0 and len(not_supported) == 0:
        print 'Current KGEN version can support all source code lines.'
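
# check_mode() keeps cpp line markers (lines such as '# 42 "foo.F90"') alive through
# fparser by prefixing them with '!KGEN', then uses the resulting '!KGEN#' comments to
# shift parsed line spans back to the original, pre-preprocessing line numbers. The
# sketch below reproduces that bookkeeping on a hand-written example, independent of
# KGen and fparser; the input lines and names are illustrative only.
def _example_restore_linenos():
    preprocessed = [
        '# 1 "demo.F90"',   # cpp line marker: next line came from demo.F90 line 1
        'module demo',
        '# 40 "demo.F90"',  # cpp skipped included text; next line is source line 40
        'integer :: i',
        'end module demo',
    ]

    # step 1: hide cpp directives as Fortran comments so the parser preserves them
    hidden = ['!KGEN' + l if l.startswith('#') else l for l in preprocessed]

    # step 2: use the '!KGEN#' markers to map parser line numbers back to the source
    restored = []
    lineno = 0
    linediff = 0
    for parsed_lineno, line in enumerate(hidden, 1):
        if line.startswith('!KGEN#'):
            lineno = int(line.split(' ')[1])
            continue
        if lineno > 0:
            linediff = parsed_lineno - lineno
            lineno = 0
        restored.append((line, parsed_lineno - linediff))

    # [('module demo', 1), ('integer :: i', 40), ('end module demo', 41)]
    return restored
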
def analyze():
    from block_statements import EndStatement, Subroutine, Function, Interface
    from statements import SpecificBinding
    from kgen_search import f2003_search_unknowns

    # read source file that contains callsite stmt
    cs_file = SrcFile(Config.callsite['filepath'])
    locate_callsite(cs_file.tree)

    # ancestors of callsite stmt
    ancs = State.callsite['stmts'][0].ancestors()

    # add geninfo for ancestors
    prevstmt = State.callsite['stmts'][0]
    prevname = None
    for anc in reversed(ancs):
        if not hasattr(anc, 'geninfo'):
            anc.geninfo = OrderedDict()
        if len(anc.content) > 0 and isinstance(anc.content[-1], EndStatement) and \
            not hasattr(anc.content[-1], 'geninfo'):
            anc.content[-1].geninfo = OrderedDict()

        if prevname:
            dummy_req = ResState(KGGenType.STATE_IN, KGName(prevname), None, [anc])
            dummy_req.res_stmts = [prevstmt]
            anc.check_spec_stmts(dummy_req.uname, dummy_req)

        if hasattr(anc, 'name'):
            prevname = anc.name
        else:
            prevname = None
        prevstmt = anc

    # populate parent block parameters
    State.parentblock['stmt'] = ancs[-1]
    #State.parentblock['expr'] = State.parentblock['stmt'].f2003
    #collect_args_from_subpstmt(State.parentblock['stmt'], State.parentblock['dummy_arg'])

    # populate top block parameters
    #State.topblock['file'] = cs_file
    #State.topblock['path'] = cs_file.abspath
    State.topblock['stmt'] = ancs[0]
    #State.topblock['expr'] = State.topblock['stmt'].f2003

    for cs_stmt in State.callsite['stmts']:
        # resolve cs_stmt
        f2003_search_unknowns(cs_stmt, cs_stmt.f2003)
        for uname, req in cs_stmt.unknowns.iteritems():
            cs_stmt.resolve(req)
            if not req.res_stmts:
                raise ProgramException('Resolution fail.')

    # update state info of callsite and its upper blocks
    update_state_info(State.parentblock['stmt'])

    # update state info of modules
    for modname, moddict in State.modules.iteritems():
        modstmt = moddict['stmt']
        if modstmt != State.topblock['stmt']:
            update_state_info(moddict['stmt'])
def check_mode():
    from kgen_utils import Config, run_shcmd
    from utils import module_file_extensions
    from api import parse, walk
    from statements import Comment
    from kgen_search import f2003_search_unknowns, SearchException
    import logging
    logger = logging.getLogger('kgen')  # KGEN addition
    logger.setLevel(logging.WARNING)

    files = []

    # collect source files
    for path in Config.check_mode:
        if os.path.basename(path).startswith('.'):
            continue
        if os.path.isdir(path):
            for root, dirnames, filenames in os.walk(os.path.abspath(path)):
                for filename in filenames:
                    if os.path.basename(filename).startswith('.'):
                        continue
                    fname, fext = os.path.splitext(filename)
                    if len(fext) > 1 and fext.lower() in module_file_extensions:
                        files.append(os.path.join(root, filename))
        elif os.path.isfile(path):
            files.append(os.path.abspath(path))
        else:
            raise UserException('%s is not a directory nor a file' % path)

    # TODO: support #include cpp directive

    # statements that could not be parsed or are not supported, keyed by file
    not_parsed = {}
    not_supported = {}

    # parse source files
    for n, file in enumerate(files):
        print 'Reading(%d/%d): ' % (n + 1, len(files)), file
        # fsrc = open(file, 'rb')

        # prepare include paths and macro definitions
        abspath = os.path.abspath(file)
        path_src = []
        macros_src = []
        if Config.include['file'].has_key(abspath):
            path_src = Config.include['file'][abspath]['path'] + [os.path.dirname(abspath)]
            for k, v in Config.include['file'][abspath]['macro'].iteritems():
                if v:
                    macros_src.append('-D%s=%s' % (k, v))
                else:
                    macros_src.append('-D%s' % k)
        includes = '-I' + ' -I'.join(Config.include['path'] + path_src)

        macros_common = []
        for k, v in Config.include['macro'].iteritems():
            if v:
                macros_common.append('-D%s=%s' % (k, v))
            else:
                macros_common.append('-D%s' % k)
        macros = ' '.join(macros_common + macros_src)

        # execute preprocessing
        prep = Config.bin['pp']
        if prep.endswith('fpp'):
            flags = Config.bin['fpp_flags']
        elif prep.endswith('cpp'):
            flags = Config.bin['cpp_flags']
        else:
            raise UserException('Preprocessor is not either fpp or cpp')
        output, err, retcode = run_shcmd('%s %s %s %s %s' % (prep, flags, includes, macros, file))

        # convert the preprocessed output for fparser:
        # hide cpp directives (e.g. line markers) as '!KGEN' Fortran comments
        prep = map(lambda l: '!KGEN' + l if l.startswith('#') else l, output.split('\n'))

        # fparse
        tree = parse('\n'.join(prep), ignore_comments=False, analyze=False, isfree=True,
                     isstrict=False, include_dirs=None, source_only=None)

        # parse f2003
        Config.search['promote_exception'] = True
        lineno = 0
        linediff = 0
        for stmt, depth in walk(tree, -1):
            try:
                if isinstance(stmt, Comment) and stmt.item.comment.startswith('!KGEN#'):
                    comment_split = stmt.item.comment.split(' ')
                    lineno = int(comment_split[1])
                    stmt.item.span = (0, 0)
                else:
                    if lineno > 0:
                        linediff = stmt.item.span[0] - lineno
                        lineno = 0
                    stmt.item.span = (stmt.item.span[0] - linediff, stmt.item.span[1] - linediff)

                stmt.parse_f2003()
                if stmt.f2003.__class__ not in exclude_list:
                    f2003_search_unknowns(stmt, stmt.f2003, gentype=KGGenType.KERNEL)
            except (NoMatchError, AttributeError) as e:
                if file not in not_parsed:
                    not_parsed[file] = []
                not_parsed[file].append(stmt)
            except NameError as e:
                errmsg = str(e)
                pos = errmsg.find('search_')
                if len(errmsg) > 7 and pos > 0:
                    clsname = errmsg[pos + 7:-16]
                    #print "NOT SUPPORTED: '%s' Fortran statement is not supported yet"%clsname
                    if file not in not_supported:
                        not_supported[file] = []
                    not_supported[file].append((clsname, stmt.item.span[0]))
            except Exception as e:
                print 'WARNING: Following statement is not correctly parsed'
                print stmt
                print ''

    print ''
    print '********************'
    print '*** CHECK RESULT ***'
    print '********************'
    print ''
    print 'NOTE: KGEN may be able to extract kernel even though not all source code lines are parsed or supported.'
    print ''

    print '*** KGEN Parsing Error(s) ***'
    print ''
    for file, stmts in not_parsed.iteritems():
        print file
        lines = []
        for stmt in stmts:
            if hasattr(stmt, 'item'):
                lines.append('Near line # %d:' % stmt.item.span[0])
                lines.append(stmt.tokgen() + '\n')
            else:
                lines.append(str(stmt) + '\n')
        print '\n'.join(lines), '\n'

    print '*** Not Supported Fortran Statement(s) ***'
    print ''
    for file, clsnames in not_supported.iteritems():
        print file
        lines = []
        for clsname, lineno in clsnames:
            lines.append("'%s' Fortran statement near line # %d" % (clsname, lineno))
        print '\n'.join(lines), '\n'

    if len(not_parsed) == 0 and len(not_supported) == 0:
        print 'Current KGEN version can support all source code lines.'