def testClearHistory(self):
    """Constructs a CppIncludeStd with empty search paths and checks that
    clearHistory() empties both the current-place stack and the find
    logic history."""
    handler = IncludeHandler.CppIncludeStd([], [])
    handler.validateCpStack()
    handler.clearHistory()
    self.assertEqual(handler._cpStack, [])
    self.assertEqual(handler._findLogic, [])
def setUp(self):
    """Builds a CppIncludeStringIO simulator with one user and one system
    directory, checks the pristine current-place stack, then loads the
    initial translation unit and verifies the returned file object."""
    self._incSim = IncludeHandler.CppIncludeStringIO(
        [os.path.join('usr'), ],
        [os.path.join('sys'), ],
        u'Contents of src/spam.c\n',
        {
            os.path.join('usr', 'spam.hp'): u'User, spam.hp\n',
            os.path.join('sys', 'spam.h'): u'System, spam.h\n',
            os.path.join('src', 'spam.h'): u'Current place, spam.h\n',
        },
    )
    self._incSim.validateCpStack()
    # Nothing has been included yet so the current-place stack is empty.
    self.assertEqual([], self._incSim.cpStack)
    self.assertEqual(0, self._incSim.cpStackSize)
    # Load the initial translation unit.
    myFpo = self._incSim.initialTu('src/spam.c')
    self._incSim.validateCpStack()
    self.assertEqual(['src', ], self._incSim.cpStack)
    self.assertEqual(1, self._incSim.cpStackSize)
    self.assertNotEqual(None, myFpo)
    # Check the fields of the returned value.
    self.assertEqual('Contents of src/spam.c\n', myFpo.fileObj.read())
    self.assertEqual('src/spam.c', myFpo.filePath)
    self.assertEqual('src', myFpo.currentPlace)
    self.assertEqual('TU', myFpo.origin)
def main():
    """Pre-processes the file named in sys.argv[1] and prints the
    translation unit followed by the lexer's include graph, conditional
    compilation graph, macro environment and macro history."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['../usr', ],
        theSysDirs=['../sys', ],
    )
    lexer = PpLexer.PpLexer(sys.argv[1], incHandler)
    translationUnit = ''.join(tok.t for tok in lexer.ppTokens(minWs=True))
    print(' Translation Unit '.center(75, '='))
    print(translationUnit)
    print(' Translation Unit END '.center(75, '='))
    print()
    print(' File Include Graph '.center(75, '='))
    print(repr(lexer.fileIncludeGraphRoot))
    print(lexer.fileIncludeGraphRoot)
    print(' File Include Graph END '.center(75, '='))
    print()
    print(' Conditional Compilation Graph '.center(75, '='))
    print(lexer.condCompGraph)
    print(' Conditional Compilation Graph END '.center(75, '='))
    print()
    print(' Macro Environment '.center(75, '='))
    print(lexer.macroEnvironment)
    print(' Macro Environment END '.center(75, '='))
    print()
    print(' Macro History '.center(75, '='))
    print(lexer.macroEnvironment.macroHistory(incEnv=False, onlyRef=False))
    print(' Macro History END '.center(75, '='))
def main():
    """Pre-processes sys.argv[1] and prints the macro history followed by
    an MD5 checksum of the minimised token stream and the lexer's token
    count stack.

    Fixes: the original was Python 2 (``print`` statements and
    ``except ExceptionCpip, err``); under Python 3 ``hashlib`` also
    requires bytes so each token's text is encoded before hashing.
    """
    try:
        print('Processing:', sys.argv[1])
        myH = IncludeHandler.CppIncludeStdOs(
            theUsrDirs=['../usr', ],
            theSysDirs=['../sys', ],
        )
        myLex = PpLexer.PpLexer(sys.argv[1], myH)
        m = hashlib.md5()
        for tok in myLex.ppTokens(minWs=True, incCond=False):
            # hashlib.update() needs bytes, not str, under Python 3.
            m.update(tok.t.encode('utf-8'))
        print()
        print(myLex.macroEnvironment.macroHistory(onlyRef=False))
        print()
        print('Checksum is: %s' % m.hexdigest())
        print()
        # NOTE(review): reaches into a private lexer attribute; presumably
        # diagnostic-only output.
        print(myLex._tokCountStk)
    except ExceptionCpip as err:
        print('Ooops: %s' % err)
def test_initialTuFails(self):
    """The CppIncludeStd base class cannot open a translation unit, so
    initialTu() must raise NotImplementedError."""
    handler = IncludeHandler.CppIncludeStd([], [])
    self.assertRaises(
        NotImplementedError,
        handler.initialTu,
        'NonExistentFile',
    )
def main():
    """Pre-processes the file named in sys.argv[1] and prints the repr()
    of the resulting file include graph."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['../usr', ],
        theSysDirs=['../sys', ],
    )
    lexer = PpLexer.PpLexer(sys.argv[1], incHandler)
    # Consume the whole token stream; this populates the include graph.
    ''.join(tok.t for tok in lexer.ppTokens(minWs=True))
    print(repr(lexer.fileIncludeGraphRoot))
def setUp(self):
    """Creates a CppIncludeStringIO from the class-level fixture data and
    verifies that the current-place stack starts out empty."""
    self._incSim = IncludeHandler.CppIncludeStringIO(
        self._pathsUsr,
        self._pathsSys,
        self._initialTuContents,
        self._incFileMap,
    )
    self._incSim.validateCpStack()
    # No include has happened yet.
    self.assertEqual([], self._incSim.cpStack)
    self.assertEqual(0, self._incSim.cpStackSize)
def main():
    """Creates an OS include handler and a PpLexer for the file named in
    sys.argv[1]. Note: the lexer is constructed but not run here."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['proj/usr', ],
        theSysDirs=['proj/sys', ],
    )
    PpLexer.PpLexer(sys.argv[1], incHandler)
def main():
    """Pre-processes the file named in sys.argv[1] and prints each
    token's text separated by single spaces."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['proj/usr', ],
        theSysDirs=['proj/sys', ],
    )
    lexer = PpLexer.PpLexer(sys.argv[1], incHandler)
    for aTok in lexer.ppTokens():
        print(aTok.t, end=' ')
def main():
    """Pre-processes the file named in sys.argv[1], converts the file
    include graph into a MyVisitorTreeNode tree via the visitor pattern
    and prints that tree."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['../usr', ],
        theSysDirs=['../sys', ],
    )
    lexer = PpLexer.PpLexer(sys.argv[1], incHandler)
    # Run the lexer to completion so the include graph is populated.
    ''.join(tok.t for tok in lexer.ppTokens(minWs=True))
    visitor = FileIncludeGraph.FigVisitorTree(MyVisitorTreeNode)
    lexer.fileIncludeGraphRoot.acceptVisitor(visitor)
    print(visitor.tree())
def setUp(self):
    """Builds a CppIncludeStringIO with two user and two system include
    directories (suitable for #include_next tests), checks the pristine
    stack state, then loads the initial translation unit and verifies
    the returned file object."""
    self._incSim = IncludeHandler.CppIncludeStringIO(
        theUsrDirs=[
            os.path.join('usr'),
            os.path.join('usr', 'inc'),
        ],
        theSysDirs=[
            os.path.join('sys'),
            os.path.join('sys', 'inc'),
        ],
        theInitialTuContent=u'Contents of src/spam.c\n',
        theFilePathToContent={
            # spam.h is present in all four search directories.
            os.path.join('usr', 'inc', 'spam.h'): u'User, include, spam.h\n',
            os.path.join('usr', 'spam.h'): u'User, spam.h\n',
            os.path.join('sys', 'spam.h'): u'System, spam.h\n',
            os.path.join('sys', 'inc', 'spam.h'): u'System, include, spam.h\n',
            # no_next.h only appears in the two top-level directories.
            os.path.join('usr', 'no_next.h'): u'User, no_next.h\n',
            os.path.join('sys', 'no_next.h'): u'System, no_next.h\n',
        },
    )
    self._incSim.validateCpStack()
    # No include has happened yet.
    self.assertEqual([], self._incSim.cpStack)
    self.assertEqual(0, self._incSim.cpStackSize)
    # Load the initial translation unit.
    myFpo = self._incSim.initialTu(u'src/spam.c')
    self._incSim.validateCpStack()
    self.assertEqual(['src', ], self._incSim.cpStack)
    self.assertEqual(1, self._incSim.cpStackSize)
    self.assertNotEqual(None, myFpo)
    # Check the fields of the returned value.
    self.assertEqual('Contents of src/spam.c\n', myFpo.fileObj.read())
    self.assertEqual('src/spam.c', myFpo.filePath)
    self.assertEqual('src', myFpo.currentPlace)
    self.assertEqual('TU', myFpo.origin)
def setUp(self):
    """Builds a CppIncludeStringIO simulator and exercises its error
    paths before any include has happened, then loads the initial
    translation unit, checks the returned file object, and finally
    verifies that finalise() raises while the stack is non-empty."""
    self._incSim = IncludeHandler.CppIncludeStringIO(
        [os.path.join('usr'), ],
        [os.path.join('sys'), ],
        u'Contents of src/spam.c\n',
        {
            os.path.join('usr', 'spam.hp'): 'User, spam.hp\n',
            os.path.join('sys', 'spam.h'): 'System, spam.h\n',
        },
    )
    # No include is in progress yet, so these must fail.
    self.assertEqual(False, self._incSim.canInclude())
    self.assertRaises(IncludeHandler.ExceptionCppInclude,
                      self._incSim.endInclude)
    try:
        self._incSim.currentPlace
        self.fail(
            'IncludeHandler.ExceptionCppInclude not raised for currentPlace'
        )
    except IncludeHandler.ExceptionCppInclude:
        pass
    self._incSim.validateCpStack()
    self.assertEqual([], self._incSim.cpStack)
    self.assertEqual(0, self._incSim.cpStackSize)
    # Load the initial translation unit.
    myFpo = self._incSim.initialTu('src/spam.c')
    self._incSim.validateCpStack()
    self.assertEqual(['src', ], self._incSim.cpStack)
    self.assertEqual(1, self._incSim.cpStackSize)
    self.assertNotEqual(None, myFpo)
    # Check the fields of the returned value.
    self.assertEqual('Contents of src/spam.c\n', myFpo.fileObj.read())
    self.assertEqual('src/spam.c', myFpo.filePath)
    self.assertEqual('src', myFpo.currentPlace)
    self.assertEqual('TU', myFpo.origin)
    # finalise() must raise while the translation unit is still open.
    self.assertRaises(IncludeHandler.ExceptionCppInclude,
                      self._incSim.finalise)
def preProcessForIncludes(theItu, incUsr, incSys, theDefineS, preIncS,
                          keepGoing, ignorePragma):
    """Pre-process a file for included files and return the set of paths.

    :param theItu: Path of the initial translation unit.
    :param incUsr: List of user include directories (may be falsy).
    :param incSys: List of system include directories (may be falsy).
    :param theDefineS: List of ``name=value`` macro definitions applied
        via a pseudo pre-include file.
    :param preIncS: List of paths of pre-include files to open and feed
        to the lexer.
    :param keepGoing: If True a keep-going diagnostic is installed.
    :param ignorePragma: If True a null pragma handler is installed.
    :returns: Set of included file paths with artificial files removed.

    Fixes: file objects opened for ``preIncS`` were previously never
    closed (resource leak); they are now closed once lexing finishes.
    """
    myIncH = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=incUsr or [],
        theSysDirs=incSys or [],
    )
    myPreIncFiles = []
    # Add macros in a pseudo pre-include.
    if theDefineS:
        myStr = '\n'.join(
            ['#define ' + ' '.join(d.split('=')) for d in theDefineS]) + '\n'
        myPreIncFiles = [io.StringIO(myStr), ]
    # Keep a separate list of the files we open so we can close them.
    myOpenedFiles = [open(f) for f in preIncS]
    myPreIncFiles.extend(myOpenedFiles)
    myDiag = None
    if keepGoing:
        myDiag = CppDiagnostic.PreprocessDiagnosticKeepGoing()
    myPh = None
    if ignorePragma:
        myPh = PragmaHandler.PragmaHandlerNull()
    # Create the lexer.
    myLexer = PpLexer.PpLexer(
        theItu,
        myIncH,
        preIncFiles=myPreIncFiles,
        diagnostic=myDiag,
        pragmaHandler=myPh,
    )
    logging.info('Preprocessing TU: %s' % theItu)
    try:
        # Consume the token stream; only the side effects are needed.
        for t in myLexer.ppTokens():
            pass
    finally:
        for aFile in myOpenedFiles:
            aFile.close()
    logging.info('Preprocessing TU done.')
    retVal = retIncludedFileSet(myLexer)
    # Remove any artificial files such as the pseudo pre-include.
    try:
        retVal.remove(PpLexer.UNNAMED_FILE_NAME)
    except KeyError:
        pass
    return retVal
def tree_codes(file_name):
    """Pre-processes a GCC tree-definition file and returns a list of
    (symbol, name, class, arity) string tuples, one per DEFTREECODE
    entry.

    Fixes: a reassembled line that failed to match the DEFTREECODE
    pattern previously crashed with a bare ``AttributeError`` on
    ``None.groups()``; it now raises a ValueError naming the offending
    code. The regex is also compiled once outside the loop.
    """
    report = []
    myH = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['proj/usr', ],
        theSysDirs=['proj/sys', ],
    )
    myLex = PpLexer.PpLexer(file_name, myH)
    # Matches e.g. DEFTREECODE(ERROR_MARK,"error_mark",tcc_exceptional,0)
    pattern = re.compile(r'DEFTREECODE\((\w+),"(\w+)",(\w+),(\d)\)')
    stack = []
    for tok in myLex.ppTokens():
        if tok.t == '\n':
            # End of a logical line: reassemble and parse it.
            code = ''.join(stack)
            if code and not re.match(r'^\s+$', code):
                m = pattern.match(code)
                if m is None:
                    raise ValueError(
                        'Can not parse DEFTREECODE from %r' % code)
                report.append(m.groups())
            stack = []
        elif tok.t == ' ' or re.match(r'^[\s\n\t]*$', tok.t):
            # Drop whitespace tokens.
            pass
        else:
            stack.append(tok.t)
    return report
def main():
    """Pre-processes the file named in sys.argv[1], printing the macro
    environment after every token, then the file include graph and the
    final macro environment."""
    print('Processing:', sys.argv[1])
    incHandler = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=['../usr', ],
        theSysDirs=['../sys', ],
    )
    lexer = PpLexer.PpLexer(sys.argv[1], incHandler)
    for aTok in lexer.ppTokens(minWs=True):
        # Other per-token state (condState, fileStack, fileLineCol)
        # could be printed here when debugging.
        print(lexer.macroEnvironment)
    print(' File Include Graph '.center(75, '='))
    print(lexer.fileIncludeGraphRoot)
    print(' File Include Graph END '.center(75, '='))
    print(' Macro Environment '.center(75, '='))
    print(lexer.macroEnvironment)
    print(' Macro Environment END '.center(75, '='))
def testConstructor(self):
    """A CppIncludeStd constructed with empty search path lists has a
    valid (empty) current-place stack."""
    handler = IncludeHandler.CppIncludeStd([], [])
    handler.validateCpStack()
def main():
    """Processes command line to preprocess a file or a directory.

    Fixes: ``time.clock()`` was removed in Python 3.8 and is replaced by
    ``time.perf_counter()``; the unused local ``program_version`` is
    removed. Multi-line string layout is reconstructed — original line
    breaks were lost in the source.
    """
    program_shortdesc = __import__('__main__').__doc__.split("\n")[1]
    program_license = """%s

  Created by Paul Ross on %s.
  Copyright 2008-2015. All rights reserved.
  Licensed under GPL 2.0

USAGE
""" % (program_shortdesc, str(__date__))
    parser = argparse.ArgumentParser(
        description=program_license,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "-c", action="store_true", dest="plot_conditional", default=False,
        help="Add conditionally included files to the plots."
             " [default: %(default)s]")
    parser.add_argument(
        "-d", "--dump", action="append", dest="dump", default=[],
        help="""Dump output, additive. Can be:
C - Conditional compilation graph.
F - File names encountered and their count.
I - Include graph.
M - Macro environment.
T - Token count.
R - Macro dependencies as an input to DOT. [default: %(default)s]""")
    parser.add_argument(
        "-g", "--glob", type=str, dest="glob", default="*.*",
        help="Pattern match to use when processing directories."
             " [default: %(default)s]")
    parser.add_argument(
        "--heap", action="store_true", dest="heap", default=False,
        help="Profile memory usage. [default: %(default)s]")
    parser.add_argument(
        "-j", "--jobs", type=int, dest="jobs", default=0,
        help="""Max simultaneous processes when pre-processing
directories. Zero uses number of native CPUs [%d].
1 means no multiprocessing.""" % multiprocessing.cpu_count()
        + " [default: %(default)s]")
    parser.add_argument(
        "-k", "--keep-going", action="store_true", dest="keep_going",
        default=False,
        help="Keep going. [default: %(default)s]")
    parser.add_argument(
        "-l", "--loglevel", type=int, dest="loglevel", default=30,
        help="Log Level (debug=10, info=20, warning=30, error=40,"
             " critical=50) [default: %(default)s]")
    parser.add_argument(
        "-o", "--output", type=str, dest="output", default="out",
        help="Output directory. [default: %(default)s]")
    parser.add_argument(
        "-p", action="store_true", dest="ignore_pragma", default=False,
        help="Ignore pragma statements. [default: %(default)s]")
    parser.add_argument(
        "-r", "--recursive", action="store_true", dest="recursive",
        default=False,
        help="Recursively process directories. [default: %(default)s]")
    parser.add_argument(
        "-t", "--dot", action="store_true", dest="include_dot",
        default=False,
        help="""Write an DOT include dependency table and execute DOT
on it to create a SVG file. [default: %(default)s]""")
    parser.add_argument(
        "-G", action="store_true", dest="gcc_extensions", default=False,
        help="""Support GCC extensions. Currently only #include_next."""
             """ [default: %(default)s]""")
    parser.add_argument(dest="path", nargs=1, help="Path to source file.")
    Cpp.addStandardArguments(parser)
    args = parser.parse_args()
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # recommended wall-clock replacement.
    clkStart = time.perf_counter()
    # Initialise logging etc.
    inPath = args.path[0]
    if args.jobs != 1 and os.path.isdir(inPath):
        # Multiprocessing: include the process ID in each log line.
        logFormat = '%(asctime)s %(levelname)-8s [%(process)5d] %(message)s'
    else:
        logFormat = '%(asctime)s %(levelname)-8s %(message)s'
    logging.basicConfig(
        level=args.loglevel,
        format=logFormat,
        stream=sys.stdout)
    # Memory usage dump requires the optional guppy package.
    if args.heap:
        try:
            from guppy import hpy
        except ImportError:
            print('Can not profile memory as you do not have guppy'
                  ' installed: http://guppy-pe.sourceforge.net/')
            args.heap = False
    # Start memory profiling if requested.
    if args.heap:
        myHeap = hpy()
        myHeap.setrelheap()
    else:
        myHeap = None
    # Create objects to pass to pre-processor.
    myIncH = IncludeHandler.CppIncludeStdOs(
        theUsrDirs=args.incUsr or [],
        theSysDirs=args.incSys or [],
    )
    preDefMacros = {}
    if args.predefines:
        for d in args.predefines:
            _tup = d.split('=')
            if len(_tup) == 2:
                preDefMacros[_tup[0]] = _tup[1] + '\n'
            elif len(_tup) == 1:
                preDefMacros[_tup[0]] = '\n'
            else:
                raise ValueError('Can not read macro definition: %s' % d)
    # Create the job specification.
    jobSpec = MainJobSpec(
        incHandler=myIncH,
        preDefMacros=preDefMacros,
        preIncFiles=Cpp.predefinedFileObjects(args),
        diagnostic=CppDiagnostic.PreprocessDiagnosticKeepGoing()
            if args.keep_going else None,
        pragmaHandler=PragmaHandler.PragmaHandlerNull()
            if args.ignore_pragma else None,
        keepGoing=args.keep_going,
        conditionalLevel=2 if args.plot_conditional else 0,
        dumpList=args.dump,
        helpMap=retOptionMap(parser, args),
        includeDOT=args.include_dot,
        cmdLine=' '.join(sys.argv),
        gccExtensions=args.gcc_extensions,
    )
    if os.path.isfile(inPath):
        preprocessFileToOutput(inPath, args.output, jobSpec)
        writeIndexHtml([inPath], args.output, jobSpec)
    elif os.path.isdir(inPath):
        preprocessDirToOutput(
            inPath,
            args.output,
            jobSpec,
            globMatch=args.glob,
            recursive=args.recursive,
            numJobs=args.jobs,
        )
    else:
        logging.fatal('%s is neither a file or a directory!' % inPath)
        return 1
    if args.heap and myHeap is not None:
        print('Dump of heap:')
        h = myHeap.heap()
        print(h)
        print()
        print('Dump of heap byrcs:')
        print(h.byrcs)
        print()
    clkExec = time.perf_counter() - clkStart
    print('CPU time = %8.3f (S)' % clkExec)
    print('Bye, bye!')
    return 0
def main(argv=None):
    """Command line options.

    Parses cpp-like arguments and pre-processes either the given path or
    stdin. Fixes: the unused local ``program_name`` is removed.

    :param argv: Optional extra arguments appended to ``sys.argv``.
    :returns: 0 on success.
    """
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)
    program_version = "v%s" % __version__
    program_build_date = str(__updated__)
    program_version_message = '%%(prog)s %s (%s)' % (program_version,
                                                     program_build_date)
    program_shortdesc = __import__('__main__').__doc__.split("\n")[1]
    program_license = """%s

  Created by Paul Ross on %s.
  Copyright 2015. All rights reserved.
  Licensed under the GPL License 2.0

USAGE
""" % (program_shortdesc, str(__date__))
    # Setup argument parser.
    parser = argparse.ArgumentParser(
        description=program_license,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "-v", "--verbose", dest="verbose", action="count", default=0,
        help="set verbosity level [default: %(default)s]")
    parser.add_argument(
        "-t", '--tokens', dest="tokens", action="store_true",
        help="Show actual preprocessing tokens.")
    parser.add_argument('-V', '--version', action='version',
                        version=program_version_message)
    parser.add_argument(
        dest="path", metavar="path", nargs='?',
        help="Paths to source file. If absent then stdin is processed."
             " [default: %(default)s]")
    # cpp like options.
    parser.add_argument(
        "-d", dest="macroOptions", action='append', default=[],
        help="Pre-processor options M, D and N."
             " [default: %(default)s]")
    parser.add_argument(
        "-E", dest="preprocess", action="store_true", required=True,
        help="Pre-process, required.")
    Cpp.addStandardArguments(parser)
    args = parser.parse_args()
    if args.path is None:
        # No path given: read the translation unit from stdin.
        myIncH = IncludeHandler.CppIncludeStdin(
            theUsrDirs=args.incUsr or [],
            theSysDirs=args.incSys or [],
        )
        ituName = 'stdin'
    else:
        myIncH = IncludeHandler.CppIncludeStdOs(
            theUsrDirs=args.incUsr or [],
            theSysDirs=args.incSys or [],
        )
        ituName = args.path
    _processFile(ituName, myIncH, Cpp.stdPredefinedMacros(args),
                 Cpp.predefinedFileObjects(args), args.tokens,
                 args.macroOptions)
    return 0
def setUp(self):
    """Creates a CppIncludeStdin with no user or system search paths."""
    self._incSim = IncludeHandler.CppIncludeStdin([], [])
def test_searchFileFails(self):
    """The CppIncludeStd base class does not implement file search, so
    _searchFile() must raise NotImplementedError."""
    handler = IncludeHandler.CppIncludeStd([], [])
    self.assertRaises(NotImplementedError,
                      handler._searchFile,
                      'NonExistentFile',
                      os.curdir)