def processFiles(glbDataParser, htmlGen, files):
    """Parse each file's ZWR globals, render its HTML, then free the data.

    For every file number in *files*, each backing ZWR file is parsed via
    the schema-driven parser, the accumulated FileMan data is written out
    as HTML, and the per-file data is dropped so memory can be reclaimed.
    """
    for fileNo in files:
        for zwrPath in glbDataParser.allFiles[fileNo]['path']:
            logger.progress("Parsing file: %s at %s" % (fileNo, zwrPath))
            glbDataParser.parseZWRGlobalFileBySchemaV2(zwrPath, fileNo)
        htmlGen.outputFileManDataAsHtml(fileNo, glbDataParser)
        # Release this file's parsed data before moving to the next one.
        glbDataParser.outFileManData.pop(fileNo)
        gc.collect()
def outputAllPackageDependency(crossRef, outputFile):
    """Summarize per-package dependency data and dump it as JSON.

    For every package in the cross reference, collect the set of packages
    it depends on (via routines, globals, FileMan files and FileMan DB
    calls), the reverse "dependents" relation, and counts of routines,
    FileMan files and fields.  The result is written to *outputFile* as a
    JSON list of per-package dictionaries.
    """
    dependents = {}
    depends = {}
    routines = {}
    files = {}
    fields = {}
    for pkg in itervalues(crossRef.getAllPackages()):
        pkgName = pkg.getName()
        logger.progress("Processing package %s" % pkgName)
        routines[pkgName] = len(pkg.getAllRoutines())
        # Count FileMan files and their fields under this package.
        fileCount = 0
        fieldCount = 0
        for globalVar in itervalues(pkg.getAllGlobals()):
            if not globalVar.isFileManFile():
                continue
            fileCount += 1
            fileManFields = globalVar.getAllFileManFields()
            if fileManFields:
                fieldCount += len(fileManFields)
        files[pkgName] = fileCount
        fields[pkgName] = fieldCount
        # Union the four dependency relations into one set per package.
        depends[pkgName] = set()
        for depGroup in (pkg.getPackageRoutineDependencies(),
                         pkg.getPackageGlobalDependencies(),
                         pkg.getPackageFileManFileDependencies(),
                         pkg.getPackageFileManDbCallDependencies()):
            for depPkg in depGroup:
                depPkgName = depPkg.getName()
                if depPkgName == pkgName:
                    # Skip self-references.
                    continue
                depends[pkgName].add(depPkgName)
                # Record the reverse ("dependents") edge as well.
                dependents.setdefault(depPkgName, set()).add(pkgName)
    # Build json output
    outJson = []
    for pkgName, depSet in depends.items():
        outJson.append({
            'name': pkgName,
            'depends': list(depSet),
            'dependents': list(dependents.get(pkgName, set())),
            'routines': routines[pkgName],
            'files': files[pkgName],
            'fields': fields[pkgName],
        })
    # Write json file
    with open(outputFile, "w") as output:
        json.dump(outJson, output)
def processFiles(glbDataParser, htmlGen, files):
    """Parse each file's ZWR globals, render its HTML, then free the data.

    Identical to the simple per-file flow except that the Package file
    ("9.4") is kept in memory because later files still reference it.
    """
    for fileNo in files:
        for zwrPath in glbDataParser.allFiles[fileNo]['path']:
            logger.progress("Parsing file: %s at %s" % (fileNo, zwrPath))
            glbDataParser.parseZWRGlobalFileBySchemaV2(zwrPath, fileNo)
        htmlGen.outputFileManDataAsHtml(fileNo, glbDataParser)
        # Pop out everything that isn't the package file.
        if fileNo != "9.4":
            glbDataParser.outFileManData.pop(fileNo)
        gc.collect()
def generateGraphs(self):
    """Generate every Dox graph in sequence.

    Runs package dependency/dependent graphs, routine call/caller
    graphs, and finally the color legend, logging progress before each.
    """
    steps = (
        ("Generate Package Dependencies graphs",
         self.generatePackageDependenciesGraph),
        ("Generate Package Dependents graphs",
         self.generatePackageDependentsGraph),
        ("Generate Routine Call graphs",
         self.generateRoutineCallGraph),
        ("Generate Routine Caller graphs",
         self.generateRoutineCallerGraph),
        ("Generate Color Legend",
         self.generateColorLegend),
    )
    for message, step in steps:
        logger.progress(message)
        step()
def run(args):
    """Build the cross reference from the parsed inputs, then generate graphs.

    Parses the ICR JSON file, constructs the cross reference with the
    template-dependency dictionaries from *args*, and drives the
    GraphGenerator over the result.
    """
    logger.progress("Parsing ICR JSON file....")
    icrJsonFile = os.path.abspath(args.icrJsonFile)
    parsedICRJSON = parseICRJson(icrJsonFile)
    doxDir = os.path.join(args.patchRepositDir, 'Utilities/Dox')
    builder = CrossReferenceBuilder()
    crossRef = builder.buildCrossReferenceWithArgs(
        args,
        icrJson=parsedICRJSON,
        inputTemplateDeps=readIntoDictionary(args.inputTemplateDep),
        sortTemplateDeps=readIntoDictionary(args.sortTemplateDep),
        printTemplateDeps=readIntoDictionary(args.printTemplateDep))
    logger.progress("Starting generating graphs....")
    generator = GraphGenerator(crossRef, args.outDir, doxDir, args.dot)
    generator.generateGraphs()
def parseFileManDbJSONFile(self, dbJsonFile):
    """Load a FileMan DB-call JSON file and attach its data to routines.

    Each package entry in the JSON lists routines; for every routine
    found in the cross reference, the recorded FileMan globals and
    FileMan calls are added.  Routines missing from the cross reference
    are logged and skipped.
    """
    logger.progress("Start parsing JSON file [%s]" % dbJsonFile)
    with open(dbJsonFile, 'r') as jsonFile:
        dbCallJson = json.load(jsonFile)
        for pkgItem in dbCallJson:
            # Walk all the routines recorded under this package.
            for rtnInfo in pkgItem['routines']:
                rtnName = rtnInfo['name']
                routine = self._crossRef.getRoutineByName(rtnName)
                if not routine:
                    logger.warn("Cannot find routine [%s]" % rtnName)
                    continue
                self._addFileManGlobals(routine, rtnInfo['Globals'])
                self._addFileManDBCalls(routine, rtnInfo['FileMan calls'])
def _generate(icrFile, icrJsonFile, MRepositDir=None, patchRepositDir=None,
              generateHTML=False, generatePDF=False, outDir=None,
              pdfOutDir=None, local=False):
    """Convert the ICR file to JSON, optionally rendering HTML/PDF output.

    JSON conversion always runs; HTML/PDF conversion runs only when one
    of the *generateHTML* / *generatePDF* flags is set.
    """
    generate_json(icrFile, icrJsonFile)
    if not (generateHTML or generatePDF):
        return
    # Look for date file was created -- needed by the converter.
    date = ICRSchema.getDate(icrFile)
    logger.progress("Convert JSON to HTML")
    convertJson(icrJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML=generateHTML, generatePDF=generatePDF,
                outDir=outDir, pdfOutDir=pdfOutDir, local=local)
def __init__(self):
    """Set up the generator around a fresh CrossReference instance."""
    logger.progress("Create cross reference generator...")
    self.crossRef = CrossReference()
def run(args):
    """Parse FileMan global data and render it as HTML.

    Builds the cross reference, indexes the FileMan data dictionary
    (file '1'), then parses either the explicitly requested file numbers
    or (with --all) the strongly-connected component set plus a list of
    files of special interest, writing each file's data out as HTML.

    Raises:
        ValueError: if a requested file number is unknown or has no
            global location.  (Previously these were ``assert``
            statements, which are silently stripped under ``python -O``.)
    """
    from InitCrossReferenceGenerator import parseCrossRefGeneratorWithArgs
    from FileManDataToHtml import FileManDataToHtml
    logger.progress("Start FileMan Global Data Parser")
    # Ensure that output directory exists
    doxDir = os.path.join(args.outDir, "dox")
    if not os.path.exists(doxDir):
        os.makedirs(doxDir)
    crossRef = parseCrossRefGeneratorWithArgs(args)
    # Populate glbDataParser with the FileMan data dictionary (file '1');
    # its own output data is not needed afterwards, so drop it.
    logger.progress("Populate global data parser")
    glbDataParser = FileManGlobalDataParser(args.MRepositDir, crossRef)
    glbDataParser.parseZWRGlobalFileBySchemaV2(
        glbDataParser.allFiles['1']['path'][0], '1', '^DIC(')
    del glbDataParser.outFileManData['1']
    glbDataParser.outdir = args.outDir
    glbDataParser.patchDir = args.patchRepositDir
    logger.progress("Fileman data to html")
    _doxURL = getDOXURL(args.local)
    _vivianURL = getViViaNURL(args.local)
    _filesURL = getFilesURL(args.local)
    htmlGen = FileManDataToHtml(crossRef, glbDataParser.schemaParser,
                                args.outDir, _doxURL, _vivianURL, _filesURL)
    if not args.all:
        logger.progress("Checking files...")
        # Validate the requested file numbers up front with explicit
        # exceptions instead of `assert` (stripped under `python -O`).
        unknown = set(args.fileNos) - set(glbDataParser.allFiles)
        if unknown:
            raise ValueError("Unknown file number(s): %s" % sorted(unknown))
        for fileNo in args.fileNos:
            if fileNo not in glbDataParser.globalLocationMap:
                raise ValueError(
                    "No global location for file number: %s" % fileNo)
        logger.progress("Process files...")
        processFiles(glbDataParser, htmlGen, args.fileNos)
    else:
        # Start with 'Strongly connected components'
        fileSet = glbDataParser.schemaParser.sccSet
        # Add files we're specifically interested in.
        fileSet.update([
            '101',     # Protocol
            '8994',    # Remote Procedure
            '19',      # Option
            '779.2',   # HLO Application
            '9.6',     # Build (needs to be before install)
            '9.7',     # Install
            '.5',      # Function
            '409.61',  # List Template
            '19.1',    # Security Key
            '9.2',     # Help Frame
            '.403',    # Form
            '.401',    # Sort Template
            '771',     # HL7 APPLICATION PARAMETER
        ])
        # Make sure to only use files that are in glbDataParser.allFiles
        fileSet &= set(glbDataParser.allFiles.keys())
        # HACK: Sorts by File number coerced to float. Fixes some
        # "dependency" issues between files needed for later information.
        #
        # Depends
        #   9.7 => 9.6, to write out dependency information for patches
        #   19  => 9.4, to find and query for Package names of options found.
        fileList = sorted(fileSet, key=float)
        numFiles = len(fileList)
        for n, fileNo in enumerate(fileList, start=1):
            logger.progress("Processing %s (file %d/%d)"
                            % (fileNo, n, numFiles))
            for zwrFile in glbDataParser.allFiles[fileNo]['path']:
                glbDataParser.generateFileIndex(zwrFile, fileNo)
        logger.progress("Process files...")
        processFiles(glbDataParser, htmlGen, fileList)
        glbDataParser.outRtnReferenceDict()
def parseAllCallGraphLog(xindexLogDir, crossRef, icrJson):
    """Parse every XINDEX caller-graph "*.log" file under *xindexLogDir*.

    Returns the populated CallerGraphLogFileParser.
    """
    logger.progress("Parse call graph logfiles")
    logParser = CallerGraphLogFileParser(crossRef, icrJson)
    logParser.parseAllCallerGraphLog(xindexLogDir, "*.log")
    return logParser
def generate_json(icrFile=None, icrJsonFile=None):
    """Convert an ICR text file into its JSON representation."""
    # Convert ICR file to JSON
    logger.progress("Convert ICR to JSON")
    convertICRToJson(icrFile, icrJsonFile)
def parseDataDictionaryLogFile(crossRef, fileSchemaDir):
    """Parse the data-dictionary listing logs under *fileSchemaDir*.

    Both visible ("*.schema") and hidden (".*.schema") schema files are
    processed; returns the populated DataDictionaryListFileLogParser.
    """
    logger.progress("Parse data dictionary logfile")
    ddParser = DataDictionaryListFileLogParser(crossRef)
    for pattern in ("*.schema", ".*.schema"):
        ddParser.parseAllDataDictionaryListLog(fileSchemaDir, pattern)
    return ddParser