def __parseDataDictionaryLogFile__(self, logFileName):
    """Parse a FileMan data-dictionary ``<fileNo>.schema`` log file.

    Determines the FileMan global from the file name, then walks the file
    line by line, dispatching each line to the section parser registered
    for the current section in ``self._sectionParserDict``.

    :param logFileName: path to the ``.schema`` log file to parse
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    baseName = os.path.basename(logFileName)
    # the file number is the basename with the ".schema" suffix removed
    fileNo = baseName[:-len(".schema")]
    self._curGlobal = self._crossRef.getGlobalByFileNo(fileNo)
    if not self._curGlobal:
        logger.error("Could not find global based on file# %s" % fileNo)
        return
    # context manager so the handle is always closed (the original
    # leaked the open file)
    with open(logFileName, "rb") as logFileHandle:
        for line in logFileHandle:
            line = line.rstrip("\r\n")
            if len(line) == 0:  # ignore the empty line
                continue
            section = self.__isSectionHeader__(line)
            if section:
                if section != self.FILEMAN_FIELD_SECTION:
                    logger.debug("Current Section is %d [%s]" % (section, line))
                # close out the previous section before switching
                if self._curSect and self._curParser:
                    self._curParser.onSectionEnd(line, self._curSect,
                                                 self._curGlobal, self._crossRef)
                self._curSect = section
                self._curParser = self._sectionParserDict.get(self._curSect)
                if self._curParser:
                    self._curParser.onSectionStart(line, self._curSect,
                                                   self._curGlobal, self._crossRef)
            elif self._curSect and self._curParser:
                self._curParser.parseLine(line, self._curGlobal, self._crossRef)
def _getPackageHRefLink(pkgName, icrEntry, **kargs):
    """Return an HTML anchor linking to pkgName's package page.

    Falls back to returning pkgName unchanged when no cross reference is
    supplied or no mapping/package can be found.

    :param pkgName: package name as it appears in the ICR entry
    :param icrEntry: the ICR entry being rendered (unused here)
    :param kargs: must contain 'crossRef' with the CrossReference object
    """
    if 'crossRef' not in kargs:
        logger.error('No CrossReference given. Cannot find mapping for package: [%s]', pkgName)
        return pkgName
    crossRef = kargs['crossRef']
    # 1) explicit package-name mapping: try as-is, then upper-cased
    #    (the original duplicated this lookup verbatim)
    for candidate in (pkgName, pkgName.upper()):
        mappedPkgName = crossRef.getMappedPackageName(candidate)
        if mappedPkgName is not None:
            pkgLink = getPackageHtmlFileName(mappedPkgName)
            return '<a href="%s/%s">%s</a>' % (DOX_URL, pkgLink, pkgName)
    # 2) direct package lookup, then with normalized name (title-cased
    #    first, raw normalized second)
    pkg = crossRef.getPackageByName(pkgName)
    if not pkg:
        pkg = crossRef.getPackageByName(
            crossRef.normalizePackageName(pkgName).title())
    if not pkg:
        pkg = crossRef.getPackageByName(crossRef.normalizePackageName(pkgName))
    if pkg:
        pkgLink = getPackageHtmlFileName(pkg.getName())
        return '<a href="%s/%s">%s</a>' % (DOX_URL, pkgLink, pkgName)
    logger.warning('Cannot find mapping for package: [%s]', pkgName)
    return pkgName
def generateRoutineDependencyGraph(self, routine, isDependency=True):
    """Prepare the output directory for a routine dependency graph.

    :param routine: the Routine whose graph is being generated
    :param isDependency: True -> called-routines graph ("_called"),
                         False -> caller-routines graph ("_caller")
    """
    if not routine.getPackage():
        return
    routineName = routine.getName()
    packageName = routine.getPackage().getName()
    if isDependency:
        depRoutines = routine.getCalledRoutines()
        routineSuffix = "_called"
        totalDep = routine.getTotalCalled()
    else:
        depRoutines = routine.getCallerRoutines()
        routineSuffix = "_caller"
        totalDep = routine.getTotalCaller()
    # skip when there is nothing to draw, or the graph would exceed the
    # size cap (the original also had a redundant len(...) == 0 check)
    if not depRoutines or totalDep > MAX_DEPENDENCY_LIST_SIZE:
        logger.debug("No called Routines found! for routine:%s package:%s" %
                     (routineName, packageName))
        return
    # compute the path outside the try so it is always bound in `except`
    dirName = os.path.join(self._outDir, packageName)
    try:
        if not os.path.exists(dirName):
            os.makedirs(dirName)
    except OSError as e:  # `except OSError, e` is Python-2-only syntax
        logger.error("Error making dir %s : Error: %s" % (dirName, e))
        return
def __parseDataDictionaryLogFile__(self, logFileName):
    """Parse a FileMan data-dictionary ``<fileNo>.schema`` log file.

    Dispatches each non-empty line to the section parser registered for
    the current section in ``self._sectionParserDict``.

    :param logFileName: path to the ``.schema`` log file to parse
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    baseName = os.path.basename(logFileName)
    # file number is the basename minus the ".schema" suffix
    fileNo = baseName[:-len(".schema")]
    self._curGlobal = self._crossRef.getGlobalByFileNo(fileNo)
    if not self._curGlobal:
        logger.error("Could not find global based on file# %s" % fileNo)
        return
    # context manager so the handle is always closed (original leaked it)
    with open(logFileName, "rb") as logFileHandle:
        for line in logFileHandle:
            line = line.rstrip("\r\n")
            if not line:  # ignore the empty line
                continue
            section = self.__isSectionHeader__(line)
            if section:
                # close out the previous section before switching
                if self._curSect and self._curParser:
                    self._curParser.onSectionEnd(line, self._curSect,
                                                 self._curGlobal, self._crossRef)
                self._curSect = section
                self._curParser = self._sectionParserDict.get(self._curSect)
                if self._curParser:
                    self._curParser.onSectionStart(line, self._curSect,
                                                   self._curGlobal, self._crossRef)
            elif self._curSect and self._curParser:
                self._curParser.parseLine(line, self._curGlobal, self._crossRef)
def parseXindexLogFile(self, logFileName):
    """Parse an XINDEX output log, driving per-section handlers.

    Maintains a section stack so nested sections resume their parent's
    handler when they end.

    :param logFileName: path to the XINDEX log file
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    # context manager so the handle is always closed (original leaked it)
    with open(logFileName, "rb") as logFile:
        for curLine in logFile:
            curLine = curLine.rstrip("\r\n")
            # skip pager prompts and cross-reference banner lines
            if pressReturn.search(curLine) or crossRef.search(curLine):
                continue
            # check to see if it is a section header or we are just in the
            # routine header part
            if not self._curSection or self._curSection == IXindexLogFileParser.ROUTINE:
                sectionHeader = self.__isSectionHeader__(curLine)
                if sectionHeader:
                    self._curSection = sectionHeader
                    self._curHandler = self._sectHandleDict.get(sectionHeader)
                    if self._curHandler:
                        self._curHandler.onSectionStart(curLine, sectionHeader)
                    self._sectionStack.append(sectionHeader)
                    continue
            if self.__isEndOfSection__(curLine, self._curSection):
                if self._curHandler:
                    self._curHandler.onSectionEnd(curLine, self._curSection,
                                                  self._curRoutine, self._crossRef)
                # invariant: the ending section must be the top of the stack
                assert(self._curSection == self._sectionStack.pop())
                if len(self._sectionStack) > 0:
                    # resume the enclosing section
                    self._curSection = self._sectionStack[-1]
                    self._curHandler = self._sectHandleDict[self._curSection]
                else:
                    self._curSection = None
                    self._curHandler = None
                continue
            if self._curHandler:
                self._curHandler.parseLine(curLine, self._curRoutine, self._crossRef)
def _updateRPCRefence(self):
    # Categorize each RPC entry (file #8994) by package namespace and
    # record which routine each RPC calls for routine-page references.
    rpcData = self._glbData['8994']
    # sort IENs numerically, not lexically
    for ien in sorted(rpcData.dataEntries.keys(), key=lambda x: float(x)):
        rpcEntry = rpcData.dataEntries[ien]
        rpcRoutine = None
        if rpcEntry.name:
            # first try to categorize by the RPC's own name namespace
            namespace, package = \
                self._crossRef.__categorizeVariableNameByNamespace__(rpcEntry.name)
            if package:
                package.rpcs.append(rpcEntry)
            if '.03' in rpcEntry.fields:
                # field .03 holds the routine invoked by this RPC
                rpcRoutine = rpcEntry.fields['.03'].value
            else:
                if rpcRoutine:
                    # NOTE(review): rpcRoutine is still None on this path, so
                    # this branch looks unreachable — confirm intended nesting
                    # (possibly meant to trigger when `package` is None).
                    """ try to categorize by routine called """
                    namespace, package = \
                        self._crossRef.__categorizeVariableNameByNamespace__(rpcRoutine)
                    if package:
                        package.rpcs.append(rpcEntry)
                else:
                    logger.error("Cannot find package for RPC: %s" % (rpcEntry.name))
        """ Generate the routine referenced based on RPC Call """
        if rpcRoutine:
            rpcInfo = {"name": rpcEntry.name, "ien" : ien }
            if '.02' in rpcEntry.fields:
                # field .02 is the tag within the routine
                rpcTag = rpcEntry.fields['.02'].value
                rpcInfo['tag'] = rpcTag
            self._rtnRefDict.setdefault(rpcRoutine,{}).setdefault('8994',[]).append(rpcInfo)
def onSectionStart(self, line, section, crossRef):
    # Handle the start of a Routine section in an XINDEX log: resolve the
    # named routine (or a dd-style global object) and make it current.
    # Returns True when a routine was resolved, False on error.
    if section != IXindexLogFileParser.ROUTINE:
        logger.error("Invalid section Header --> %s", line)
        return False
    routineName = RoutineStart.search(line).group('name')
    if validRoutineName.search( routineName ) != None:
        #, "Invalid RoutineName: [%s] Line: [%s]" % (routineName, line)
        # platform-dependent routines bypass the rename logic
        if self._crossRef.isPlatformDependentRoutineByName(routineName):
            self._curRoutine = self._crossRef.getPlatformDependentRoutineByName(
                routineName)
            return True
        renamedRoutineName = routineName
        if self._crossRef.routineNeedRename(routineName):
            renamedRoutineName = self._crossRef.getRenamedRoutineName(
                routineName)
        if not self._crossRef.hasRoutine(renamedRoutineName):
            logger.error("Invalid Routine: '%s': Rename Routine '%s'" %
                         (routineName, renamedRoutineName))
            return False
        self._curRoutine = self._crossRef.getRoutineByName(
            renamedRoutineName)
        # NOTE(review): `structuredSource` is not defined anywhere in this
        # scope — confirm where it is supposed to come from.
        self._curRoutine._structuredCode = structuredSource
        self._curPackage = self._curRoutine.getPackage()
        return True
    # "dd"-prefixed names refer to FileMan data dictionaries (e.g. dd802.0)
    if validObject.search(routineName).group("name")[:2] == "dd":
        fileNo = validObject.search(routineName).group("name")[2:]
        if '.' not in fileNo:
            fileNo += ".0"
        self._curRoutine = self._crossRef.getGlobalByFileNo(fileNo)
        if not self._curRoutine:
            # fall back to sub-file lookup; NOTE(review): setPackage is
            # applied only on this fallback path — confirm intended nesting.
            self._curRoutine = self._crossRef.getFileManSubFileByFileNo(
                fileNo)
            if self._curRoutine:
                self._curRoutine.setPackage(self._curPackage)
def __parsingVariablePointer__(self, Global, CrossReference):
    # Parse the variable-pointer ("V" type) table of a field definition and
    # collect the list of files this field may point to.
    index, fileList, found = 0, None, False
    indentValue = self.__getDefaultIndentLevel__(self._curFile,
                                                 self.DEFAULT_NAME_INDENT)
    for index in range(len(self._lines)):
        if not found:
            # scan forward for the table header line first
            if re.search("^ {%d,%d}FILE ORDER PREFIX LAYGO MESSAGE$" %
                         (self.DEFAULT_NAME_INDENT, indentValue),
                         self._lines[index]):
                found = True
                continue
        else:
            # a blank (all-spaces) line at the table indent ends the table
            if re.search("^ {%d,}$" % indentValue, self._lines[index]):
                break
            else:
                # each table row starts with the pointed-to file number
                result = re.search("^ +(?P<File>[0-9\.]+) +", self._lines[index])
                if result:
                    filePointedTo = CrossReference.getGlobalByFileNo(
                        result.group('File'))
                    if not filePointedTo:
                        # log an error for now, will handle this case later
                        logger.error(
                            "INVALID File! File is %s, Global is %s" %
                            (result.group('File'), Global))
                        continue
                    if not fileList:
                        fileList = []
                    fileList.append(filePointedTo)
    self._field.setPointedToFiles(fileList)
def generateFileFieldMap(self, inputFileList, fileNumber, fieldNo):
    """Build a {field value -> IEN} map for one field of a FileMan file.

    Reads every ZWR file in inputFileList at the file's global location
    and records the value of field fieldNo for each entry.
    """
    schemaFile = self._allSchemaDict[fileNumber]
    if not schemaFile.hasField(fieldNo):
        logger.error("File does not have a [%s] field, ignore", fieldNo)
        return dict()
    keyField = schemaFile.getFileManFieldByFieldNo(fieldNo)
    keyLoc = keyField.getLocation()
    if not keyLoc:
        logger.error("[%s] field does not have a location", fieldNo)
        return dict()
    glbLoc = self._glbLocMap[fileNumber]
    fieldMap = {}
    for zwrFile in inputFileList:
        for node in readGlobalNodeFromZWRFileV2(zwrFile, glbLoc):
            if not node:
                continue
            ien, detail = self._getKeyNameBySchema(node, keyLoc, keyField)
            if detail:
                fieldMap[detail] = ien
    return fieldMap
def __createFieldByType__(self, fieldNo, fType, fName, fLocation, line,
                          Global, CrossReference):
    # Create self._field from the textual type description fType.  When the
    # first parse fails, re-slices type/location from the raw line at the
    # maximum type start column and recurses once.
    logger.debug("Current Type is [%s]" % fType)
    # pointer whose target file is not defined in this dictionary
    result = self.UNDEFINED_POINTER.search(fType)
    if result:
        self._field = FileManFieldFactory.createField(
            fieldNo, fName, FileManField.FIELD_TYPE_FILE_POINTER, fLocation)
        return
    # "POINTER TO <file>" style pointer
    result = self.POINTER_TO_REGEX.search(fType)
    if result:
        fileNo = result.group('File')
        filePointedTo = CrossReference.getGlobalByFileNo(fileNo)
        self._field = FileManFieldFactory.createField(
            fieldNo, fName, FileManField.FIELD_TYPE_FILE_POINTER, fLocation)
        if not filePointedTo:
            logger.error(
                "Could not find file pointed to [%s], [%s], line:[%s]"
                % (fileNo, self._curFile, line))
        else:
            self._field.setPointedToFile(filePointedTo)
        return
    # deal with file pointer to subFiles
    result = self.SUBFILE_REGEX.search(fType)
    if result:
        # create a field for sub file type
        self._field = FileManFieldFactory.createField(
            fieldNo, fName, FileManField.FIELD_TYPE_SUBFILE_POINTER, fLocation)
        fileNo = result.group('File')
        logger.debug("Pointer to subFile %s" % fileNo)
        subFile = Global.getSubFileByFileNo(fileNo)
        if not subFile:
            # first sighting of this sub-file: register it on the file
            subFile = FileManFile(fileNo, fName, self._curFile)
            self._curFile.addFileManSubFile(subFile)
            logger.debug("Added subFile %s to File %s" %
                         (fileNo, self._curFile.getFileNo()))
            if self._isSubFile:
                Global.addFileManSubFile(subFile)
        self._field.setPointedToSubFile(subFile)
        return
    # simple string-typed fields: map by type-name prefix
    for (key, value) in self.StringTypeMappingDict.iteritems():
        if fType.startswith(key):
            self._field = FileManFieldFactory.createField(
                fieldNo, fName, value, fLocation)
            break
    if not self._field:
        # double check the loc and type
        if line.find(fType) > self.MAXIMIUM_TYPE_START_INDEX:
            fType = line[self.MAXIMIUM_TYPE_START_INDEX:]
            if fLocation:
                fLocation = line[line.find(fLocation):self.MAXIMIUM_TYPE_START_INDEX]
            logger.warn("new Type is [%s], loc is [%s]" % (fType, fLocation))
            # retry once with the re-sliced type/location
            self.__createFieldByType__(fieldNo, fType, fName, fLocation,
                                       line, Global, CrossReference)
    assert self._field, "Could not find the right type for %s, %s, %s, %s, %s" % (
        fType, fLocation, fieldNo, line, self._curFile.getFileNo())
def generateFileIndex(self, inputFileName, fileNumber):
    # Build the IEN -> .01-field-value index for fileNumber from a ZWR file.
    schemaFile = self._allSchemaDict[fileNumber]
    if not schemaFile.hasField('.01'):
        logger.error("File %s does not have a .01 field, ignore" % fileNumber)
        return
    keyField = schemaFile.getFileManFieldByFieldNo('.01')
    keyLoc = keyField.getLocation()
    if not keyLoc:
        logger.error("File %s .01 field does not have a location, ignore" % fileNumber)
        return
    self._curFileNo = fileNumber
    # only files with a known global location can be indexed
    if fileNumber in self._glbLocMap:
        glbLoc = self._glbLocMap[fileNumber]
        for dataRoot in readGlobalNodeFromZWRFileV2(inputFileName, glbLoc):
            if not dataRoot:
                continue
            self._dataRoot = dataRoot
            fileDataRoot = dataRoot
            # extract (IEN, .01 value) per the schema's storage location
            (ien, detail) = self._getKeyNameBySchema(fileDataRoot, keyLoc,
                                                     keyField)
            if detail:
                self._addFileKeyIndex(fileNumber, ien, detail)
def parseLine(self, line, Global, CrossReference):
    # Parse one "POINTED TO BY" line and register the pointing file
    # (with field and optional sub-file numbers) on self._global.
    assert self._global
    strippedLine = line.rstrip(" ")
    if len(strippedLine) == 0:
        return
    # the reference text starts at a fixed column
    value = strippedLine[self.POINTED_TO_BY_VALUE_INDEX:]
    logger.debug("Parsing line [%s]" % value)
    result = self.POINTED_TO_BY_VALUE.search(value)
    if result:
        fileManNo = result.group("FileNo")
        fieldNo = result.group('fieldNo')
        subFileNo = result.group('subFieldNo')
        logger.debug("File # %s, field # %s, sub-field # %s" %
                     (fileManNo, fieldNo, subFileNo))
        pointedByGlobal = CrossReference.getGlobalByFileNo(fileManNo)
        if pointedByGlobal:
            self._global.addPointedToByFile(pointedByGlobal, fieldNo, subFileNo)
            logger.debug("added global to pointed list: %s, %s, %s" %
                         (fileManNo, fieldNo, subFileNo))
        else:
            logger.warning("Could not find global based on %s, %s" %
                           (fileManNo, result.group("Name")))
    else:
        logger.error(
            "Could not parse pointer reference [%s] in file [%s]" %
            (line, self._global.getFileNo()))
def parseLine(self, line, Routine, CrossReference):
    # Parse one variables-section line of an XINDEX log for Routine.
    if self.__ignoreLine__(line):
        return
    # handle three cases:
    # 1. continuation of the previous info with value info
    # 2. Name too long.
    # 3. normal name/value pair
    result = self.__isNameValuePairLine__(line)
    if result:
        # a pending too-long-name line is resolved by the next pair line
        if self._suspiousLine:
            self.__handleSuspiousCases__(Routine, CrossReference)
            self._suspiousLine = False
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._varValue = line[self._valueStartIdx:]
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:self._valueStartIdx].strip()
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    result = self.__isValueOnlyLine__(line)
    if result:
        # continuation line: value belongs to the previously seen name
        self._suspiousLine = False
        self._varValue = line[self._valueStartIdx:].strip()
        if not self._varName:
            logger.error("No varname is set, Routine: %s line: %s" %
                         (Routine, line))
            return
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    result = self.__isLongNameLine__(line)
    if result:
        ''' Check that Global information doesn't happen to touch the rest of the info
            Global Variables ( * Changed ! Killed)
            ^AUTTHF("B" ISDUE+13
            ^PXRMINDX(9000010.23ISDUE+14,ISDUE+16 <<<< What we are trying to capture
            ^TMP($J LIST+6,LIST+10*,LIST+12,LIST+14,LIST+15*,LIST+16!
        '''
        match = re.search("(?P<globalName>^ +\^[A-Z]+[(][0-9.]+)+(?P<locationInfo>.+$)", line)
        if match:
            # name and location ran together on one line; split them apart
            self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
            self._varValue = match.groups()[1]
            self._varName = match.groups()[0].strip()
            if self._addVarToRoutine:
                self._addVarToRoutine(Routine, CrossReference)
            if self._postParsingRoutine:
                self._postParsingRoutine(Routine, CrossReference)
            return
        # otherwise remember this line; the value should follow on the next
        if self._suspiousLine:
            self.__handleSuspiousCases__(Routine, CrossReference)
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:].strip()
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._suspiousLine = True
        return
    logger.error("Could not handle this, Routine: %s, line: %s" %
                 (Routine, line))
def generateRoutineDependencyGraph(self, routine, isDependency=True):
    """Prepare the output directory for a routine dependency graph.

    :param routine: the Routine whose graph is being generated
    :param isDependency: True -> called-routines graph ("_called"),
                         False -> caller-routines graph ("_caller")
    """
    if not routine.getPackage():
        return
    routineName = routine.getName()
    packageName = routine.getPackage().getName()
    if isDependency:
        depRoutines = routine.getCalledRoutines()
        routineSuffix = "_called"
        totalDep = routine.getTotalCalled()
    else:
        depRoutines = routine.getCallerRoutines()
        routineSuffix = "_caller"
        totalDep = routine.getTotalCaller()
    # skip when there is nothing to draw, or the graph would exceed the
    # size cap (the original also had a redundant len(...) == 0 check)
    if not depRoutines or totalDep > MAX_DEPENDENCY_LIST_SIZE:
        logger.debug(
            "No called Routines found! for routine:%s package:%s" %
            (routineName, packageName))
        return
    # compute the path outside the try so it is always bound in `except`
    dirName = os.path.join(self._outDir, packageName)
    try:
        if not os.path.exists(dirName):
            os.makedirs(dirName)
    except OSError as e:  # `except OSError, e` is Python-2-only syntax
        logger.error("Error making dir %s : Error: %s" % (dirName, e))
        return
def _addFileManDBCalls(self, routine, callLists):
    # Record each FileMan DB call made by `routine`.  Each callDetail is
    # expected to look like "TAG(fileNo".
    for callDetail in callLists:
        if self.isFunctionIgnored(callDetail):
            logger.debug("Ignore call detail %s" % callDetail)
            continue
        fnIdx = callDetail.find('(')
        if fnIdx < 0:
            logger.error("Can not extract fileman number from %s" % callDetail)
            continue
        callTag = callDetail[:fnIdx]
        fileNo = callDetail[fnIdx+1:]
        fileManFile = self._crossRef.getGlobalByFileNo(fileNo)
        if fileManFile:
            logger.debug("FileMan: Adding fileMan:[%s] to routine:[%s]" %
                         (fileNo, routine.getName()))
            routine.addFilemanDbCallGlobal(fileManFile, callTag)
        else:
            if self._crossRef.isFileManSubFileByFileNo(fileNo):  # subfile
                subFile = self._crossRef.getFileManSubFileByFileNo(fileNo)
                rootFile = self._crossRef.getSubFileRootByFileNo(fileNo)
                # every registered sub-file must have a root file
                assert rootFile
                logger.debug("FileMan: Adding subFile:[%s] to routine:[%s]" %
                             (subFile, routine.getName()))
                routine.addFilemanDbCallGlobal(subFile, callTag)
            else:
                logger.error("file #%s[%s] is not a valid fileman file, for"
                             " routine [%s]" % (fileNo, callDetail, routine))
def __parseDataDictionaryLogFile__(self, logFileName):
    """Parse a FileMan data-dictionary ``<fileNo>.schema`` log file.

    Opens the file as Latin-1 text (ignoring decode errors) and dispatches
    each non-empty line to the section parser registered for the current
    section in ``self._sectionParserDict``.

    :param logFileName: path to the ``.schema`` log file to parse
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    baseName = os.path.basename(logFileName)
    # file number is the basename minus the ".schema" suffix
    fileNo = baseName[:-len(".schema")]
    self._curGlobal = self._crossRef.getGlobalByFileNo(fileNo)
    if not self._curGlobal:
        logger.warning("Could not find global based on file# %s" % fileNo)
        return
    # context manager so the handle is always closed (original leaked it)
    with codecs.open(logFileName, 'r', encoding='ISO-8859-1',
                     errors='ignore') as logFileHandle:
        for line in logFileHandle:
            line = line.rstrip("\r\n")
            if not line:  # ignore the empty line
                continue
            section = self.__isSectionHeader__(line)
            if section:
                # close out the previous section before switching
                if self._curSect and self._curParser:
                    self._curParser.onSectionEnd(line, self._curSect,
                                                 self._curGlobal, self._crossRef)
                self._curSect = section
                self._curParser = self._sectionParserDict.get(self._curSect)
                if self._curParser:
                    self._curParser.onSectionStart(line, self._curSect,
                                                   self._curGlobal, self._crossRef)
            elif self._curSect and self._curParser:
                self._curParser.parseLine(line, self._curGlobal, self._crossRef)
def _updateRPCRefence(self):
    # Categorize each RPC entry (file #8994) by package namespace and
    # record which routine each RPC calls for routine-page references.
    rpcData = self._glbData['8994']
    # sort IENs numerically, not lexically
    for ien in sorted(rpcData.dataEntries.keys(), key=lambda x: float(x)):
        rpcEntry = rpcData.dataEntries[ien]
        rpcRoutine = None
        if rpcEntry.name:
            # first try to categorize by the RPC's own name namespace
            namespace, package = \
                self._crossRef.__categorizeVariableNameByNamespace__(rpcEntry.name)
            if package:
                package.rpcs.append(rpcEntry)
            if '.03' in rpcEntry.fields:
                # field .03 holds the routine invoked by this RPC
                rpcRoutine = rpcEntry.fields['.03'].value
            else:
                if rpcRoutine:
                    # NOTE(review): rpcRoutine is still None on this path, so
                    # this branch looks unreachable — confirm intended nesting
                    # (possibly meant to trigger when `package` is None).
                    """ try to categorize by routine called """
                    namespace, package = \
                        self._crossRef.__categorizeVariableNameByNamespace__(rpcRoutine)
                    if package:
                        package.rpcs.append(rpcEntry)
                else:
                    logger.error("Cannot find package for RPC: %s" % (rpcEntry.name))
        """ Generate the routine referenced based on RPC Call """
        if rpcRoutine:
            rpcInfo = {"name": rpcEntry.name, "ien": ien}
            if '.02' in rpcEntry.fields:
                # field .02 is the tag within the routine
                rpcTag = rpcEntry.fields['.02'].value
                rpcInfo['tag'] = rpcTag
            self._rtnRefDict.setdefault(rpcRoutine, {}).setdefault(
                '8994', []).append(rpcInfo)
def _addFileManDBCalls(self, routine, callLists):
    # Record each FileMan DB call made by `routine`.  Each callDetail is
    # expected to look like "TAG(fileNo".
    for callDetail in callLists:
        if self.isFunctionIgnored(callDetail):
            logger.debug("Ignore call detail %s" % callDetail)
            continue
        fnIdx = callDetail.find('(')
        if fnIdx < 0:
            logger.error("Can not extract fileman number from %s" % callDetail)
            continue
        callTag = callDetail[:fnIdx]
        fileNo = callDetail[fnIdx + 1:]
        fileManFile = self._crossRef.getGlobalByFileNo(fileNo)
        if fileManFile:
            logger.debug("FileMan: Adding fileMan:[%s] to routine:[%s]" %
                         (fileNo, routine.getName()))
            routine.addFilemanDbCallGlobal(fileManFile, callTag)
        else:
            if self._crossRef.isFileManSubFileByFileNo(fileNo):  # subfile
                subFile = self._crossRef.getFileManSubFileByFileNo(fileNo)
                rootFile = self._crossRef.getSubFileRootByFileNo(fileNo)
                # every registered sub-file must have a root file
                assert rootFile
                logger.debug(
                    "FileMan: Adding subFile:[%s] to routine:[%s]" %
                    (subFile, routine.getName()))
                routine.addFilemanDbCallGlobal(subFile, callTag)
            else:
                logger.error(
                    "file #%s[%s] is not a valid fileman file, for"
                    " routine [%s]" % (fileNo, callDetail, routine))
def parseXindexLogFile(self, logFileName):
    """Parse an XINDEX output log, driving per-section handlers.

    Maintains a section stack so nested sections resume their parent's
    handler when they end.

    :param logFileName: path to the XINDEX log file
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    # context manager so the handle is always closed (original leaked it)
    with open(logFileName, "rb") as logFile:
        for curLine in logFile:
            curLine = curLine.rstrip("\r\n")
            # skip pager prompts and cross-reference banner lines
            if pressReturn.search(curLine) or crossRef.search(curLine):
                continue
            # check to see if it is a section header or we are just in the
            # routine header part
            if not self._curSection or self._curSection == IXindexLogFileParser.ROUTINE:
                sectionHeader = self.__isSectionHeader__(curLine)
                if sectionHeader:
                    self._curSection = sectionHeader
                    self._curHandler = self._sectHandleDict.get(sectionHeader)
                    if self._curHandler:
                        self._curHandler.onSectionStart(curLine, sectionHeader)
                    self._sectionStack.append(sectionHeader)
                    continue
            if self.__isEndOfSection__(curLine, self._curSection):
                if self._curHandler:
                    self._curHandler.onSectionEnd(curLine, self._curSection,
                                                  self._curRoutine, self._crossRef)
                # invariant: the ending section must be the top of the stack
                assert (self._curSection == self._sectionStack.pop())
                if len(self._sectionStack) > 0:
                    # resume the enclosing section
                    self._curSection = self._sectionStack[-1]
                    self._curHandler = self._sectHandleDict[self._curSection]
                else:
                    self._curSection = None
                    self._curHandler = None
                continue
            if self._curHandler:
                self._curHandler.parseLine(curLine, self._curRoutine, self._crossRef)
def createGlobalNode(inputLine, globalRoot=None):
    """Create/extend a GlobalNode tree from one ZWR input line.

    When globalRoot is None a new root node is created and returned;
    otherwise the parsed node is attached under the existing root.
    """
    nodeIndex, nodeValue, nodeRoot = findSubscriptValue(inputLine)
    retRoot = globalRoot
    if nodeIndex:
        # un-escape ZWR's doubled quotes inside the stored value
        if nodeValue:
            nodeValue = nodeValue.replace('""', '"')
        if globalRoot:
            curNode = retRoot.getRootNode()
            if curNode.subscript != nodeRoot:
                logger.error("Global Node root subscript mismatch: %s, %s" %
                             (nodeRoot, curNode.subscript))
        else:
            retRoot = GlobalNode(subscript=nodeRoot)
            curNode = retRoot
        # walk (creating as needed) the intermediate subscripts, then
        # attach the leaf node carrying the value
        for sub in nodeIndex[:-1]:
            if sub not in curNode:
                curNode[sub] = GlobalNode()
            curNode = curNode[sub]
        curNode[nodeIndex[-1]] = GlobalNode(nodeValue)
    return retRoot
def convertJson(inputJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML, generatePDF, outDir=None, pdfOutDir=None,
                local=False):
    # Convert the ICR JSON dump into per-entry HTML and/or PDF pages plus
    # summary pages grouped by custodial package.
    # Raises Exception when neither output kind (or its directory) is given.
    if not generateHTML and not generatePDF:
        raise Exception("Nothing to generate!")
    # module-level URL bases depend on whether a local build is requested
    global DOX_URL
    global VIVIAN_URL
    DOX_URL = getDOXURL(local)
    VIVIAN_URL = getViViaNURL(local)
    if generateHTML:
        if not outDir:
            raise Exception("Must specify Output directory")
        if not os.path.exists(outDir):
            # Will also create intermediate directories if needed
            os.makedirs(outDir)
    if generatePDF:
        if not pdfOutDir:
            raise Exception("Must specify PDF Output directory")
        # Will also create intermediate directories if needed
        if not os.path.exists(pdfOutDir):
            os.makedirs(pdfOutDir)
    crossRef = parseCrossReferenceGeneratorArgs(MRepositDir,
                                                patchRepositDir)
    # RPC name -> IEN map used when rendering RPC links
    global RPC_NAME_TO_IEN_MAPPING
    RPC_NAME_TO_IEN_MAPPING = generateSingleFileFieldToIenMappingBySchema(
        MRepositDir, crossRef, RPC_FILE_NO, RPC_NAME_FIELD_NO)
    with open(inputJsonFile, 'r') as inputFile:
        pkgJson = {}  # group by package
        allpkgJson = []
        inputJson = json.load(inputFile)
        for icrEntry in inputJson:
            if 'NUMBER' not in icrEntry:
                logger.error("Could not parse entry: " + str(icrEntry))
                continue
            if generatePDF:
                _generateICRIndividualPagePDF(icrEntry, date, pdfOutDir)
            if generateHTML:
                _generateICRIndividualPage(icrEntry, date, outDir, crossRef)
                # summary info is only needed for the HTML summary pages
                summaryInfo = _convertICREntryToSummaryInfo(icrEntry, crossRef)
                allpkgJson.append(summaryInfo)
                if 'CUSTODIAL PACKAGE' in icrEntry:
                    pkgJson.setdefault(icrEntry['CUSTODIAL PACKAGE'],[]).append(summaryInfo)
        if generateHTML:
            # the "All" page plus one summary page per custodial package
            _generateICRSummaryPageImpl(allpkgJson, 'ICR List', 'All', date,
                                        outDir, isForAll=True)
            for pkgName, outJson in pkgJson.iteritems():
                _generateICRSummaryPageImpl(outJson, 'ICR List', pkgName,
                                            date, outDir)
            logger.warn('Total # entry in PACKAGE_MAP is [%s]', len(PACKAGE_MAP))
            logger.warn('Total # entry in pkgJson is [%s]', len(pkgJson))
            _generatePkgDepSummaryPage(inputJson, date, outDir, crossRef)
def printGlobal(crossRef, globalName, visitor=None):
    """Print (or visit) the global named globalName from crossRef.

    When a visitor is supplied it is handed the global; otherwise the
    global prints its own result.  Logs an error when the global is not
    found.
    """
    target = crossRef.getGlobalByName(globalName)
    if not target:
        logger.error("Global: %s Not Found!" % globalName)
        return
    if visitor:
        visitor.visitGlobal(target)
    else:
        target.printResult()
def generateColorLegend(self, isCalled=True):
    """Render the caller-graph color legend to PNG plus a cmapx image map."""
    pngFile = os.path.join(self._outDir, "colorLegend.png")
    cmapxFile = os.path.join(self._outDir, "colorLegend.cmapx")
    dotFile = os.path.join(self._docRepDir, 'callerGraph_color_legend.dot')
    command = "\"%s\" -Tpng -o\"%s\" -Tcmapx -o\"%s\" \"%s\"" % (
        self._dot, pngFile, cmapxFile, dotFile)
    logger.debug("command is %s" % command)
    retCode = subprocess.call(command, shell=True)
    if retCode != 0:
        logger.error("calling dot with command[%s] returns %d" %
                     (command, retCode))
def printGlobal(self, globalName, visitor=None):
    """Print (or visit) the named global from this cross reference.

    Logs an error when the global is not found.
    """
    target = self._crossRef.getGlobalByName(globalName)
    if not target:
        logger.error("Global: %s Not Found!" % globalName)
        return
    if visitor:
        visitor.visitGlobal(target)
    else:
        target.printResult()
def generateColorLegend(self, isCalled=True):
    """Render the caller-graph color legend to PNG plus a cmapx image map."""
    pngFile = os.path.join(self._outDir, "colorLegend.png")
    cmapxFile = os.path.join(self._outDir, "colorLegend.cmapx")
    dotFile = os.path.join(self._docRepDir, 'callerGraph_color_legend.dot')
    command = "\"%s\" -Tpng -o\"%s\" -Tcmapx -o\"%s\" \"%s\"" % (
        self._dot, pngFile, cmapxFile, dotFile)
    retCode = subprocess.call(command, shell=True)
    if retCode != 0:
        logger.error("calling dot with command[%s] returns %d" %
                     (command, retCode))
def _generateMenuDependency(self, allMenuList, allOptionList, outDir):
    # Build the parent -> children menu dependency sets and emit one JSON
    # tree file per top-level (parentless) menu.
    menuDict = dict((x.ien, x) for x in allOptionList)
    menuDepDict = dict((x, set()) for x in allMenuList)
    for dataEntry in allMenuList:
        if '10' in dataEntry.fields:  # field 10 holds the sub-menu multiple
            menuData = dataEntry.fields['10'].value
            if menuData and menuData.dataEntries:
                for subIen in menuData.dataEntries:
                    subEntry = menuData.dataEntries[subIen]
                    if not ".01" in subEntry.fields:
                        continue
                    # sub-entry name is "<text>^<child IEN>"
                    value = subEntry.name
                    childIen = value.split('^')[1]
                    if '2' in subEntry.fields:  # field 2 is the synonym
                        # NOTE(review): menuDict[childIen] here precedes the
                        # membership check below — possible KeyError when the
                        # child is unknown; confirm intended ordering.
                        self.synonymMap[(
                            dataEntry.name, menuDict[childIen].name
                        )] = "[" + subEntry.fields['2'].value + "]"
                    if childIen in menuDict:
                        menuDepDict[dataEntry].add(menuDict[childIen])
                    else:
                        logger.error("Could not find %s: value: %s" %
                                     (childIen, value))
    """ discard any menu does not have any child """
    leafMenus = set()
    for entry in menuDepDict:
        if len(menuDepDict[entry]) == 0:
            leafMenus.add(entry)
    for entry in leafMenus:
        del menuDepDict[entry]
    """ find the top level menu, menu without any parent """
    allChildSet = reduce(set.union, menuDepDict.itervalues())
    rootSet = set(allMenuList) - allChildSet
    leafSet = allChildSet - set(allMenuList)
    """ generate the json file based on root menu """
    for item in rootSet:
        outJson = {}
        outJson['name'] = item.name
        outJson['option'] = item.name
        outJson['ien'] = item.ien
        # Explicitly exclude the ZZSERVERMENU from having a link generated for it.
        outJson['hasLink'] = False if item.name == "ZZSERVERMENU" else True
        if '1' in item.fields:  # field 1: menu text overrides the name
            outJson['name'] = item.fields['1'].value
        if '3' in item.fields:  # field 3: lock
            outJson['lock'] = item.fields['3'].value
        if '4' in item.fields:  # field 4: option type
            outJson['type'] = item.fields['4'].value
        if item in menuDepDict:
            self._addChildMenusToJson(menuDepDict[item], menuDepDict,
                                      outJson, item)
        with open(os.path.join(outDir, "VistAMenu-%s.json" % item.ien),
                  'w') as output:
            logger.info("Generate File: %s" % output.name)
            json.dump(outJson, output)
def getFileManFilePointerLink(dataEntry, value, **kargs):
    """Convert a 'fileNo^ien^name' pointer value into an HTML link.

    A two-piece value renders as plain 'File/IEN' text; anything else is
    logged and returned unchanged.
    """
    if not value:
        return value
    pieces = value.split('^')
    if len(pieces) == 3:  # fileNo, ien, name
        refFile = getDataEntryHtmlFileName(pieces[1], pieces[0])
        return '<a href="%s/%s/%s">%s</a>' % (
            VIV_URL, pieces[0].replace(".", "_"), refFile, pieces[-1])
    if len(pieces) == 2:
        return 'File: %s, IEN: %s' % (pieces[0], pieces[1])
    logger.error("Unknown File Pointer Value %s" % value)
    return value
def _generateImagesFromDotFile(self, pngFilename, cmapxFilename,
                               dotFilename):
    """Render a dot file to PNG plus a cmapx client-side image map so the
    links embedded in the graph stay clickable."""
    # @TODO this should be able to run in parallel
    cmd = "\"%s\" -Tpng -o\"%s\" -Tcmapx -o\"%s\" \"%s\"" % (
        self._dot, pngFilename, cmapxFilename, dotFilename)
    rc = subprocess.call(cmd, shell=True)
    if rc != 0:
        logger.error("calling dot with command[%s] returns %d" % (cmd, rc))
def _generateImagesFromDotFile(self, pngFilename, cmapxFilename,
                               dotFilename):
    """Render a dot file to PNG plus a cmapx client-side image map so the
    links embedded in the graph stay clickable."""
    # @TODO this should be able to run in parallel
    cmd = "\"%s\" -Tpng -o\"%s\" -Tcmapx -o\"%s\" \"%s\"" % (
        self._dot, pngFilename, cmapxFilename, dotFilename)
    rc = subprocess.call(cmd, shell=True)
    if rc != 0:
        logger.error("calling dot with command[%s] returns %d" % (cmd, rc))
def _addFileManGlobals(self, routine, fileManGlobals):
    # Associate each FileMan global referenced by `routine` with its
    # FileMan file in the cross reference.
    for fileManGbl in fileManGlobals:
        fileManFile = self._crossRef.getGlobalByName(fileManGbl)
        if not fileManFile and fileManGbl[-1] == '(':
            # retry without the trailing open-paren (subscripted reference)
            fileManGblAlt = fileManGbl[:-1]
            fileManFile = self._crossRef.getGlobalByName(fileManGblAlt)
        if fileManFile:
            routine.addFilemanDbCallGlobal(fileManFile)
        else:
            # ignore non-fileman global, could be false positive
            logger.error("global [%s] is not a valid Fileman file for"
                         " routine %s" % (fileManGbl, routine))
    # NOTE(review): trailing `return` — placed after the loop here; confirm
    # it was not intended to abort on the first unmatched global.
    return
def getFileManFilePointerLink(dataEntry, value, **kargs):
    """Convert a 'fileNo^ien^name' pointer value into a relative HTML link.

    A two-piece value renders as plain 'File/IEN' text; anything else is
    logged and returned unchanged.
    """
    if not value:
        return value
    pieces = value.split('^')
    if len(pieces) == 3:  # fileNo, ien, name
        refFile = getDataEntryHtmlFile(pieces[1], pieces[0])
        return '<a href="../%s/%s">%s</a>' % (
            pieces[0].replace(".", "_"), refFile, pieces[-1])
    if len(pieces) == 2:
        return 'File: %s, IEN: %s' % (pieces[0], pieces[1])
    logger.error("Unknown File Pointer Value %s" % value)
    return value
def convertFilePointerToHtml(inputValue):
    """Convert a 'fileNo^ien^name' pointer value to (html, display name).

    Two-piece values render as plain 'File/IEN' text; anything else is
    logged and returned unchanged.
    """
    pieces = inputValue.split('^')
    if len(pieces) == 3:  # fileNo, ien, name
        refFile = getDataEntryHtmlFileName(pieces[1], pieces[0])
        html = '<a href="%s/%s/%s">%s</a>' % (
            FILES_URL, pieces[0].replace(".", "_"), refFile, pieces[-1])
        return html, pieces[-1]
    if len(pieces) == 2:
        text = 'File: %s, IEN: %s' % (pieces[0], pieces[1])
        return text, text
    logger.error("Unknown File Pointer Value '%s'" % inputValue)
    return inputValue, inputValue
def sortSchemaByLocation(fileSchema):
    """Group a file schema's fields into {node index: {position: field}}
    using each field's storage location of the form "index;pos"."""
    locFieldDict = {}
    for fldAttr in fileSchema.getAllFileManFields().itervalues():
        loc = fldAttr.getLocation()
        if not loc:
            continue
        parts = loc.split(';')
        if len(parts) != 2:
            logger.error("Unknown location info %s for %r" % (loc, fldAttr))
            continue
        index, pos = parts
        locFieldDict.setdefault(index, {})[pos] = fldAttr
    return locFieldDict
def _parseDataValueField(self, dataRoot, fieldDict, outDataEntry):
    """Parse one '^'-delimited data node, handing each piece to the
    matching field (by 1-based piece position) in fieldDict.

    A failure on one piece is logged and parsing continues with the next.
    """
    if not dataRoot.value:
        return
    values = dataRoot.value.split('^')
    if not values:
        return  # this is very import to check
    for idx, value in enumerate(values, 1):
        if value and str(idx) in fieldDict:
            fieldAttr = fieldDict[str(idx)]
            try:
                self._parseIndividualFieldDetail(value, fieldAttr, outDataEntry)
            except Exception:
                # was a bare `except:` — narrowed so SystemExit and
                # KeyboardInterrupt are no longer swallowed
                logger.error("Field data didn't match: %s as %s" %
                             (value, fieldAttr))
                continue
def sortSchemaByLocation(fileSchema):
    """Map each field of fileSchema into a nested dict keyed by global node
    index, then by piece position, from its "index;pos" location."""
    byLocation = {}
    allFields = fileSchema.getAllFileManFields()
    for field in allFields.itervalues():
        location = field.getLocation()
        if not location:
            continue
        pieces = location.split(';')
        if len(pieces) != 2:
            logger.error("Unknown location info %s for %r" % (location, field))
            continue
        nodeIdx, piecePos = pieces
        if nodeIdx not in byLocation:
            byLocation[nodeIdx] = {}
        byLocation[nodeIdx][piecePos] = field
    return byLocation
def convertFilePointerToHtml(inputValue):
    """Convert a 'fileNo^ien^name' pointer value to (html, display name).

    Two-piece values render as plain 'File/IEN' text; anything else is
    logged and returned unchanged.
    """
    pieces = inputValue.split('^')
    if len(pieces) == 3:  # fileNo, ien, name
        refFile = getDataEntryHtmlFileName(pieces[1], pieces[0])
        html = '<a href="%s/%s/%s">%s</a>' % (
            VIV_URL, pieces[0].replace(".", "_"), refFile, pieces[-1])
        return html, pieces[-1]
    if len(pieces) == 2:
        text = 'File: %s, IEN: %s' % (pieces[0], pieces[1])
        return text, text
    logger.error("Unknown File Pointer Value '%s'" % inputValue)
    return inputValue, inputValue
def __createFieldByType__(self, fieldNo, fType, fName, fLocation, line, Global, CrossReference):
    # Resolve the raw type text of a FileMan field into a concrete field
    # object stored in self._field.  Tries, in order: an undefined-pointer
    # pattern, a pointer-to-file pattern, a subfile pattern, then the
    # string-prefix type map; if none match, re-slices the line at
    # MAXIMIUM_TYPE_START_INDEX and retries recursively once.
    logger.debug("Current Type is [%s]" % fType)
    result = self.UNDEFINED_POINTER.search(fType)
    if result:
        # Unresolvable pointer: record it as a file pointer with no target.
        self._field = FileManFieldFactory.createField(fieldNo, fName, FileManField.FIELD_TYPE_FILE_POINTER, fLocation)
        return
    result = self.POINTER_TO_REGEX.search(fType)
    if result:
        # Pointer to a specific file: look up the target global by file number.
        fileNo = result.group('File')
        filePointedTo = CrossReference.getGlobalByFileNo(fileNo)
        self._field = FileManFieldFactory.createField(fieldNo, fName, FileManField.FIELD_TYPE_FILE_POINTER, fLocation)
        if not filePointedTo:
            logger.error("Could not find file pointed to [%s], [%s], line:[%s]" % (fileNo, self._curFile, line))
        else:
            self._field.setPointedToFile(filePointedTo)
        return
    # deal with file pointer to subFiles
    result = self.SUBFILE_REGEX.search(fType)
    if result:
        # create a field for sub file type
        self._field = FileManFieldFactory.createField(fieldNo, fName, FileManField.FIELD_TYPE_SUBFILE_POINTER, fLocation)
        fileNo = result.group('File')
        logger.debug("Pointer to subFile %s" % fileNo)
        subFile = Global.getSubFileByFileNo(fileNo)
        if not subFile:  # this is a new subfile
            subFile = FileManFile(fileNo, fName, self._curFile)
            self._curFile.addFileManSubFile(subFile)
            logger.debug("Added subFile %s to File %s" % (fileNo, self._curFile.getFileNo()))
            if self._isSubFile:
                Global.addFileManSubFile(subFile)
        self._field.setPointedToSubFile(subFile)
        CrossReference.addFileManSubFile(subFile)
        return
    # Fall back to the prefix -> field-type table for plain types.
    for (key, value) in self.StringTypeMappingDict.iteritems():
        if fType.startswith(key):
            self._field = FileManFieldFactory.createField(fieldNo, fName, value, fLocation)
            break
    if not self._field:
        # double check the loc and type
        # Nothing matched: assume the type column actually starts at the
        # maximum type-start index and re-slice line accordingly, then retry.
        if line.find(fType) > self.MAXIMIUM_TYPE_START_INDEX:
            fType = line[self.MAXIMIUM_TYPE_START_INDEX:]
            if fLocation:
                fLocation = line[line.find(fLocation):self.MAXIMIUM_TYPE_START_INDEX]
            logger.warn("new Type is [%s], loc is [%s]" % (fType, fLocation))
            self.__createFieldByType__(fieldNo, fType, fName,
                                       fLocation, line, Global, CrossReference)
    # A field must have been created by one of the branches above.
    assert self._field, "Could not find the right type for %s, %s, %s, %s, %s" % (fType, fLocation, fieldNo, line, self._curFile.getFileNo())
def _generateMenuDependency(self, allMenuList, allOptionList, outDir):
    # Build the parent -> children menu graph from field '10' (menu items),
    # prune childless menus, then write one VistAMenu-<ien>.json per root
    # (parentless) menu into outDir.
    menuDict = dict((x.ien, x) for x in allOptionList)          # ien -> option entry
    menuDepDict = dict((x, set()) for x in allMenuList)         # menu -> set of child menus
    for dataEntry in allMenuList:
        if '10' in dataEntry.fields:  # field 10 holds the menu-item multiple
            menuData = dataEntry.fields['10'].value
            if menuData and menuData.dataEntries:
                for subIen in menuData.dataEntries:
                    subEntry = menuData.dataEntries[subIen]
                    if not ".01" in subEntry.fields:
                        continue
                    # subEntry.name is '^'-delimited; piece 2 is the child ien.
                    value = subEntry.name
                    childIen = value.split('^')[1]
                    if '2' in subEntry.fields:
                        # Field 2 is the synonym shown next to the child menu.
                        # NOTE(review): menuDict[childIen] here precedes the
                        # membership check below -- a missing childIen with a
                        # synonym would raise KeyError; confirm intended.
                        self.synonymMap[(dataEntry.name, menuDict[childIen].name)] = "[" + subEntry.fields['2'].value+ "]"
                    if childIen in menuDict:
                        menuDepDict[dataEntry].add(menuDict[childIen])
                    else:
                        logger.error("Could not find %s: value: %s" % (childIen, value))
    """ discard any menu does not have any child """
    leafMenus = set()
    for entry in menuDepDict:
        if not menuDepDict[entry]:
            leafMenus.add(entry)
    for entry in leafMenus:
        del menuDepDict[entry]
    """ find the top level menu, menu without any parent """
    # NOTE(review): reduce() with no initializer raises TypeError when
    # menuDepDict is empty; also relies on the Python-2 builtin reduce
    # and dict.itervalues().  leafSet is computed but unused here.
    allChildSet = reduce(set.union, menuDepDict.itervalues())
    rootSet = set(allMenuList) - allChildSet
    leafSet = allChildSet - set(allMenuList)
    """ generate the json file based on root menu """
    for item in rootSet:
        outJson = {}
        outJson['name'] = item.name
        outJson['option'] = item.name
        outJson['ien'] = item.ien
        # Explicitly exclude the ZZSERVERMENU from having a link generated for it.
        outJson['hasLink'] = False if item.name == "ZZSERVERMENU" else True
        if '1' in item.fields:  # field 1: menu text overrides the option name
            outJson['name'] = item.fields['1'].value
        if '3' in item.fields:  # field 3: lock
            outJson['lock'] = item.fields['3'].value
        if '4' in item.fields:  # field 4: type
            outJson['type'] = item.fields['4'].value
        if item in menuDepDict:
            self._addChildMenusToJson(menuDepDict[item], menuDepDict, outJson, item)
        with open(os.path.join(outDir, "VistAMenu-%s.json" % item.ien), 'w') as output:
            json.dump(outJson, output)
def visitRoutine(self, routine, outputDir):
    """Prepare the per-package output directory for *routine*.

    Logs and returns early when the routine has no callees, belongs to no
    package, or its package directory cannot be created.
    """
    # Fetch the name first: the original referenced routineName in the
    # no-called-routines branch before it was assigned (NameError).
    routineName = routine.getName()
    calledRoutines = routine.getCalledRoutines()
    if not calledRoutines:  # covers both None and empty
        logger.warn("No called Routines found! for package:%s" % routineName)
        return
    if not routine.getPackage():
        logger.error("ERROR: package: %s does not belongs to a package" % routineName)
        return
    packageName = routine.getPackage().getName()
    dirName = os.path.join(outputDir, packageName)
    try:
        if not os.path.exists(dirName):
            os.makedirs(dirName)
    except OSError as e:  # 'as e' replaces the Python-2-only 'except OSError, e'
        logger.error("Error making dir %s : Error: %s" % (dirName, e))
        return
def __init__(self, crossReference, outDir, docRepDir, dot):
    """Capture generator settings and pre-create one output directory
    per package under *outDir*."""
    self._crossRef = crossReference
    self._allPackages = crossReference.getAllPackages()
    self._outDir = outDir
    self._docRepDir = docRepDir
    self._dot = dot
    self._isDependency = False
    # Check for package directories once
    # TODO: Should delete empty directories after graphs are generated?
    for package in itervalues(self._allPackages):
        dirName = os.path.join(self._outDir, package.getName())
        try:
            if not os.path.exists(dirName):
                os.makedirs(dirName)
        except OSError as e:
            logger.error("Error making dir %s : Error: %s" % (dirName, e))
def onSectionStart(self, line, section):
    """Begin a ROUTINE section: resolve the routine named on *line*,
    make it the current routine, and return True; return False when the
    section is wrong or the routine cannot be resolved."""
    if section != IXindexLogFileParser.ROUTINE:
        logger.error("Invalid section Header")
        return False
    routineName = RoutineStart.search(line).group('name')
    assert validRoutineName.search(routineName) != None, "Invalid RoutineName: [%s] Line: [%s]" % (routineName, line)
    crossRef = self._crossRef
    # Platform-dependent routines are tracked separately and win outright.
    if crossRef.isPlatformDependentRoutineByName(routineName):
        self._curRoutine = crossRef.getPlatformDependentRoutineByName(routineName)
        return True
    if crossRef.routineNeedRename(routineName):
        resolvedName = crossRef.getRenamedRoutineName(routineName)
    else:
        resolvedName = routineName
    if not crossRef.hasRoutine(resolvedName):
        logger.error("Invalid Routine: %s: rename Routine %s" % (routineName, resolvedName))
        return False
    self._curRoutine = crossRef.getRoutineByName(resolvedName)
    return True
def __init__(self, crossReference, outDir, docRepDir, dot):
    """Store generator settings and create one output directory per package.

    crossReference -- cross-reference object providing getAllPackages()
    outDir -- root directory receiving one subdirectory per package
    docRepDir -- documentation repository directory (stored only here)
    dot -- graphviz 'dot' location (stored only here)
    """
    self._crossRef = crossReference
    self._allPackages = crossReference.getAllPackages()
    self._outDir = outDir
    self._docRepDir = docRepDir
    self._dot = dot
    self._isDependency = False
    # Check for package directories once
    # TODO: Should delete empty directories after graphs are generated?
    # .values() and 'except ... as e' replace the Python-2-only
    # .itervalues() and 'except OSError, e' forms; behavior is unchanged.
    for package in self._allPackages.values():
        try:
            packageName = package.getName()
            dirName = os.path.join(self._outDir, packageName)
            if not os.path.exists(dirName):
                os.makedirs(dirName)
        except OSError as e:
            logger.error("Error making dir %s : Error: %s" % (dirName, e))
def parseXindexLogFile(self, logFileName):
    """Parse an XINDEX log file line by line, dispatching each line to the
    handler for the current section.

    Maintains a stack of nested sections; section starts push, section
    ends pop and restore the enclosing handler.  Stops at the cross
    reference trailer.
    """
    if not os.path.exists(logFileName):
        logger.error("File: %s does not exist" % logFileName)
        return
    # 'with' guarantees the handle is closed even if parsing raises;
    # the original opened the file and never closed it.
    with open(logFileName, 'r') as logFile:
        for curLine in logFile:
            curLine = curLine.rstrip("\r\n")
            # check to see if it is a section header or we just in the routine header part
            if not self._curSection or (self._curSection in [
                    IXindexLogFileParser.ROUTINE,
                    IXindexLogFileParser.PACKAGE_COMPONENT_LIST_SECTION]):
                sectionHeader = self.__isSectionHeader__(curLine)
                if sectionHeader:
                    self._curSection = sectionHeader
                    self._curHandler = self._sectHandleDict.get(sectionHeader)
                    if self._curHandler:
                        self._curHandler._curPackage = self._curPackage
                        self._curHandler.onSectionStart(
                            curLine, sectionHeader, self._crossRef)
                    self._sectionStack.append(sectionHeader)
                    # A routine header line carries no payload of its own.
                    if not self._curSection or (self._curSection in [IXindexLogFileParser.ROUTINE]):
                        continue
            if self.__isEndOfSection__(curLine, self._curSection):
                if self._curHandler:
                    self._curHandler.onSectionEnd(curLine, self._curSection,
                                                  self._curRoutine, self._crossRef)
                assert (self._curSection == self._sectionStack.pop())
                # Restore the enclosing section's handler, if any.
                if self._sectionStack:
                    self._curSection = self._sectionStack[-1]
                    self._curHandler = self._sectHandleDict[self._curSection]
                else:
                    self._curSection = None
                    self._curHandler = None
                continue
            if self._curHandler:
                self._curHandler.parseLine(curLine, self._curRoutine, self._crossRef)
            elif PRESS_RETURN.search(curLine):
                continue
            elif CROSS_REF.match(curLine.strip()):
                # Trailer reached: nothing further to parse.
                break
def parseLine(self, line, Routine, CrossReference):
    """Classify one local-variable listing line and record it.

    Three shapes are handled: a normal name/value pair, a value-only
    continuation of the previous entry, and an over-long name whose value
    arrives on a later line (tracked via self._suspiousLine).
    """
    if self.__ignoreLine__(line):
        return
    if self.__isNameValuePairLine__(line):
        # A pending long-name entry must be finalized before a new pair.
        if self._suspiousLine:
            self.__handleSuspiousCases__(Routine, CrossReference)
            self._suspiousLine = False
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._varValue = line[self._valueStartIdx:]
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:self._valueStartIdx].strip()
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    if self.__isValueOnlyLine__(line):
        self._suspiousLine = False
        self._varValue = line[self._valueStartIdx:].strip()
        if not self._varName:
            logger.error("No varname is set, Routine: %s line: %s" % (Routine, line))
            return
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    if self.__isLongNameLine__(line):
        if self._suspiousLine:
            self.__handleSuspiousCases__(Routine, CrossReference)
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:].strip()
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._suspiousLine = True
        return
    logger.error("Could not handle this, Routine: %s, line: %s" % (Routine, line))
def parseLine(self, line, Global, CrossReference):
    """Parse one POINTED-TO-BY line and attach the referencing file to the
    current global; unparseable lines and unknown files are logged."""
    assert self._global
    trimmed = line.rstrip(" ")
    if not trimmed:
        return
    payload = trimmed[self.POINTED_TO_BY_VALUE_INDEX:]
    match = POINTED_TO_BY_VALUE_REGEX.search(payload)
    if not match:
        logger.error("Could not parse pointer reference [%s] in file [%s]" % (line, self._global.getFileNo()))
        return
    fileManNo = match.group("FileNo")
    fieldNo = match.group('fieldNo')
    subFileNo = match.group('subFieldNo')
    pointedByGlobal = CrossReference.getGlobalByFileNo(fileManNo)
    if pointedByGlobal:
        self._global.addPointedToByFile(pointedByGlobal, fieldNo, subFileNo)
    else:
        logger.warning("Could not find global based on %s, %s" % (fileManNo, match.group("Name")))
def onSectionStart(self, line, section):
    # Begin a ROUTINE section: resolve the routine named on *line* (honoring
    # platform-dependent routines and renames) and make it current.
    # Returns True on success, False on a bad section or unknown routine.
    if section != IXindexLogFileParser.ROUTINE:
        logger.error("Invalid section Header")
        return False
    routineName = RoutineStart.search(line).group('name')
    assert validRoutineName.search(routineName) != None, "Invalid RoutineName: [%s] Line: [%s]" % (routineName, line)
    if self._crossRef.isPlatformDependentRoutineByName(routineName):
        # Platform-dependent routines are tracked separately; no rename logic.
        self._curRoutine = self._crossRef.getPlatformDependentRoutineByName(routineName)
        return True
    renamedRoutineName = routineName
    if self._crossRef.routineNeedRename(routineName):
        renamedRoutineName = self._crossRef.getRenamedRoutineName(routineName)
    if not self._crossRef.hasRoutine(renamedRoutineName):
        logger.error("Invalid Routine: %s: rename Routine %s" % (routineName, renamedRoutineName))
        return False
    self._curRoutine = self._crossRef.getRoutineByName(renamedRoutineName)
    # NOTE(review): structuredSource is not defined anywhere in this method's
    # scope -- presumably a module-level global set by the caller; confirm,
    # otherwise this line raises NameError at runtime.
    self._curRoutine._structuredCode = structuredSource
    return True
def generateFileFieldMap(self, inputFileList, fileNumber, fieldNo):
    """Build a {field-value: ien} map for field *fieldNo* of file
    *fileNumber* by scanning every ZWR file in *inputFileList*.

    Returns None (after logging) when the field is missing or has no
    storage location.
    """
    schemaFile = self._allSchemaDict[fileNumber]
    if not schemaFile.hasField(fieldNo):
        logger.error("File does not have a [%s] field, ignore", fieldNo)
        return
    keyField = schemaFile.getFileManFieldByFieldNo(fieldNo)
    keyLoc = keyField.getLocation()
    if not keyLoc:
        logger.error("[%s] field does not have a location", fieldNo)
        return
    glbLoc = self._glbLocMap[fileNumber]
    fieldMap = {}
    for zwrFileName in inputFileList:
        for dataRoot in readGlobalNodeFromZWRFileV2(zwrFileName, glbLoc):
            if not dataRoot:
                continue
            ien, detail = self._getKeyNameBySchema(dataRoot, keyLoc, keyField)
            if detail:
                fieldMap[detail] = ien
    return fieldMap
def parseLine(self, line, Routine, CrossReference):
    """Handle one local-variables line.

    The line is one of: a name/value pair, a value-only continuation, or a
    name too long to fit on one line (its value follows later; flagged in
    self._suspiousLine until resolved).  Anything else is logged.
    """
    if self.__ignoreLine__(line):
        return
    if self.__isNameValuePairLine__(line):
        if self._suspiousLine:
            # Finish any outstanding long-name entry first.
            self.__handleSuspiousCases__(Routine, CrossReference)
            self._suspiousLine = False
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._varValue = line[self._valueStartIdx:]
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:self._valueStartIdx].strip()
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    if self.__isValueOnlyLine__(line):
        self._suspiousLine = False
        self._varValue = line[self._valueStartIdx:].strip()
        if not self._varName:
            logger.error("No varname is set, Routine: %s line: %s" % (Routine, line))
            return
        if self._addVarToRoutine:
            self._addVarToRoutine(Routine, CrossReference)
        if self._postParsingRoutine:
            self._postParsingRoutine(Routine, CrossReference)
        return
    if self.__isLongNameLine__(line):
        if self._suspiousLine:
            self.__handleSuspiousCases__(Routine, CrossReference)
        self._varName = line[DEFAULT_NAME_FIELD_START_INDEX:].strip()
        self._varPrefix = line[0:DEFAULT_NAME_FIELD_START_INDEX]
        self._suspiousLine = True
        return
    logger.error("Could not handle this, Routine: %s, line: %s" % (Routine, line))
def parseLine(self, line, Global, CrossReference):
    """Parse one POINTED-TO-BY line (verbose variant with debug logging)
    and register the referencing file on the current global."""
    assert self._global
    content = line.rstrip(" ")
    if not content:
        return
    value = content[self.POINTED_TO_BY_VALUE_INDEX:]
    logger.debug("Parsing line [%s]" % value)
    match = self.POINTED_TO_BY_VALUE.search(value)
    if match:
        fileManNo = match.group("FileNo")
        fieldNo = match.group('fieldNo')
        subFileNo = match.group('subFieldNo')
        logger.debug("File # %s, field # %s, sub-field # %s" % (fileManNo, fieldNo, subFileNo))
        pointedByGlobal = CrossReference.getGlobalByFileNo(fileManNo)
        if not pointedByGlobal:
            logger.warning("Could not find global based on %s, %s" % (fileManNo, match.group("Name")))
        else:
            self._global.addPointedToByFile(pointedByGlobal, fieldNo, subFileNo)
            logger.debug("added global to pointed list: %s, %s, %s" % (fileManNo, fieldNo, subFileNo))
    else:
        logger.error("Could not parse pointer reference [%s] in file [%s]" % (line, self._global.getFileNo()))
def generatePackageDependencyGraph(self, package, dependencyList=True):
    """Prepare the dependency (dependencyList=True) or dependent (False)
    graph output for *package*.

    Skips packages with nothing to draw or with more related packages than
    MAX_DEPENDENCY_LIST_SIZE, and bails out if the per-package output
    directory cannot be created.
    """
    # merge the routine and package list
    depPackages, depPackageMerged = mergeAndSortDependencyListByPackage(
        package, dependencyList)
    if dependencyList:
        packageSuffix = "_dependency"
    else:
        packageSuffix = "_dependent"
    packageName = package.getName()
    # NOTE(review): normalizedName is unused in the visible portion of this
    # method -- confirm against the full implementation before removing.
    normalizedName = normalizePackageName(packageName)
    totalPackage = len(depPackageMerged)
    if (totalPackage == 0) or (totalPackage > MAX_DEPENDENCY_LIST_SIZE):
        logger.info("Nothing to do exiting... Package: %s Total: %d " %
                    (packageName, totalPackage))
        return
    try:
        dirName = os.path.join(self._outDir, packageName)
        if not os.path.exists(dirName):
            os.makedirs(dirName)
    except OSError as e:  # 'as e' replaces the Python-2-only 'except OSError, e'
        logger.error("Error making dir %s : Error: %s" % (dirName, e))
        return
def __parsingVariablePointer__(self, Global, CrossReference):
    # Scan the buffered section lines for the "FILE ORDER PREFIX LAYGO
    # MESSAGE" table header, then collect each referenced file (resolved
    # through CrossReference) until a blank-ish line ends the table, and
    # record the list on the current field.
    index, fileList, found = 0, None, False
    indentValue = self.__getDefaultIndentLevel__(self._curFile,
                                                 self.DEFAULT_NAME_INDENT)
    for index in range(len(self._lines)):
        if not found:
            # Still searching for the table header; skip everything before it.
            if re.search("^ {%d,%d}FILE ORDER PREFIX LAYGO MESSAGE$" %
                         (self.DEFAULT_NAME_INDENT, indentValue),
                         self._lines[index]):
                found = True
            continue
        else:
            # A line of only spaces at the expected indent ends the table.
            if re.search("^ {%d,}$" % indentValue, self._lines[index]):
                break
            else:
                result = FILE_REGEX.search(self._lines[index])
                if result:
                    filePointedTo = CrossReference.getGlobalByFileNo(result.group('File'))
                    if not filePointedTo:
                        # log an error for now, will handle this case later
                        logger.error("INVALID File! File is %s, Global is %s" %
                                     (result.group('File'), Global))
                        continue
                    if not fileList:
                        fileList = []
                    fileList.append(filePointedTo)
    # fileList stays None when no valid files were found.
    self._field.setPointedToFiles(fileList)
def onSectionEnd(self, line, section, Routine, CrossReference):
    """Close a ROUTINE section by clearing the current-routine state;
    returns False (with an error logged) for any other section."""
    if section == IXindexLogFileParser.ROUTINE:
        self._curRoutine = None
        return True
    logger.error("Invalid section Header")
    return False
def onSectionStart(self, line, section, Global, CrossReference):
    # Start of one FileMan field section in a data-dictionary listing.
    # Determines whether the field belongs to Global itself or one of its
    # subfiles, extracts the field number, then splits the rest of the
    # header line into name / location / type and creates the field object.
    self._lines = []
    result = DataDictionaryListFileLogParser.FILEMAN_FIELD_START.search(line)
    assert result
    fileNo = result.group('FileNo')
    fieldNo = result.group("FieldNo")
    # A file number differing from the enclosing global's marks a subfile.
    self._isSubFile = float(fileNo) != float(Global.getFileNo())
    if self._isSubFile:
        self._curFile = Global.getSubFileByFileNo(fileNo)
        assert self._curFile, "Could not find subFile [%s] in file [%s] line [%s]" % (fileNo, Global.getFileNo(), line)
    else:
        self._curFile = Global
    restOfLineStart = line.find("," + fieldNo) + len(fieldNo)
    # NOTE(review): startIdent is assigned but unused in the visible code.
    startIdent = self.DEFAULT_NAME_INDENT
    defaultIdentLevel = self.__getDefaultIndentLevel__(self._curFile,
                                                       self.DEFAULT_NAME_INDENT)
    if restOfLineStart > defaultIdentLevel:
        logger.warning("FileNo: %s, FieldNo: %s, line: %s, may not be a valid field no, %d, %d" % (fileNo, fieldNo, line, restOfLineStart, defaultIdentLevel))
        # Re-derive the field number directly from the line when the parsed
        # one does not even convert to a number.
        try:
            floatValue = float(fieldNo)
        except ValueError:
            logger.error("invalid fieldNo %s" % fieldNo)
            fieldNo = line[line.find(",")+1:defaultIdentLevel]
            floatValue = float(fieldNo)
    restOfLine = line[line.find("," + fieldNo) + len(fieldNo)+1:].strip()
    result = NAME_LOC_TYPE_REGEX.search(restOfLine)
    fName, fType, fLocation = None, None, None
    if result:
        fName = result.group('Name').strip()
        fLocation = result.group('Loc').strip()
        if fLocation == ";":  # a bare ';' means no real storage location
            fLocation = None
        fType = result.group('Type').strip()
    else:
        # handle three cases, 1. no location info 2. no type info 3. Both
        if restOfLine.find(";") != -1:  # missing type info
            logger.warn("Missing Type information [%s]" % line)
            result = NAME_LOC_REGEX.search(restOfLine)
            if result:
                fName = result.group('Name').strip()
                fLocation = result.group('Loc').strip()
            else:
                logger.error("Could not parse [%s]" % restOfLine)
                return
        else:
            # missing location, assume at least two space seperate name and type
            result = NAME_TYPE_REGEX.search(restOfLine)
            if result:
                fName = result.group('Name').strip()
                fType = result.group('Type').strip()
            else:
                logger.warn("Guessing Name: %s at line [%s]" % (restOfLine.strip(), line))
    stripedType = ""
    if fType:
        # Remove attribute decorations (e.g. required/multiple markers)
        # before resolving the base type.
        stripedType = self.__stripFieldAttributes__(fType)
    if stripedType:
        self.__createFieldByType__(fieldNo, stripedType, fName, fLocation, line, Global, CrossReference)
    else:
        # No recognizable type: record the field as typeless.
        self._field = FileManFieldFactory.createField(fieldNo, fName, FileManField.FIELD_TYPE_NONE, fLocation)
    self._curFile.addFileManField(self._field)
    if stripedType:
        # Attributes are parsed from the original (unstripped) type text.
        self.__parseFieldAttributes__(fType)
def _setFieldSpecificData(self, zeroFields, fileField, rootNode, fileSchema, filePointedTo, subFile):
    # Populate type-specific details on *fileField* from its 0-node pieces:
    # pointer targets for file pointers, subfile linkage for multiples,
    # member dictionaries for sets, and target lists for variable pointers.
    if fileField.getType() == FileManField.FIELD_TYPE_FILE_POINTER:
        fileGlobalRoot = ""
        if len(zeroFields) >= 3:
            # Piece 3 of the 0-node carries the pointed-to global root.
            fileGlobalRoot = zeroFields[2]
        if filePointedTo:
            if filePointedTo not in self._allSchema:
                """ create a new fileman file """
                self._allSchema[filePointedTo] = Global(fileGlobalRoot,
                                                        filePointedTo, "")
            pointedToFile = self._allSchema[filePointedTo]
            assert pointedToFile.isRootFile()
            fileField.setPointedToFile(pointedToFile)
            globalName = pointedToFile.getName()
            fileNo = fileSchema.getFileNo()
            if fileSchema.isSubFile():
                # Dependencies are tracked against the top-level file.
                fileNo = fileSchema.getRootFile().getFileNo()
            self._addToFileDepDict(fileNo, pointedToFile.getFileNo())
            if fileGlobalRoot:
                if not globalName:
                    pointedToFile.setName(fileGlobalRoot)
                elif globalName != fileGlobalRoot:
                    logger.warning("%s: FileMan global root mismatch '%s' : '%s'" %
                                   (zeroFields, globalName, fileGlobalRoot))
            else:
                logger.info("@TODO, find file global root for # %s" % filePointedTo)
        elif fileGlobalRoot:
            # Target file number unknown: remember the orphan global root.
            self._noPointedToFiles[fileGlobalRoot] = Global(fileGlobalRoot)
            logger.info("@TODO, set the file number for %s" % fileGlobalRoot)
        else:
            logger.warn("No pointed to file set for file:%s: field:%r 0-index:%s" %
                        (fileSchema.getFileNo(), fileField, zeroFields))
    elif fileField.getType() == FileManField.FIELD_TYPE_SUBFILE_POINTER:
        if subFile:
            if subFile not in self._allSchema:
                self._allSchema[subFile] = FileManFile(subFile, "", fileSchema)
            subFileSchema = self._allSchema[subFile]
            subFileSchema.setParentFile(fileSchema)
            fileSchema.addFileManSubFile(subFileSchema)
            fileField.setPointedToSubFile(subFileSchema)
        else:
            logger.warn("No subfile is set for file:%s, field:%r 0-index:%s" %
                        (fileSchema.getFileNo(), fileField, zeroFields))
    elif fileField.getType() == FileManField.FIELD_TYPE_SET and not subFile:
        # Piece 3 holds 'code:label;code:label;...' set members.
        setDict = dict([x.split(':') for x in zeroFields[2].rstrip(';').split(';')])
        fileField.setSetMembers(setDict)
    elif fileField.getType() == FileManField.FIELD_TYPE_VARIABLE_FILE_POINTER:
        if "V" in rootNode:  # parsing variable pointer
            vptrs = parsingVariablePointer(rootNode['V'])
            vpFileSchemas = []
            if vptrs:
                for x in vptrs:
                    if x not in self._allSchema:
                        self._allSchema[x] = Global("", x, "")
                    pointedToFile = self._allSchema[x]
                    if pointedToFile.isSubFile():
                        # A variable pointer targeting a subfile is unexpected.
                        logger.error("Field: %r point to subFile: %s, parent: %s" %
                                     (fileField, pointedToFile,
                                      pointedToFile.getParentFile()))
                    else:
                        fileNo = fileSchema.getFileNo()
                        if fileSchema.isSubFile():
                            fileNo = fileSchema.getRootFile().getFileNo()
                        self._addToFileDepDict(fileNo, pointedToFile.getFileNo())
                    vpFileSchemas.append(self._allSchema[x])
                fileField.setPointedToFiles(vpFileSchemas)
def printRoutine(crossRef, routineName, visitor=None):
    """Look up *routineName* in *crossRef* and pass it to *visitor*.

    visitor defaults to a fresh DefaultRoutineVisit per call.  The original
    used 'visitor=DefaultRoutineVisit()', which instantiated the visitor at
    import time and shared that single instance across every defaulted call.
    """
    if visitor is None:
        visitor = DefaultRoutineVisit()
    routine = crossRef.getRoutineByName(routineName)
    if routine:
        visitor.visitRoutine(routine)
    else:
        logger.error("Routine: %s Not Found!" % routineName)
def printPackage(crossRef, packageName, visitor=None):
    """Look up *packageName* in *crossRef* and pass it to *visitor*.

    visitor defaults to a fresh DefaultPackageVisit per call.  The original
    used 'visitor=DefaultPackageVisit()', which instantiated the visitor at
    import time and shared that single instance across every defaulted call.
    """
    if visitor is None:
        visitor = DefaultPackageVisit()
    package = crossRef.getPackageByName(packageName)
    if package:
        visitor.visitPackage(package)
    else:
        logger.error("Package: %s Not Found!" % packageName)