def _addToPackageMap(icrEntry, pkgName):
    """Record the mapping from an ICR entry's custodial package to pkgName.

    The first mapping seen for a custodial package wins; a later,
    conflicting mapping is only logged, never overwritten.
    """
    if 'CUSTODIAL PACKAGE' not in icrEntry:
        return
    custodialPkg = icrEntry['CUSTODIAL PACKAGE']
    if custodialPkg not in PACKAGE_MAP:
        PACKAGE_MAP[custodialPkg] = pkgName
        return
    # Already mapped: warn on disagreement, keep the original mapping.
    if PACKAGE_MAP[custodialPkg] != pkgName:
        logger.warning('[%s] mapped to [%s] and [%s]', custodialPkg,
                       PACKAGE_MAP[custodialPkg], pkgName)
def __postParsing__(self, Routine, CrossReference):
    """Resolve the routine reference held in self._varName and attach it
    to Routine as a called routine (tag + self._varValue).

    Relies on module-level regexes `routineTag` / `validRoutineName`.
    Silently returns on an invalid routine name or a pure MUMPS routine.
    """
    routineDetail = routineTag.search(self._varName.strip())
    if routineDetail:
        routineName = routineDetail.group('name')
        if not validRoutineName.search(routineName):
            logger.warning("invalid Routine Name: %s in routine:%s, package: %s" %
                           (routineName, Routine, Routine.getPackage()))
            return
        if (routineName.startswith("%")):
            CrossReference.addPercentRoutine(routineName)
            # ignore mumps routine for now
            if CrossReference.isMumpsRoutine(routineName):
                return
            # routineName=routineName[1:]
        # NOTE(review): rename check assumed to apply to all routines, not
        # only "%" ones — confirm intended nesting against the original file.
        if CrossReference.routineNeedRename(routineName):
            routineName = CrossReference.getRenamedRoutineName(routineName)
        # Build the call tag from the optional external marker + tag parts.
        tag = ""
        if routineDetail.group('external'):
            tag += routineDetail.group('external')
        if routineDetail.group('tag'):
            tag += routineDetail.group('tag')
        if not CrossReference.hasRoutine(routineName):
            # automatically categorize the routine by the namespace
            # if could not find one, assign to Uncategorized
            defaultPackageName = "Uncategorized"
            (namespace, package) = CrossReference.categorizeRoutineByNamespace(routineName)
            if namespace and package:
                defaultPackageName = package.getName()
            CrossReference.addRoutineToPackageByName(routineName, defaultPackageName, False)
        routine = CrossReference.getRoutineByName(routineName)
        Routine.addCalledRoutines(routine, tag, self._varValue)
def _getPackageHRefLink(pkgName, icrEntry, **kargs):
    """Return an HTML anchor to the Dox page for pkgName, or pkgName itself
    when no package mapping can be found.

    Lookup order: PACKAGE_MAP cache, upper-cased-name cache built from the
    cross reference, then exact / title-cased / normalized name lookups.
    Successful fallback lookups are recorded via _addToPackageMap.
    """
    global pgkUpperCaseNameDict
    if pkgName in PACKAGE_MAP:
        pkgLink = getPackageHtmlFileName(PACKAGE_MAP[pkgName])
        return '<a href=\"%s%s\">%s</a>' % (DOX_URL, pkgLink, pkgName)
    crossRef = None
    if 'crossRef' in kargs:
        crossRef = kargs['crossRef']
    if crossRef:
        # Lazily build the UPPER-CASE name -> real name cache once.
        if not pgkUpperCaseNameDict:
            for name in crossRef.getAllPackages().iterkeys():
                pgkUpperCaseNameDict[name.upper()] = name
        upperName = _normalizeName(pkgName).upper()
        if upperName in pgkUpperCaseNameDict:
            _addToPackageMap(icrEntry, pgkUpperCaseNameDict[upperName])
            return '<a href=\"%s%s\">%s</a>' % (DOX_URL,
                                                getPackageHtmlFileName(pgkUpperCaseNameDict[upperName]),
                                                pkgName)
        # Fall back to progressively looser name lookups.
        pkg = crossRef.getPackageByName(pkgName)
        if not pkg:
            pkgRename = _normalizeName(pkgName).title()
            pkg = crossRef.getPackageByName(pkgRename)
        if not pkg:
            pkgRename = _normalizeName(pkgName)
            pkg = crossRef.getPackageByName(pkgRename)
        if pkg:
            _addToPackageMap(icrEntry, pkg.getName())
            pkgLink = getPackageHtmlFileName(pkg.getName())
            return '<a href=\"%s%s\">%s</a>' % (DOX_URL, pkgLink, pkgName)
        else:
            logger.warning('Cannot find mapping for package: [%s]', pkgName)
    return pkgName
def _getFileManFileHRefLink(fileNo, icrEntry, **kargs): crossRef = None if 'crossRef' in kargs: crossRef = kargs['crossRef'] if crossRef: fileInfo = crossRef.getGlobalByFileNo(fileNo) if fileInfo: linkName = getGlobalHtmlFileNameByName(fileInfo.getName()) # _addToPackageMap(icrEntry, fileInfo.getPackage().getName()) return '<a href=\"%s%s\">%s</a>' % (DOX_URL, linkName, fileNo) else: logger.warning('Cannot find file: [%s]', fileNo) return fileNo
def _parseSubFileField(self, dataRoot, fieldAttr, outDataEntry):
    """Parse a multiple-valued field (word-processing or subfile) rooted
    at dataRoot and append the resulting FileManDataField to outDataEntry.

    Word-processing fields take precedence over subfile pointers; anything
    else is logged as an unknown schema.
    """
    subFile = fieldAttr.getPointedToSubFile()
    if fieldAttr.hasSubType(FileManField.FIELD_TYPE_WORD_PROCESSING):
        outLst = self._parsingWordProcessingNode(dataRoot)
        outDataEntry.addField(FileManDataField(fieldAttr.getFieldNo(),
                                               FileManField.FIELD_TYPE_WORD_PROCESSING,
                                               fieldAttr.getName(),
                                               outLst))
    elif subFile:
        subFileData = FileManFileData(subFile.getFileNo(),
                                      subFile.getFileManName())
        self._parseDataBySchema(dataRoot, subFile, subFileData)
        outDataEntry.addField(FileManDataField(fieldAttr.getFieldNo(),
                                               FileManField.FIELD_TYPE_SUBFILE_POINTER,
                                               fieldAttr.getName(),
                                               subFileData))
    else:
        # Fixed typo ("intepret" -> "interpret") and switched to lazy
        # logger formatting so the message is only built when emitted.
        logger.warning("Do not know how to interpret the schema %s", fieldAttr)
def parseLine(self, line, Global, CrossReference):
    """Parse one 'POINTED TO BY' line and register the back-pointer on
    self._global (set by the enclosing section parser before this runs).

    Lines that do not match POINTED_TO_BY_VALUE_REGEX are logged as errors;
    unknown file numbers are logged as warnings.
    """
    assert self._global
    # Only trailing spaces are stripped; leading columns are positional.
    strippedLine = line.rstrip(" ")
    if not strippedLine:
        return
    value = strippedLine[self.POINTED_TO_BY_VALUE_INDEX:]
    result = POINTED_TO_BY_VALUE_REGEX.search(value)
    if result:
        fileManNo = result.group("FileNo")
        fieldNo = result.group('fieldNo')
        subFileNo = result.group('subFieldNo')
        pointedByGlobal = CrossReference.getGlobalByFileNo(fileManNo)
        if pointedByGlobal:
            self._global.addPointedToByFile(pointedByGlobal, fieldNo, subFileNo)
        else:
            logger.warning("Could not find global based on %s, %s" %
                           (fileManNo, result.group("Name")))
    else:
        logger.error("Could not parse pointer reference [%s] in file [%s]" %
                     (line, self._global.getFileNo()))
def _parseSchemaField(self, fieldNo, rootNode, fileSchema):
    """Build a FileMan field object for fieldNo from its data-dictionary
    node (rootNode) within fileSchema.

    Returns the created field, or None when the node has no usable
    0-subscript.  A location ending in '0' marks a multiple (subfile
    pointer); a self-referencing subfile pointer is demoted to NONE.
    """
    if '0' not in rootNode:
        # logger.warn is a deprecated alias; use warning for consistency.
        logger.warning('%s does not have a 0 subscript' % rootNode)
        return None
    zeroFields = rootNode["0"].value
    if not zeroFields:
        logger.warning("No value: %s for %s" % (zeroFields, rootNode['0']))
        return None
    zeroFields = zeroFields.split('^')
    if len(zeroFields) < 2:
        # Name only: no type information available.
        return FileManFieldFactory.createField(fieldNo, zeroFields[0],
                                               FileManField.FIELD_TYPE_NONE, None)
    types, specifier, filePointedTo, subFile = \
        self.parseFieldTypeSpecifier(zeroFields[1])
    location = None
    if len(zeroFields) >= 4 and zeroFields[3]:
        location = zeroFields[3].strip(' ')
        if location == ';':  # No location information
            location = None
        elif location.split(';')[-1] == '0':  # 0 means multiple
            # Force SUBFILE_POINTER to the front of the type list.
            multipleType = FileManField.FIELD_TYPE_SUBFILE_POINTER
            if not types:
                types = [multipleType]
            if multipleType in types and types[0] != multipleType:
                types.remove(multipleType)
                types.insert(0, multipleType)
            if not subFile:
                subFile = filePointedTo
    if not types:
        logger.debug('Cannot determine the type for %s, fn: %s, file:%s' %
                     (zeroFields, fieldNo, fileSchema.getFileNo()))
        types = [FileManField.FIELD_TYPE_NONE]
    if types and types[0] == FileManField.FIELD_TYPE_SUBFILE_POINTER:
        if subFile and subFile == fileSchema.getFileNo():
            logger.warning("Recursive subfile pointer for %s" % subFile)
            types = [FileManField.FIELD_TYPE_NONE]
    fileField = FileManFieldFactory.createField(fieldNo, zeroFields[0],
                                                types[0], location)
    if specifier:
        fileField.setSpecifier(specifier)
    self._setFieldSpecificData(zeroFields, fileField, rootNode, fileSchema,
                               filePointedTo, subFile)
    return fileField
def parseLine(self, line, Global, CrossReference):
    """Parse a 'POINTED TO BY' line and record the back-pointer on
    self._global, with debug tracing of each parsed component.
    """
    assert self._global
    strippedLine = line.rstrip(" ")
    if len(strippedLine) == 0:
        return
    value = strippedLine[self.POINTED_TO_BY_VALUE_INDEX:]
    logger.debug("Parsing line [%s]" % value)
    result = self.POINTED_TO_BY_VALUE.search(value)
    if not result:
        # Unrecognized layout: report and bail out.
        logger.error("Could not parse pointer reference [%s] in file [%s]" %
                     (line, self._global.getFileNo()))
        return
    fileManNo = result.group("FileNo")
    fieldNo = result.group('fieldNo')
    subFileNo = result.group('subFieldNo')
    logger.debug("File # %s, field # %s, sub-field # %s" %
                 (fileManNo, fieldNo, subFileNo))
    pointedByGlobal = CrossReference.getGlobalByFileNo(fileManNo)
    if not pointedByGlobal:
        logger.warning("Could not find global based on %s, %s" %
                       (fileManNo, result.group("Name")))
        return
    self._global.addPointedToByFile(pointedByGlobal, fieldNo, subFileNo)
    logger.debug("added global to pointed list: %s, %s, %s" %
                 (fileManNo, fieldNo, subFileNo))
def onSectionStart(self, line, section, crossRef):
    """Handle the start of a ROUTINE section in an index log file.

    For a valid routine name, resolve (possibly renamed) routine objects
    from the cross reference and make them current.  For non-routine
    objects named 'dd<fileNo>', resolve the FileMan global / subfile
    instead.  Returns True/False for routine names; the 'dd' path falls
    through and implicitly returns None (falsy) — NOTE(review): confirm
    callers treat that the same as False.
    """
    if section != IXindexLogFileParser.ROUTINE:
        logger.error("Invalid section Header --> %s", line)
        return False
    routineName = ROUTINE_START.search(line).group('name')
    if VALID_ROUTINE_NAME.search( routineName ) != None:
        #, "Invalid RoutineName: [%s] Line: [%s]" % (routineName, line)
        if self._crossRef.isPlatformDependentRoutineByName(routineName):
            self._curRoutine = self._crossRef.getPlatformDependentRoutineByName(
                routineName)
            return True
        renamedRoutineName = routineName
        if self._crossRef.routineNeedRename(routineName):
            renamedRoutineName = self._crossRef.getRenamedRoutineName(
                routineName)
        if not self._crossRef.hasRoutine(renamedRoutineName):
            logger.warning("Invalid Routine: '%s': Rename Routine '%s'" %
                           (routineName, renamedRoutineName))
            return False
        self._curRoutine = self._crossRef.getRoutineByName(
            renamedRoutineName)
        # NOTE(review): `structuredSource` is not defined anywhere in this
        # method — presumably a module-level name, otherwise this raises
        # NameError.  Verify against the rest of the file.
        self._curRoutine._structuredCode = structuredSource
        self._curPackage = self._curRoutine.getPackage()
        return True
    match = VALID_OBJECT.search(routineName).group("name")
    if match[:2] == "dd":
        # 'dd' prefix: the object names a data dictionary; the remainder
        # is the file number (normalized to have a decimal part).
        fileNo = match[2:]
        if '.' not in fileNo:
            fileNo += ".0"
        self._curRoutine = self._crossRef.getGlobalByFileNo(fileNo)
        if not self._curRoutine:
            self._curRoutine = self._crossRef.getFileManSubFileByFileNo(
                fileNo)
        if self._curRoutine:
            self._curRoutine.setPackage(self._curPackage)
def _setFieldSpecificData(self, zeroFields, fileField, rootNode, fileSchema,
                          filePointedTo, subFile):
    """Attach type-specific schema details to fileField based on its type:
    file pointers, subfile pointers, SET members, and variable pointers.

    Also records file-to-file dependencies via _addToFileDepDict.
    """
    if fileField.getType() == FileManField.FIELD_TYPE_FILE_POINTER:
        fileGlobalRoot = ""
        if len(zeroFields) >= 3:
            fileGlobalRoot = zeroFields[2]
        if filePointedTo:
            if filePointedTo not in self._allSchema:
                # Create a placeholder FileMan file for a not-yet-seen target.
                self._allSchema[filePointedTo] = Global(fileGlobalRoot,
                                                        filePointedTo, "")
            pointedToFile = self._allSchema[filePointedTo]
            assert pointedToFile.isRootFile()
            fileField.setPointedToFile(pointedToFile)
            globalName = pointedToFile.getName()
            # Dependencies are recorded against the root file, not subfiles.
            fileNo = fileSchema.getFileNo()
            if fileSchema.isSubFile():
                fileNo = fileSchema.getRootFile().getFileNo()
            self._addToFileDepDict(fileNo, pointedToFile.getFileNo())
            if fileGlobalRoot:
                if not globalName:
                    pointedToFile.setName(fileGlobalRoot)
                elif globalName != fileGlobalRoot:
                    logger.warning("%s: FileMan global root mismatch '%s' : '%s'" %
                                   (zeroFields, globalName, fileGlobalRoot))
            else:
                logger.info("@TODO, find file global root for # %s" % filePointedTo)
        elif fileGlobalRoot:
            self._noPointedToFiles[fileGlobalRoot] = Global(fileGlobalRoot)
            logger.info("@TODO, set the file number for %s" % fileGlobalRoot)
        else:
            # warn() is deprecated; warning() matches the rest of the file.
            logger.warning("No pointed to file set for file:%s: field:%r 0-index:%s" %
                           (fileSchema.getFileNo(), fileField, zeroFields))
    elif fileField.getType() == FileManField.FIELD_TYPE_SUBFILE_POINTER:
        if subFile:
            if subFile not in self._allSchema:
                self._allSchema[subFile] = FileManFile(subFile, "", fileSchema)
            subFileSchema = self._allSchema[subFile]
            subFileSchema.setParentFile(fileSchema)
            fileSchema.addFileManSubFile(subFileSchema)
            fileField.setPointedToSubFile(subFileSchema)
        else:
            logger.warning("No subfile is set for file:%s, field:%r 0-index:%s" %
                           (fileSchema.getFileNo(), fileField, zeroFields))
    elif (fileField.getType() == FileManField.FIELD_TYPE_SET and not subFile
          and len(zeroFields) > 2 and zeroFields[2]):
        # Guard on len(zeroFields) > 2 added: the duplicate of this method
        # elsewhere in the file checks zeroFields[2] truthiness, and a
        # 2-element 0-index would otherwise raise IndexError here.
        setDict = dict([x.split(':')
                        for x in zeroFields[2].rstrip(';').split(';')])
        fileField.setSetMembers(setDict)
    elif fileField.getType() == FileManField.FIELD_TYPE_VARIABLE_FILE_POINTER:
        if "V" in rootNode:  # parsing variable pointer
            vptrs = parsingVariablePointer(rootNode['V'])
            vpFileSchemas = []
            if vptrs:
                for x in vptrs:
                    if x not in self._allSchema:
                        self._allSchema[x] = Global("", x, "")
                    pointedToFile = self._allSchema[x]
                    if pointedToFile.isSubFile():
                        logger.error("Field: %r point to subFile: %s, parent: %s" %
                                     (fileField, pointedToFile,
                                      pointedToFile.getParentFile()))
                    else:
                        fileNo = fileSchema.getFileNo()
                        if fileSchema.isSubFile():
                            fileNo = fileSchema.getRootFile().getFileNo()
                        self._addToFileDepDict(fileNo,
                                               pointedToFile.getFileNo())
                    vpFileSchemas.append(self._allSchema[x])
                fileField.setPointedToFiles(vpFileSchemas)
def onSectionStart(self, line, section, Global, CrossReference):
    """Begin parsing a FileMan field section from a data-dictionary
    listing line: identify the (sub)file and field number, then extract
    the field name, location, and type from the remainder of the line.

    Falls back through three layouts: name+loc+type, name+loc (missing
    type), and name+type (missing location).
    """
    self._lines = []
    result = DataDictionaryListFileLogParser.FILEMAN_FIELD_START.search(line)
    assert result
    fileNo = result.group('FileNo')
    fieldNo = result.group("FieldNo")
    # A file number different from the enclosing global's means a subfile.
    self._isSubFile = float(fileNo) != float(Global.getFileNo())
    if self._isSubFile:
        self._curFile = Global.getSubFileByFileNo(fileNo)
        assert self._curFile, "Could not find subFile [%s] in file [%s] line [%s]" % \
            (fileNo, Global.getFileNo(), line)
    else:
        self._curFile = Global
    restOfLineStart = line.find("," + fieldNo) + len(fieldNo)
    startIdent = self.DEFAULT_NAME_INDENT
    defaultIdentLevel = self.__getDefaultIndentLevel__(self._curFile,
                                                       self.DEFAULT_NAME_INDENT)
    if restOfLineStart > defaultIdentLevel:
        # Field number looks wider than the layout allows; re-derive it
        # from the raw columns when it fails to parse as a float.
        logger.warning("FileNo: %s, FieldNo: %s, line: %s, may not be a valid field no, %d, %d" %
                       (fileNo, fieldNo, line, restOfLineStart, defaultIdentLevel))
        try:
            floatValue = float(fieldNo)
        except ValueError:
            logger.error("invalid fieldNo %s" % fieldNo)
            fieldNo = line[line.find(",")+1:defaultIdentLevel]
            floatValue = float(fieldNo)
    restOfLine = line[line.find("," + fieldNo) + len(fieldNo)+1:].strip()
    result = NAME_LOC_TYPE_REGEX.search(restOfLine)
    fName, fType, fLocation = None, None, None
    if result:
        fName = result.group('Name').strip()
        fLocation = result.group('Loc').strip()
        if fLocation == ";":
            fLocation = None
        fType = result.group('Type').strip()
    else:
        # handle three cases, 1. no location info 2. no type info 3. Both
        if restOfLine.find(";") != -1:  # missing type info
            logger.warn("Missing Type information [%s]" % line)
            result = NAME_LOC_REGEX.search(restOfLine)
            if result:
                fName = result.group('Name').strip()
                fLocation = result.group('Loc').strip()
            else:
                logger.error("Could not parse [%s]" % restOfLine)
                return
        else:
            # missing location, assume at least two spaces separate name and type
            result = NAME_TYPE_REGEX.search(restOfLine)
            if result:
                fName = result.group('Name').strip()
                fType = result.group('Type').strip()
            else:
                logger.warn("Guessing Name: %s at line [%s]" %
                            (restOfLine.strip(), line))
    stripedType = ""
    if fType:
        stripedType = self.__stripFieldAttributes__(fType)
    if stripedType:
        self.__createFieldByType__(fieldNo, stripedType, fName, fLocation,
                                   line, Global, CrossReference)
    else:
        # No recognizable type: fall back to a typeless field.
        self._field = FileManFieldFactory.createField(fieldNo, fName,
                                                      FileManField.FIELD_TYPE_NONE,
                                                      fLocation)
    self._curFile.addFileManField(self._field)
    if stripedType:
        self.__parseFieldAttributes__(fType)
def _setFieldSpecificData(self, zeroFields, fileField, rootNode,
                          fileSchema, filePointedTo, subFile):
    """Attach type-specific schema details to fileField based on its type:
    file pointers, subfile pointers, SET members, and variable pointers.

    Also records file-to-file dependencies via _addToFileDepDict.
    """
    if fileField.getType() == FileManField.FIELD_TYPE_FILE_POINTER:
        fileGlobalRoot = ""
        if len(zeroFields) >= 3:
            fileGlobalRoot = zeroFields[2]
        if filePointedTo:
            if filePointedTo not in self._allSchema:
                # Create a placeholder FileMan file for a not-yet-seen target.
                self._allSchema[filePointedTo] = Global(
                    fileGlobalRoot, filePointedTo, "")
            pointedToFile = self._allSchema[filePointedTo]
            assert pointedToFile.isRootFile()
            fileField.setPointedToFile(pointedToFile)
            globalName = pointedToFile.getName()
            # Dependencies are recorded against the root file, not subfiles.
            fileNo = fileSchema.getFileNo()
            if fileSchema.isSubFile():
                fileNo = fileSchema.getRootFile().getFileNo()
            self._addToFileDepDict(fileNo, pointedToFile.getFileNo())
            if fileGlobalRoot:
                if not globalName:
                    pointedToFile.setName(fileGlobalRoot)
                elif globalName != fileGlobalRoot:
                    logger.warning(
                        "%s: FileMan global root mismatch '%s' : '%s'" %
                        (zeroFields, globalName, fileGlobalRoot))
            else:
                logger.info("@TODO, find file global root for # %s" %
                            filePointedTo)
        elif fileGlobalRoot:
            self._noPointedToFiles[fileGlobalRoot] = Global(fileGlobalRoot)
            logger.info("@TODO, set the file number for %s" % fileGlobalRoot)
        else:
            # warn() is deprecated; warning() matches the rest of the file.
            logger.warning(
                "No pointed to file set for file:%s: field:%r 0-index:%s" %
                (fileSchema.getFileNo(), fileField, zeroFields))
    elif fileField.getType() == FileManField.FIELD_TYPE_SUBFILE_POINTER:
        if subFile:
            if subFile not in self._allSchema:
                self._allSchema[subFile] = FileManFile(
                    subFile, "", fileSchema)
            subFileSchema = self._allSchema[subFile]
            subFileSchema.setParentFile(fileSchema)
            fileSchema.addFileManSubFile(subFileSchema)
            fileField.setPointedToSubFile(subFileSchema)
        else:
            logger.warning(
                "No subfile is set for file:%s, field:%r 0-index:%s" %
                (fileSchema.getFileNo(), fileField, zeroFields))
    elif (fileField.getType() == FileManField.FIELD_TYPE_SET
          and not subFile and len(zeroFields) > 2 and zeroFields[2]):
        # Length guard added: the bare zeroFields[2] truthiness test would
        # raise IndexError when the 0-index has only two pieces.
        setDict = dict(
            [x.split(':') for x in zeroFields[2].rstrip(';').split(';')])
        fileField.setSetMembers(setDict)
    elif fileField.getType() == FileManField.FIELD_TYPE_VARIABLE_FILE_POINTER:
        if "V" in rootNode:  # parsing variable pointer
            vptrs = parsingVariablePointer(rootNode['V'])
            vpFileSchemas = []
            if vptrs:
                for x in vptrs:
                    if x not in self._allSchema:
                        self._allSchema[x] = Global("", x, "")
                    pointedToFile = self._allSchema[x]
                    if pointedToFile.isSubFile():
                        logger.error(
                            "Field: %r point to subFile: %s, parent: %s" %
                            (fileField, pointedToFile,
                             pointedToFile.getParentFile()))
                    else:
                        fileNo = fileSchema.getFileNo()
                        if fileSchema.isSubFile():
                            fileNo = fileSchema.getRootFile().getFileNo()
                        self._addToFileDepDict(fileNo,
                                               pointedToFile.getFileNo())
                    vpFileSchemas.append(self._allSchema[x])
                fileField.setPointedToFiles(vpFileSchemas)
def convertJson(inputJsonFile, date, MRepositDir, patchRepositDir,
                generateHTML, generatePDF, outDir=None, pdfOutDir=None,
                local=False):
    """Drive ICR output generation from a parsed ICR JSON file.

    Emits per-entry HTML and/or PDF pages plus HTML summary pages
    (all entries, per custodial package, and a package-dependency page).
    Mutates module globals DOX_URL, VIVIAN_URL and RPC_NAME_TO_IEN_MAPPING.
    Raises Exception when neither output kind, or a required output
    directory, is specified.
    """
    if not generateHTML and not generatePDF:
        raise Exception("Nothing to generate!")
    global DOX_URL
    global VIVIAN_URL
    DOX_URL = getDOXURL(local)
    VIVIAN_URL = getViViaNURL(local)
    if generateHTML:
        if not outDir:
            raise Exception("Must specify Output directory")
        if not os.path.exists(outDir):
            # Will also create intermediate directories if needed
            os.makedirs(outDir)
    if generatePDF:
        if not pdfOutDir:
            raise Exception("Must specify PDF Output directory")
        # Will also create intermediate directories if needed
        if not os.path.exists(pdfOutDir):
            os.makedirs(pdfOutDir)
    from InitCrossReferenceGenerator import parseCrossReferenceGeneratorArgs
    crossRef = parseCrossReferenceGeneratorArgs(MRepositDir,
                                                patchRepositDir)
    global RPC_NAME_TO_IEN_MAPPING
    RPC_NAME_TO_IEN_MAPPING = generateSingleFileFieldToIenMappingBySchema(
        MRepositDir, crossRef)
    with open(inputJsonFile, 'r') as inputFile:
        pkgJson = {}  # group by package
        allpkgJson = []
        inputJson = json.load(inputFile)
        for icrEntry in inputJson:
            if 'NUMBER' not in icrEntry:
                logger.error("Could not parse entry: " + str(icrEntry))
                continue
            if 'CUSTODIAL PACKAGE' in icrEntry:
                pkgName = icrEntry['CUSTODIAL PACKAGE']
                # Ensure every custodial package has a normalized mapping.
                if crossRef.getMappedPackageName(pkgName) is None:
                    crossRef.addMappedPackage(pkgName,
                                              crossRef.normalizePackageName(pkgName).title())
                    logger.warning("Adding package " + pkgName +
                                   " to package name map.")
            if generatePDF:
                _generateICRIndividualPagePDF(icrEntry, date, pdfOutDir)
            if generateHTML:
                _generateICRIndividualPage(icrEntry, date, outDir, crossRef)
                summaryInfo = _convertICREntryToSummaryInfo(icrEntry, crossRef)
                allpkgJson.append(summaryInfo)
                if 'CUSTODIAL PACKAGE' in icrEntry:
                    pkgJson.setdefault(icrEntry['CUSTODIAL PACKAGE'],
                                       []).append(summaryInfo)
    if generateHTML:
        _generateICRSummaryPageImpl(allpkgJson, 'ICR List', 'All', date,
                                    outDir, crossRef, isForAll=True)
        # Python 2 style iteration via iteritems helper.
        for pkgName, outJson in iteritems(pkgJson):
            _generateICRSummaryPageImpl(outJson, 'ICR List', pkgName, date,
                                        outDir, crossRef)
        logger.warn('Total # entry in pkgJson is [%s]', len(pkgJson))
        _generatePkgDepSummaryPage(inputJson, date, outDir, crossRef)