def ConvertLogFile(self, LogFile):
    """Extract loaded-module records from a boot log file.

    Reads LogFile, keeps only the text following each "Loading PEIM at"
    or "Loading driver at" marker (with any ".efi" suffix removed), and
    writes the result to "<LogFile>.new", one entry per CRLF line.

    @param LogFile  Path of the log file to convert; no-op if falsy.
    """
    newline = []
    lfr = None
    lfw = None
    if LogFile:
        lfr = open(LogFile, "rb")
        lfw = open(LogFile + ".new", "wb")
        for line in lfr:
            line = line.strip()
            # Files are opened in binary mode, so work with bytes
            # literals throughout (the original mixed str and bytes,
            # which raises TypeError on Python 3).
            line = line.replace(b".efi", b"")
            index = line.find(b"Loading PEIM at ")
            if index > -1:
                # Skip the fixed-width address/entry-point prefix.
                newline.append(line[index + 55:])
                continue
            index = line.find(b"Loading driver at ")
            if index > -1:
                newline.append(line[index + 57:])
                continue
        for line in newline:
            lfw.write(line + b"\r\n")
    if lfr:
        lfr.close()
    if lfw:
        lfw.close()
def TrimEdkSourceCode(Source, Target):
    """Trim EDK source code by applying the gImportCodePatterns rewrites.

    Applies each (regex, replacement) pair in gImportCodePatterns to the
    whole content of Source and writes the result to Target.  When
    rewriting a file in place (Source == Target) with no change, the
    file is left untouched.

    @param Source  File to be trimmed
    @param Target  File to store the trimmed content
    """
    EdkLogger.verbose("\t%s -> %s" % (Source, Target))
    CreateDirectory(os.path.dirname(Target))
    try:
        f = open(Source, 'rb')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    # Read the whole file at once; the patterns may span lines.
    Lines = f.read()
    f.close()

    # Seed with the original content: this replaces the original
    # "NewLines == None" dance (use "is None" for None checks) and also
    # avoids writing None when gImportCodePatterns is empty.
    NewLines = Lines
    for Re, Repl in gImportCodePatterns:
        NewLines = Re.sub(Repl, NewLines)

    # Skip the write when an in-place rewrite changed nothing.
    if Source == Target and NewLines == Lines:
        return

    try:
        f = open(Target, 'wb')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.write(NewLines)
    f.close()
def ConvertLogFile(self, LogFile):
    """Extract loaded-module records from a boot log file.

    Keeps only the text following each "Loading PEIM at" or
    "Loading driver at" marker (".efi" suffixes removed) and writes the
    result to "<LogFile>.new", one entry per CRLF line.

    @param LogFile  Path of the log file to convert; no-op if falsy.
    """
    newline = []
    lfr = None
    lfw = None
    if LogFile:
        lfr = open(LogFile, 'rb')
        lfw = open(LogFile + '.new', 'wb')
        for line in lfr:
            line = line.strip()
            # Binary-mode files yield bytes: use bytes literals (the
            # original used str literals, a TypeError on Python 3).
            line = line.replace(b'.efi', b'')
            index = line.find(b"Loading PEIM at ")
            if index > -1:
                # Skip the fixed-width address/entry-point prefix.
                newline.append(line[index + 55:])
                continue
            index = line.find(b"Loading driver at ")
            if index > -1:
                newline.append(line[index + 57:])
                continue
        for line in newline:
            lfw.write(line + b'\r\n')
    if lfr:
        lfr.close()
    if lfw:
        lfw.close()
def TrimPreprocessedVfr(Source, Target):
    """Trim a preprocessed VFR file.

    Blanks preprocessor line-number directives and everything that is not
    a "typedef struct", a "#pragma pack" directive, or the formset body,
    then writes the result to Target.  Typedefs for GUID, EFI_PLABEL and
    PAL_CALL_RETURN are blanked as well.

    @param Source  File to be trimmed
    @param Target  File to store the trimmed content
    """
    CreateDirectory(os.path.dirname(Target))
    try:
        f = open (Source,'r')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    # read whole file
    Lines = f.readlines()
    f.close()

    # State for tracking an in-progress "typedef struct" span.
    FoundTypedef = False
    Brace = 0
    TypedefStart = 0
    TypedefEnd = 0
    for Index in range(len(Lines)):
        Line = Lines[Index]
        # don't trim the lines from "formset" definition to the end of file
        if Line.strip() == 'formset':
            break

        if FoundTypedef == False and (Line.find('#line') == 0 or Line.find('# ') == 0):
            # empty the line number directive if it's not aomong "typedef struct"
            Lines[Index] = "\n"
            continue

        if FoundTypedef == False and gTypedefPattern.search(Line) == None:
            # keep "#pragram pack" directive
            if gPragmaPattern.search(Line) == None:
                Lines[Index] = "\n"
            continue
        elif FoundTypedef == False:
            # found "typedef struct", keept its position and set a flag
            FoundTypedef = True
            TypedefStart = Index

        # match { and } to find the end of typedef definition
        if Line.find("{") >= 0:
            Brace += 1
        elif Line.find("}") >= 0:
            Brace -= 1

        # "typedef struct" must end with a ";"
        if Brace == 0 and Line.find(";") >= 0:
            FoundTypedef = False
            TypedefEnd = Index
            # keep all "typedef struct" except to GUID, EFI_PLABEL and PAL_CALL_RETURN
            if Line.strip("} ;\r\n") in ["GUID", "EFI_PLABEL", "PAL_CALL_RETURN"]:
                for i in range(TypedefStart, TypedefEnd+1):
                    Lines[i] = "\n"

    # save all lines trimmed
    try:
        f = open (Target,'w')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.writelines(Lines)
    f.close()
def __init__(self, ReportName = 'Report.html', FvObj = None, DispatchName=None):
    """Report constructor.

    Opens the HTML report file (and, optionally, a dispatch-list file)
    for writing and initializes the per-report counters.

    @param ReportName    Filename of the generated HTML report
    @param FvObj         Firmware-volume object described by the report
    @param DispatchName  Optional filename for the dispatch-list output
    """
    self.ReportName = ReportName
    # Report output handle (created/truncated).
    self.Op = open(ReportName, 'w+')
    self.DispatchList = None
    if DispatchName:
        self.DispatchList = open(DispatchName, 'w+')
    self.FvObj = FvObj
    # Running indexes used while emitting report sections.
    self.FfsIndex = 0
    self.PpiIndex = 0
    self.ProtocolIndex = 0
    # Fall back to EDK_SOURCE when EFI_SOURCE is not configured.
    if EotGlobalData.gMACRO['EFI_SOURCE'] == '':
        EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gMACRO['EDK_SOURCE']
def ParseDecFile(self, DecFileList):
    """Collect GUID definitions from the DEC files named in a list file.

    DecFileList names a text file containing one DEC file path per line
    (workspace-relative).  Every "<name> = <guid-struct>" line of each
    existing DEC file is recorded in EotGlobalData.gGuidDict with the
    GUID converted to registry format.

    @param DecFileList  Path of the list file; no-op if falsy.
    """
    if DecFileList:
        path = os.path.normpath(DecFileList)
        # Text mode: the stripped lines are joined with str paths below
        # (the original's "rb" mode breaks on Python 3).
        lfr = open(path, "r")
        for line in lfr:
            path = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
            if os.path.exists(path):
                dfr = open(path, "r")
                for line in dfr:
                    line = CleanString(line)
                    # Renamed from "list", which shadowed the builtin.
                    tokens = line.split("=")
                    if len(tokens) == 2:
                        EotGlobalData.gGuidDict[tokens[0].strip()] = GuidStructureStringToGuidString(tokens[1].strip())
                # Close each DEC file (the original leaked the handles).
                dfr.close()
        lfr.close()
def GenerateVpdFile (self, MapFileName, BinFileName):
    """Generate the VPD binary file and its map file.

    Writes every PCD in self.PcdFixedOffsetSizeList into the binary file
    at its fixed offset (buffered through a BytesIO), and records a
    "name | sku | offset | size | value" line per PCD in the map file.

    @param MapFileName  Path of the map file to create
    @param BinFileName  Path of the VPD binary file to create
    """
    #Open an VPD file to process
    try:
        fVpdFile = open(BinFileName, "wb")
    except:
        # Open failed
        EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)

    try :
        fMapFile = open(MapFileName, "w")
    except:
        # Open failed
        EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)

    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO()

    # Write the header of map file.
    try :
        fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
    except:
        EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

    for eachPcd in self.PcdFixedOffsetSizeList :
        # write map file
        try :
            fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
        except:
            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

        # Write Vpd binary file
        fStringIO.seek (eachPcd.PcdBinOffset)
        if isinstance(eachPcd.PcdValue, list):
            # Write the value one byte at a time.
            for i in range(len(eachPcd.PcdValue)):
                Value = eachPcd.PcdValue[i:i + 1]
                # NOTE(review): on Python 3 bytes(Value) is never a str,
                # so only the bytes() branch runs; the chr() branch looks
                # like a Python 2 leftover -- confirm before removing.
                if isinstance(bytes(Value), str):
                    fStringIO.write(chr(Value[0]))
                else:
                    fStringIO.write(bytes(Value))
        else:
            fStringIO.write (eachPcd.PcdValue)

    try :
        fVpdFile.write (fStringIO.getvalue())
    except:
        EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)
    fStringIO.close ()
    fVpdFile.close ()
    fMapFile.close ()
def GenerateSourceFileList(self, SourceFileList, IncludeFileList):
    """Collect C/H source files, INF files and DEC files to be scanned.

    Reads SourceFileList (one path per line) and records .c/.h files per
    INF module; then walks every directory named in IncludeFileList and
    records additional .h files and any .dec files.  Results are stored
    in EotGlobalData (gSOURCE_FILES, gINF_FILES, gDEC_FILES) and echoed
    to the corresponding report streams.

    @param SourceFileList   Path of the file listing source files, or falsy
    @param IncludeFileList  Path of the file listing include dirs, or falsy
    """
    EdkLogger.quiet("Generating source files list ... ")
    mSourceFileList = []
    mInfFileList = []
    mDecFileList = []
    mFileList = {}
    mCurrentInfFile = ""
    mCurrentSourceFileList = []

    if SourceFileList:
        # Text mode: the stripped lines are joined with str paths (the
        # original's "rb" mode breaks on Python 3).
        sfl = open(SourceFileList, "r")
        for line in sfl:
            line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
            if line[-2:].upper() == ".C" or line[-2:].upper() == ".H":
                if line not in mCurrentSourceFileList:
                    mCurrentSourceFileList.append(line)
                    mSourceFileList.append(line)
                    EotGlobalData.gOP_SOURCE_FILES.write("%s\n" % line)
            if line[-4:].upper() == ".INF":
                # A new INF starts: flush the previous INF's source list.
                if mCurrentInfFile != "":
                    mFileList[mCurrentInfFile] = mCurrentSourceFileList
                    mCurrentSourceFileList = []
                mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line))
                EotGlobalData.gOP_INF.write("%s\n" % mCurrentInfFile)
        # Close the list file (the original leaked the handle).
        sfl.close()
        if mCurrentInfFile not in mFileList:
            mFileList[mCurrentInfFile] = mCurrentSourceFileList

    # Get all include files from packages
    if IncludeFileList:
        ifl = open(IncludeFileList, "r")
        for line in ifl:
            if not line.strip():
                continue
            newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
            for Root, Dirs, Files in os.walk(str(newline)):
                for File in Files:
                    FullPath = os.path.normpath(os.path.join(Root, File))
                    if FullPath not in mSourceFileList and File[-2:].upper() == ".H":
                        mSourceFileList.append(FullPath)
                        EotGlobalData.gOP_SOURCE_FILES.write("%s\n" % FullPath)
                    if FullPath not in mDecFileList and File.upper().find(".DEC") > -1:
                        mDecFileList.append(FullPath)
        ifl.close()

    EotGlobalData.gSOURCE_FILES = mSourceFileList
    EotGlobalData.gOP_SOURCE_FILES.close()

    EotGlobalData.gINF_FILES = mFileList
    EotGlobalData.gOP_INF.close()

    EotGlobalData.gDEC_FILES = mDecFileList
def ParseMapFile(Files):
    """Parse linker map files into {module: {symbol: address}}.

    A line containing both '(' and ')' starts a new module section;
    every other line is an "address [F|FS] name" entry recorded for the
    current module.

    @param Files  Iterable of map file paths
    @return dict mapping module name to its {symbol: address} dict
    """
    AllMaps = {}
    CurrentModule = ''
    CurrentMaps = {}
    for File in Files:
        # with-statement closes the handle (the original leaked the file
        # object returned by open(...).readlines()).
        with open(File, 'r') as MapFd:
            Content = MapFd.readlines()
        for Line in Content:
            Line = CleanString(Line)
            # skip empty line
            if Line == '':
                continue

            if Line.find('(') > -1 and Line.find(')') > -1:
                # Module header: store the previous module's symbols.
                # NOTE(review): symbols of the final module are only
                # flushed when a later header appears, so the last
                # module in a file is dropped at EOF -- confirm intended.
                if CurrentModule != '' and CurrentMaps != {}:
                    AllMaps[CurrentModule] = CurrentMaps
                CurrentModule = Line[:Line.find('(')]
                CurrentMaps = {}
                continue
            else:
                Name = ''
                Address = ''
                List = Line.split()
                Address = List[0]
                if List[1] == 'F' or List[1] == 'FS':
                    Name = List[2]
                else:
                    Name = List[1]
                CurrentMaps[Name] = Address
                continue
    return AllMaps
def RaiseParserError(Line, Section, File, Format='', LineNo= -1):
    """Report an invalid statement found while parsing a meta file.

    When LineNo is not supplied, the statement is located by searching
    the file content; the error is then reported through EdkLogger.

    @param Line     The offending statement text
    @param Section  Section in which the statement was found
    @param File     Path of the file being parsed
    @param Format   Expected-format description, optional
    @param LineNo   Known line number, or -1 to search File for Line
    """
    # Locate the statement when the caller did not know its line.
    if LineNo == -1:
        Content = open(os.path.normpath(File), 'r').read()
        LineNo = GetLineNo(Content, Line)

    ExtraInfo = Format
    if ExtraInfo != '':
        ExtraInfo = "Correct format is " + ExtraInfo

    Message = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
    EdkLogger.error("Parser", PARSER_ERROR, Message, File=File, Line=LineNo,
                    ExtraData=ExtraInfo, RaiseError=EdkLogger.IsRaiseError)
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
    """Generate a UI (EFI_SECTION_USER_INTERFACE) section file.

    The UI string comes from self.StringData if set, otherwise from the
    file named by self.FileName, otherwise it is empty.

    @param OutputPath     Directory in which to place the section file
    @param ModuleName     Module base name used in the output file name
    @param SecNum         Section number string used in the output file name
    @param KeyStringList  Tool key strings (unused here)
    @param FfsInf         FfsInfStatement used to expand macros, or None
    @param Dict           Macro dictionary for file-name expansion
    @param IsMakefile     True to emit makefile commands instead of running
    @return tuple (output file list, alignment)
    """
    #
    # Prepare the parameter of GenSection
    #
    # None default instead of a shared mutable {} default argument.
    if Dict is None:
        Dict = {}
    if FfsInf is not None:
        self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
        self.StringData = FfsInf.__ExtendMacro__(self.StringData)
        self.FileName = FfsInf.__ExtendMacro__(self.FileName)

    OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(BINARY_FILE_TYPE_UI))

    if self.StringData is not None :
        NameString = self.StringData
    elif self.FileName is not None:
        FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
        FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
        FileObj = open(FileNameStr, 'r')
        NameString = FileObj.read()
        FileObj.close()
    else:
        NameString = ''
    GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_USER_INTERFACE', Ui=NameString, IsMakefile=IsMakefile)

    OutputFileList = []
    OutputFileList.append(OutputFile)
    return OutputFileList, self.Alignment
def Write(self, FilePath):
    """Write the collected VPD PCD information to a VPD info file.

    One "TokenSpace.PcdName|SkuId|Offset|MaxSize|Value" line is emitted
    per recorded offset of every PCD in self._VpdArray.

    @param FilePath  Destination file path
    """
    # The original test "not (FilePath != None or len(FilePath) != 0)"
    # crashed on None; this checks None and empty string as intended.
    if not FilePath:
        EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
                        "Invalid parameter FilePath: %s." % FilePath)
    try:
        fd = open(FilePath, "w")
    except:
        EdkLogger.error("VpdInfoFile", BuildToolError.FILE_OPEN_FAILURE,
                        "Fail to open file %s for written." % FilePath)
    try:
        # write file header
        fd.write(FILE_COMMENT_TEMPLATE)

        # write each of PCD in VPD type
        # sorted() works on dict views (dict.keys().sort() fails on
        # Python 3, where keys() is a view without sort()).
        Pcds = sorted(self._VpdArray.keys())
        for Pcd in Pcds:
            i = 0
            for Offset in self._VpdArray[Pcd]:
                # list() around the keys view so positional indexing
                # works on Python 3.
                SkuKey = list(Pcd.SkuInfoList.keys())[i]
                PcdValue = str(Pcd.SkuInfoList[SkuKey].DefaultValue).strip()
                if PcdValue == "" :
                    PcdValue = Pcd.DefaultValue
                fd.write("%s.%s|%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(SkuKey), str(Offset).strip(), str(Pcd.MaxDatumSize).strip(), PcdValue))
                i += 1
    except:
        EdkLogger.error("VpdInfoFile", BuildToolError.FILE_WRITE_FAILURE,
                        "Fail to write file %s" % FilePath)
    fd.close()
def SetDir(OutputDir, FdfParser, WorkSpace, ArchList): GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir :%s" % OutputDir) # GenFdsGlobalVariable.OutputDirDict = OutputDir GenFdsGlobalVariable.FdfParser = FdfParser GenFdsGlobalVariable.WorkSpace = WorkSpace GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], "FV") if not os.path.exists(GenFdsGlobalVariable.FvDir): os.makedirs(GenFdsGlobalVariable.FvDir) GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, "Ffs") if not os.path.exists(GenFdsGlobalVariable.FfsDir): os.makedirs(GenFdsGlobalVariable.FfsDir) if ArchList != None: GenFdsGlobalVariable.ArchList = ArchList T_CHAR_LF = "\n" # # Create FV Address inf file # GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, "FvAddress.inf") FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, "w") # # Add [Options] # FvAddressFile.writelines("[options]" + T_CHAR_LF) BsAddress = "0" for Arch in ArchList: if GenFdsGlobalVariable.WorkSpace.BuildObject[ GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag, ].BsBaseAddress: BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[ GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag, ].BsBaseAddress break FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + BsAddress + T_CHAR_LF) RtAddress = "0" for Arch in ArchList: if GenFdsGlobalVariable.WorkSpace.BuildObject[ GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag, ].RtBaseAddress: RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[ GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag, ].RtBaseAddress FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + RtAddress + T_CHAR_LF) FvAddressFile.close()
def Read(self, FilePath):
    """Read a BPDG-fixed-up VPD info file and update recorded offsets.

    Each non-comment line has the form
    "TokenSpace.PcdName|SkuId|Offset|Size|Value"; the offset of every
    matching PCD/SKU recorded in self._VpdArray is updated from it.

    @param FilePath  VPD info file to read
    """
    try:
        fd = open(FilePath, "r")
    except:
        EdkLogger.error("VpdInfoFile", BuildToolError.FILE_OPEN_FAILURE,
                        "Fail to open file %s for written." % FilePath)
    Lines = fd.readlines()
    # Close the handle (the original leaked it).
    fd.close()
    for Line in Lines:
        Line = Line.strip()
        if len(Line) == 0 or Line.startswith("#"):
            continue

        #
        # the line must follow output format defined in BPDG spec.
        #
        try:
            PcdName, SkuId, Offset, Size, Value = Line.split("#")[0].split("|")
            PcdName, SkuId, Offset, Size, Value = PcdName.strip(), SkuId.strip(), Offset.strip(), Size.strip(), Value.strip()
            TokenSpaceName, PcdTokenName = PcdName.split(".")
        except:
            EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)

        Found = False
        for VpdObject in self._VpdArray.keys():
            for sku in VpdObject.SkuInfoList.keys():
                if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip() and sku == SkuId:
                    # list() so .index() works on the dict view under
                    # Python 3 (the original called .index() on keys()).
                    SkuIndex = list(VpdObject.SkuInfoList.keys()).index(sku)
                    if self._VpdArray[VpdObject][SkuIndex] == "*":
                        if Offset == "*":
                            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
                    self._VpdArray[VpdObject][SkuIndex] = Offset
                    Found = True
        if not Found:
            EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
def ToCSV(self, Filename='Report.csv'):
    """Export the ECC error records to a CSV report file.

    Queries all error records, resolves each record's file path and line
    number from the database, and writes one CSV row per record.  When
    the report file is locked (IOError), retries once with a timestamped
    file name.

    @param Filename  Destination CSV path (default 'Report.csv')
    """
    try:
        File = open(Filename, 'w+')
        File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
        RecordSet = self.Query()
        Index = 0
        for Record in RecordSet:
            Index = Index + 1
            ErrorID = Record[1]
            OtherMsg = Record[2]
            BelongsToTable = Record[3]
            BelongsToItem = Record[4]
            IsCorrected = Record[5]
            SqlCommand = ''
            if BelongsToTable == 'File':
                # The record points directly at a file row.
                SqlCommand = """select 1, FullPath from %s where ID = %s """ % (BelongsToTable, BelongsToItem)
            else:
                # Join through the owning table to resolve file and line.
                SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B where A.ID = %s and B.ID = A.BelongsToFile """ % (BelongsToTable, BelongsToItem)
            NewRecord = self.Exec(SqlCommand)
            if NewRecord != []:
                File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
                EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
        File.close()
    except IOError:
        # Report file is locked: fall back to a timestamped name.
        NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
        EdkLogger.warn("ECC", "The report file %s is locked by other progress, use %s instead!" % (Filename, NewFilename))
        self.ToCSV(NewFilename)
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
    """Generate a version (EFI_SECTION_VERSION) section file.

    The version string comes from self.StringData if set, otherwise from
    the file named by self.FileName, otherwise it is empty.

    @param OutputPath     Directory in which to place the section file
    @param ModuleName     Module base name used in the output file name
    @param SecNum         Section number string used in the output file name
    @param KeyStringList  Tool key strings (unused here)
    @param FfsInf         FfsInfStatement used to expand macros, or None
    @param Dict           Macro dictionary for file-name expansion
    @param IsMakefile     True to emit makefile commands instead of running
    @return tuple (output file list, alignment)
    """
    #
    # Prepare the parameter of GenSection
    #
    # None default instead of a shared mutable {} default argument.
    if Dict is None:
        Dict = {}
    if FfsInf:
        self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
        self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
        self.StringData = FfsInf.__ExtendMacro__(self.StringData)
        self.FileName = FfsInf.__ExtendMacro__(self.FileName)

    OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
    OutputFile = os.path.normpath(OutputFile)

    # Get String Data
    StringData = ''
    if self.StringData:
        StringData = self.StringData
    elif self.FileName:
        FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
        FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
        FileObj = open(FileNameStr, 'r')
        StringData = FileObj.read()
        StringData = '"' + StringData + '"'
        FileObj.close()

    GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION', Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
    OutputFileList = []
    OutputFileList.append(OutputFile)
    return OutputFileList, self.Alignment
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=None):
    """Parse build rules from a rule file or a string list.

    @param File             Path of the build-rule file, or None
    @param Content          Rule content as a list of lines (used when File is None)
    @param LineIndex        Line-number offset for error reporting
    @param SupportedFamily  Tool-chain families to accept
                            (default: MSFT, INTEL, GCC, RVCT)
    """
    # None default instead of a shared mutable list default argument.
    if SupportedFamily is None:
        SupportedFamily = ["MSFT", "INTEL", "GCC", "RVCT"]
    self.RuleFile = File
    # Read build rules from file if it's not none
    if File != None:
        try:
            self.RuleContent = open(File, 'r').readlines()
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
    elif Content != None:
        self.RuleContent = Content
    else:
        EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")

    self.SupportedToolChainFamilyList = SupportedFamily
    self.RuleDatabase = tdict(True, 4)  # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
    self.Ext2FileType = {}              # {ext : file-type}
    self.FileTypeList = set()

    # Parser state.
    self._LineIndex = LineIndex
    self._State = ""
    self._RuleInfo = tdict(True, 2)     # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
    self._FileType = ''
    self._BuildTypeList = []
    self._ArchList = []
    self._FamilyList = []
    self._TotalToolChainFamilySet = set()
    self._RuleObjectList = []           # FileBuildRule object list
    self._FileVersion = ""

    self.Parse()

    # some intrinsic rules
    self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, "COMMON", "COMMON", "COMMON"] = self._BinaryFileRule
    self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
def ParseConfig(self):
    """Parse the ECC configuration file into this object's attributes.

    Each non-empty line is "Key = Value"; the key must already be an
    attribute of this configuration object.  List-valued options are
    split on commas before being stored.
    """
    Filepath = os.path.normpath(self.Filename)
    if not os.path.isfile(Filepath):
        ErrorMsg = "Can't find configuration file '%s'" % Filepath
        EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)

    LineNo = 0
    # with-statement closes the file (the original iterated an unclosed
    # open() result and leaked the handle).
    with open(Filepath, 'r') as ConfigFile:
        for Line in ConfigFile:
            LineNo = LineNo + 1
            Line = CleanString(Line)
            if Line != '':
                List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
                if List[0] not in self.__dict__:
                    ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
                    EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
                # Comma-separated options are stored as lists.
                if List[0] == 'ModifierList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
                    continue
                if List[0] == 'SkipDirList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'SkipFileList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'BinaryExtList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'Copyright':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                self.__dict__[List[0]] = List[1]
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None):
    """Generate a UI (EFI_SECTION_USER_INTERFACE) section file.

    The UI string comes from self.StringData if set, otherwise from the
    file named by self.FileName, otherwise it is empty.

    @param OutputPath     Directory in which to place the section file
    @param ModuleName     Module base name used in the output file name
    @param SecNum         Section number string used in the output file name
    @param KeyStringList  Tool key strings (unused here)
    @param FfsInf         FfsInfStatement used to expand macros, or None
    @param Dict           Macro dictionary for file-name expansion
    @return tuple (output file list, alignment)
    """
    #
    # Prepare the parameter of GenSection
    #
    # None default instead of a shared mutable {} default argument.
    if Dict is None:
        Dict = {}
    if FfsInf != None:
        self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
        self.StringData = FfsInf.__ExtendMacro__(self.StringData)
        self.FileName = FfsInf.__ExtendMacro__(self.FileName)

    OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('UI'))

    if self.StringData != None :
        NameString = self.StringData
    elif self.FileName != None:
        FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
        FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
        FileObj = open(FileNameStr, 'r')
        NameString = FileObj.read()
        NameString = '\"' + NameString + "\""
        FileObj.close()
    else:
        NameString = ''
    GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_USER_INTERFACE', Ui=NameString)

    OutputFileList = []
    OutputFileList.append(OutputFile)
    return OutputFileList, self.Alignment
def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
    """Parse a target.txt-style file into self.TargetTxtDictionary.

    Each non-comment "Key = Value" line is stored; path-valued keys are
    normalized (backslashes to slashes, Conf/ resolution relative to the
    file's directory), list-valued keys are split on whitespace, and the
    thread-count key is validated as an integer.

    @param FileName           Configuration file to parse
    @param CommentCharacter   Character that starts a comment line
    @param KeySplitCharacter  Character separating key from value
    @return 0 on completion
    """
    F = None
    try:
        F = open(FileName, 'r')
        self.ConfDirectoryPath = os.path.dirname(FileName)
    except:
        EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
        # Close the handle if the error logger returns.
        if F is not None:
            F.close()

    for Line in F:
        Line = Line.strip()
        if Line.startswith(CommentCharacter) or Line == '':
            continue

        LineList = Line.split(KeySplitCharacter, 1)
        Key = LineList[0].strip()
        if len(LineList) == 2:
            Value = LineList[1].strip()
        else:
            Value = ""

        if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                   DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
            # Path-valued keys: normalize separators.
            self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
            if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
                if self.TargetTxtDictionary[Key].startswith("Conf/"):
                    Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
                        # If Conf/Conf does not exist, try just the Conf/ directory
                        Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                else:
                    # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
                    Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                self.TargetTxtDictionary[Key] = Tools_Def
            if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
                if self.TargetTxtDictionary[Key].startswith("Conf/"):
                    Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
                        # If Conf/Conf does not exist, try just the Conf/ directory
                        Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                else:
                    # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
                    Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                self.TargetTxtDictionary[Key] = Build_Rule
        elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                     DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
            # Whitespace-separated list values.
            self.TargetTxtDictionary[Key] = Value.split()
        elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
            # Validate as an integer but store the raw string.
            try:
                V = int(Value, 0)
            except:
                EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value), File=FileName)
            self.TargetTxtDictionary[Key] = Value
        #elif Key not in GlobalData.gGlobalDefines:
        #    GlobalData.gGlobalDefines[Key] = Value

    F.close()
    return 0
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None):
    """Generate a version (EFI_SECTION_VERSION) section file.

    The version string comes from self.StringData if set, otherwise from
    the file named by self.FileName, otherwise it is empty.

    @param OutputPath     Directory in which to place the section file
    @param ModuleName     Module base name used in the output file name
    @param SecNum         Section number string used in the output file name
    @param KeyStringList  Tool key strings (unused here)
    @param FfsInf         FfsInfStatement used to expand macros, or None
    @param Dict           Macro dictionary for file-name expansion
    @return tuple (output file list, alignment)
    """
    #
    # Prepare the parameter of GenSection
    #
    # None default instead of a shared mutable {} default argument.
    if Dict is None:
        Dict = {}
    if FfsInf != None:
        self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
        self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
        self.StringData = FfsInf.__ExtendMacro__(self.StringData)
        self.FileName = FfsInf.__ExtendMacro__(self.FileName)

    OutputFile = os.path.join(OutputPath, ModuleName + "SEC" + SecNum + Ffs.SectionSuffix.get("VERSION"))
    OutputFile = os.path.normpath(OutputFile)

    # Get String Data
    StringData = ""
    if self.StringData != None:
        StringData = self.StringData
    elif self.FileName != None:
        FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
        FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
        FileObj = open(FileNameStr, "r")
        StringData = FileObj.read()
        StringData = '"' + StringData + '"'
        FileObj.close()
    else:
        StringData = ""

    GenFdsGlobalVariable.GenerateSection(
        OutputFile, None, "EFI_SECTION_VERSION", Ver=StringData, BuildNumber=self.BuildNum
    )
    OutputFileList = []
    OutputFileList.append(OutputFile)
    return OutputFileList, self.Alignment
def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
    """Generate the VFR/UNI offset binary section for a module.

    Collects the VFR-binary symbol names and the strings symbol from the
    module's generated C files, looks up their offsets in the module
    image, and writes a "GUID + 64-bit offset" record for each into
    OutputFile.

    @param ModuleName  Module base name
    @param DebugDir    Module DEBUG directory containing .efi/.map/.c files
    @param OutputFile  Binary output file to create
    """
    VfrNameList = []
    if os.path.isdir(DebugDir):
        for CurrentDir, Dirs, Files in os.walk(DebugDir):
            for FileName in Files:
                Name, Ext = os.path.splitext(FileName)
                if Ext == '.c' and Name != 'AutoGen':
                    VfrNameList.append(Name + 'Bin')

    VfrNameList.append(ModuleName + 'Strings')

    EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
    MapFileName = os.path.join(DebugDir, ModuleName + '.map')
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)

    if not VfrUniOffsetList:
        return

    try:
        fInputfile = open(OutputFile, "wb+")
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)

    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO()

    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
            fStringIO.write(UniGuid)
            UniValue = pack('Q', int(Item[1], 16))
            fStringIO.write(UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
            fStringIO.write(VfrGuid)
            # (Removed a stray no-op "type (Item[1])" expression that was
            # here in the original -- it had no effect.)
            VfrValue = pack('Q', int(Item[1], 16))
            fStringIO.write(VfrValue)

    #
    # write data into file.
    #
    try:
        fInputfile.write(fStringIO.getvalue())
    except:
        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %OutputFile, None)

    fStringIO.close()
    fInputfile.close()
def XmlParseFile(FileName):
    """Parse an XML file into a DOM document.

    @param FileName  Path of the XML file to parse.
    @return the xml.dom.minidom Document on success, or "" on any failure.
    """
    try:
        # with-statement closes the handle; "except Exception as X" is
        # the Python 3 syntax (the original "except Exception, X" and
        # "print X" are syntax errors on Python 3).
        with open(FileName) as XmlFile:
            Dom = xml.dom.minidom.parse(XmlFile)
        return Dom
    except Exception as X:
        print(X)
        return ""
def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
    """Recursively expand ASL include directives.

    Searches LocalSearchPath (if given) then IncludePathList for Source,
    reads it, and splices in the content of any included .asl/.asi file,
    carrying the accumulated indentation.  The module-level
    gIncludedAslFile stack is used to detect circular includes.

    NOTE(review): the mutable default IncludePathList=[] is only read
    here, never mutated, so the shared-default pitfall does not bite.

    @param Source           File name to expand
    @param Indent           Indentation prefix applied to every output line
    @param IncludePathList  Directories to search for include files
    @param LocalSearchPath  Directory searched first (#include "x" form)
    @return list of expanded output lines, or [] on circular include
    """
    NewFileContent = []
    try:
        #
        # Search LocalSearchPath first if it is specified.
        #
        if LocalSearchPath:
            SearchPathList = [LocalSearchPath] + IncludePathList
        else:
            SearchPathList = IncludePathList
        for IncludePath in SearchPathList:
            IncludeFile = os.path.join(IncludePath, Source)
            if os.path.isfile(IncludeFile):
                # Plain text read first; fall back to explicit utf-8.
                try:
                    with open(IncludeFile, "r") as File:
                        F = File.readlines()
                except:
                    with codecs.open(IncludeFile, "r", encoding='utf-8') as File:
                        F = File.readlines()
                break
        else:
            EdkLogger.error("Trim", "Failed to find include file %s" % Source)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)

    # avoid A "include" B and B "include" A
    IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
    if IncludeFile in gIncludedAslFile:
        EdkLogger.warn("Trim", "Circular include",
                       ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
        return []
    gIncludedAslFile.append(IncludeFile)

    for Line in F:
        LocalSearchPath = None
        Result = gAslIncludePattern.findall(Line)
        if len(Result) == 0:
            Result = gAslCIncludePattern.findall(Line)
            # Not an include, or includes a non-ASL file: emit as-is.
            if len(Result) == 0 or os.path.splitext(Result[0][1])[1].lower() not in [".asl", ".asi"]:
                NewFileContent.append("%s%s" % (Indent, Line))
                continue
            #
            # We should first search the local directory if current file are using pattern #include "XXX"
            #
            if Result[0][2] == '"':
                LocalSearchPath = os.path.dirname(IncludeFile)
        CurrentIndent = Indent + Result[0][0]
        IncludedFile = Result[0][1]
        NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList, LocalSearchPath))
        NewFileContent.append("\n")

    gIncludedAslFile.pop()
    return NewFileContent
def TrimAslFile(Source, Target, IncludePathFile):
    """Trim an ASL file: expand includes and prepend MIN/MAX undefs.

    @param Source           ASL file to trim
    @param Target           Output file to create
    @param IncludePathFile  Optional file listing /I or -I include paths
    """
    CreateDirectory(os.path.dirname(Target))

    SourceDir = os.path.dirname(Source)
    if SourceDir == '':
        SourceDir = '.'

    #
    # Add source directory as the first search directory
    #
    IncludePathList = [SourceDir]

    #
    # If additional include path file is specified, append them all
    # to the search directory list.
    #
    if IncludePathFile:
        try:
            LineNum = 0
            # with-statement closes the handle (the original iterated an
            # unclosed open() result and leaked it).
            with open(IncludePathFile, 'r') as IncludeFile:
                for Line in IncludeFile:
                    LineNum += 1
                    if Line.startswith("/I") or Line.startswith ("-I"):
                        IncludePathList.append(Line[2:].strip())
                    else:
                        EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum)
        except:
            EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile)

    Lines = DoInclude(Source, '', IncludePathList)

    #
    # Undef MIN and MAX to avoid collision in ASL source code
    #
    Lines.insert(0, "#undef MIN\n#undef MAX\n")

    # save all lines trimmed
    try:
        f = open (Target,'w')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.writelines(Lines)
    f.close()
def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
    """Rewrite the target configuration file with current values.

    Copies self.FileName line by line into Conf\\targetnew.txt,
    replacing the value of every key present in self.TargetTxtDictionary
    (or blanking it when Num == 0), appending any dictionary keys that
    were missing from the original file, then swapping the new file in
    place of the old one.

    @param CommentCharacter   Character that starts a comment line
    @param KeySplitCharacter  Character separating key from value
    @param Num                0 to blank values, otherwise write current values
    """
    try:
        fr = open(self.FileName, 'r')
        fw = open(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), 'w')

        # Keys already seen in the original file (for duplicate detection
        # and for deciding which keys must be appended at the end).
        existKeys = []
        for Line in fr:
            if Line.startswith(CommentCharacter) or Line.strip() == '':
                fw.write(Line)
            else:
                LineList = Line.split(KeySplitCharacter, 1)
                if len(LineList) >= 2:
                    Key = LineList[0].strip()
                    if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
                        if Key not in existKeys:
                            existKeys.append(Key)
                        else:
                            print("Warning: Found duplicate key item in original configuration files!")

                        if Num == 0:
                            # Blank the value, keep the key.
                            Line = "%-30s = \n" % Key
                        else:
                            ret = GetConfigureKeyValue(self, Key)
                            if ret is not None:
                                Line = ret
                fw.write(Line)
        # Append keys that were not present in the original file.
        for key in self.TargetTxtDictionary:
            if key not in existKeys:
                print("Warning: %s does not exist in original configuration file" % key)
                Line = GetConfigureKeyValue(self, key)
                if Line is None:
                    Line = "%-30s = " % key
                fw.write(Line)

        fr.close()
        fw.close()
        # Replace the original file with the rewritten one.
        os.remove(self.FileName)
        os.rename(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), self.FileName)

    except:
        last_type, last_value, last_tb = sys.exc_info()
        traceback.print_exception(last_type, last_value, last_tb)
def generatePcdTable(list, pcdpath): try: f = open(pcdpath, 'w') except: pass f.write('PCD Name Offset Section Name\r\n') for pcditem in list: f.write('%-30s 0x%-08X %-6s\r\n' % (pcditem[0], pcditem[1], pcditem[2])) f.close()
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    """Update a key/value text file from a dictionary.

    Existing lines whose key appears in Dictionary are rewritten with
    the dictionary value; keys not present in the file are appended.
    Values are either joined with spaces (ValueSplitFlag) or written
    verbatim.

    @param FileName            File to update (created if missing/unreadable)
    @param Dictionary          Key/value data to write
    @param CommentCharacter    Character that starts a comment line
    @param KeySplitCharacter   Character separating key from value
    @param ValueSplitFlag      True if values are sequences to be joined
    @param ValueSplitCharacter (unused here) value separator
    @return True on success, False when the file cannot be written
    """
    try:
        F = open(FileName, 'r')
        Lines = []
        Lines = F.readlines()
        F.close()
    except:
        # Missing/unreadable file: start from empty content.
        Lines = []

    # list() so Keys supports .remove() (dict.keys() is a view without
    # remove() on Python 3).
    Keys = list(Dictionary.keys())
    MaxLength = 0
    for Key in Keys:
        if len(Key) > MaxLength:
            MaxLength = len(Key)
    Index = 0
    # NOTE(review): Lines is mutated (pop/insert at the same index) while
    # being iterated; the length is preserved for matched keys so the
    # iteration stays aligned -- confirmed only for that path.
    for Line in Lines:
        LineList = Line.split(KeySplitCharacter, 1)
        if len(LineList) >= 2:
            Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
                if ValueSplitFlag:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, ' '.join(Dictionary[Key[0]]))
                else:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
                Lines.pop(Index)
                if Key[0] in Keys:
                    Lines.insert(Index, Line)
                    Keys.remove(Key[0])
        Index += 1
    # Append the dictionary keys that were not found in the file.
    for RemainingKey in Keys:
        if ValueSplitFlag:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))
        else:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
        Lines.append(Line)
    try:
        F = open(FileName, 'w')
    except:
        return False
    F.writelines(Lines)
    F.close()
    return True
def __init__(self, FileName):
    """Load the raw lines of FileName for later processing.

    @param FileName  Path of the file to read (binary, unbuffered).
    Raises Warning when the file cannot be opened or read.
    """
    self.FileLinesList = []
    self.FileLinesListFromFile = []
    try:
        # Unbuffered binary read; the with-statement guarantees the
        # handle is closed (same effect as the original try/finally).
        with open(FileName, "rb", 0) as fsock:
            self.FileLinesListFromFile = fsock.readlines()
    except IOError:
        raise Warning("Error when opening file %s" % FileName)
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
    """Verify that a referenced file has the expected extension.

    Logs a parser error (which may raise) when CheckFilename's extension
    differs from ExtName; empty or None file names are accepted.

    @param CheckFilename      File name found in the section
    @param ExtName            Expected extension (case-insensitive)
    @param ContainerFilename  File containing the statement
    @param SectionName        Section in which the statement appears
    @param Line               The statement text (used to find the line)
    @param LineNo             Known line number, or -1 to search for it
    @return True (always, when no error is raised)
    """
    # Nothing to check for an empty or missing file name.
    if CheckFilename == '' or CheckFilename == None:
        return True

    Ext = os.path.splitext(CheckFilename)[1]
    if Ext.upper() == ExtName.upper():
        return True

    # Extension mismatch: locate the statement and report the error.
    ContainerFile = open(ContainerFilename, 'r').read()
    if LineNo == -1:
        LineNo = GetLineNo(ContainerFile, Line)
    ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName)
    EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo,
                    File=ContainerFilename, RaiseError=EdkLogger.IsRaiseError)

    return True
return Target # This acts like the main() function for the script, unless it is 'import'ed into another # script. if __name__ == '__main__': EdkLogger.info('start') UniFileList = [ r'C:\\Edk\\Strings2.uni', r'C:\\Edk\\Strings.uni' ] SrcFileList = [] for Root, Dirs, Files in os.walk('C:\\Edk'): for File in Files: SrcFileList.append(File) IncludeList = [ r'C:\\Edk' ] SkipList = ['.inf', '.uni'] BaseName = 'DriverSample' (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, SkipList, BaseName, True) hfile = open('unistring.h', 'w') cfile = open('unistring.c', 'w') hfile.write(h) cfile.write(c) EdkLogger.info('end')
def TrimPreprocessedVfr(Source, Target):
    """Trim a preprocessed VFR file.

    Blanks preprocessor line-number directives and everything that is not
    a "typedef struct", a "#pragma pack" directive, or the formset body,
    then writes the result to Target.  Typedefs for GUID, EFI_PLABEL and
    PAL_CALL_RETURN are blanked as well.

    @param Source  File to be trimmed
    @param Target  File to store the trimmed content
    """
    CreateDirectory(os.path.dirname(Target))

    try:
        f = open(Source, 'r')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    # read whole file
    Lines = f.readlines()
    f.close()

    # State for tracking an in-progress "typedef struct" span.
    FoundTypedef = False
    Brace = 0
    TypedefStart = 0
    TypedefEnd = 0
    for Index in range(len(Lines)):
        Line = Lines[Index]
        # don't trim the lines from "formset" definition to the end of file
        if Line.strip() == 'formset':
            break

        if FoundTypedef == False and (Line.find('#line') == 0 or Line.find('# ') == 0):
            # empty the line number directive if it's not aomong "typedef struct"
            Lines[Index] = "\n"
            continue

        if FoundTypedef == False and gTypedefPattern.search(Line) is None:
            # keep "#pragram pack" directive
            if gPragmaPattern.search(Line) is None:
                Lines[Index] = "\n"
            continue
        elif FoundTypedef == False:
            # found "typedef struct", keept its position and set a flag
            FoundTypedef = True
            TypedefStart = Index

        # match { and } to find the end of typedef definition
        if Line.find("{") >= 0:
            Brace += 1
        elif Line.find("}") >= 0:
            Brace -= 1

        # "typedef struct" must end with a ";"
        if Brace == 0 and Line.find(";") >= 0:
            FoundTypedef = False
            TypedefEnd = Index
            # keep all "typedef struct" except to GUID, EFI_PLABEL and PAL_CALL_RETURN
            if Line.strip("} ;\r\n") in [ TAB_GUID, "EFI_PLABEL", "PAL_CALL_RETURN" ]:
                for i in range(TypedefStart, TypedefEnd + 1):
                    Lines[i] = "\n"

    # save all lines trimmed
    try:
        f = open(Target, 'w')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.writelines(Lines)
    f.close()
def GenerateVpdFile(self, MapFileName, BinFileName):
    """Write the VPD binary (BinFileName) and its map file (MapFileName).

    Iterates self.PcdFixedOffsetSizeList, emitting one map line per PCD and
    patching each PCD's value at PcdBinOffset into an in-memory buffer that
    is flushed to the binary file at the end.

    NOTE(review): this is Python-2-era I/O — BytesIO('') takes a str, the
    text-mode open with buffering=0 is rejected by Python 3, and
    chr()-joined values only work as byte strings on Python 2. Confirm the
    interpreter version before porting.
    """
    # Open an VPD file to process
    try:
        fVpdFile = open(BinFileName, "wb", 0)
    except:
        # Open failed; EdkLogger.error raises
        EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE,
                        "File open failed for %s" % self.VpdFileName, None)

    try:
        fMapFile = open(MapFileName, "w", 0)
    except:
        # Open failed
        EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE,
                        "File open failed for %s" % self.MapFileName, None)

    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO('')

    # Write the header of map file.
    try:
        fMapFile.write(st.MAP_FILE_COMMENT_TEMPLATE + "\n")
    except:
        EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE,
                        "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

    for eachPcd in self.PcdFixedOffsetSizeList:
        # write map file: one "name | sku | offset | size | value" row per PCD
        try:
            fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
        except:
            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE,
                            "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

        # Write Vpd binary file: seek to the PCD's offset and emit its value
        fStringIO.seek(eachPcd.PcdBinOffset)
        if isinstance(eachPcd.PcdValue, list):
            # value given as a list of byte ints — convert to a byte string
            ValueList = [chr(Item) for Item in eachPcd.PcdValue]
            fStringIO.write(''.join(ValueList))
        else:
            fStringIO.write(eachPcd.PcdValue)

    # Flush the assembled image to the binary file in one write
    try:
        fVpdFile.write(fStringIO.getvalue())
    except:
        EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE,
                        "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)

    fStringIO.close()
    fVpdFile.close()
    fMapFile.close()
} </Script> </head> <body> <table width="100%%" border="1">""" self.WriteLn(Header) ## # # This acts like the main() function for the script, unless it is 'import'ed into another # script. # if __name__ == '__main__': # Initialize log system FilePath = 'FVRECOVERYFLOPPY.fv' if FilePath.lower().endswith(".fv"): fd = open(FilePath, 'rb') buf = array('B') try: buf.fromfile(fd, os.path.getsize(FilePath)) except EOFError: pass fv = FirmwareVolume("FVRECOVERY", buf, 0) report = Report('Report.html', fv) report.GenerateReport()
def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
    """Trim a C-preprocessor output file back to the original file's content.

    Uses the preprocessor's line-control directives ("#line"/linemarkers) to
    keep only lines belonging to the original source file, optionally
    rewriting long-integer suffixes and hex-number formats for assemblers.

    Source     -- preprocessed input file path
    Target     -- output file path (directory is created)
    ConvertHex -- True: rewrite 0x1234 as 01234h; False: strip the 0x prefix handling differently
    TrimLong   -- True: strip L/UL suffixes from long numbers
    Raises via EdkLogger.error (FILE_OPEN_FAILURE) on open failure.
    """
    CreateDirectory(os.path.dirname(Target))
    try:
        f = open(Source, 'r')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)

    # read whole file
    Lines = f.readlines()
    f.close()

    PreprocessedFile = ""
    InjectedFile = ""
    LineIndexOfOriginalFile = None
    NewLines = []
    LineControlDirectiveFound = False
    # NOTE(review): LineNumber is first assigned inside the directive match
    # below; the flow relies on a directive appearing before any kept line —
    # otherwise the `LineNumber is not None` test would raise NameError. Confirm.
    for Index in range(len(Lines)):
        Line = Lines[Index]
        #
        # Find out the name of files injected by preprocessor from the lines
        # with Line Control directive
        #
        MatchList = gLineControlDirective.findall(Line)
        if MatchList != []:
            MatchList = MatchList[0]
            if len(MatchList) == 2:
                LineNumber = int(MatchList[0], 0)
                InjectedFile = MatchList[1]
                InjectedFile = os.path.normpath(InjectedFile)
                InjectedFile = os.path.normcase(InjectedFile)
                # The first injected file must be the preprocessed file itself
                if PreprocessedFile == "":
                    PreprocessedFile = InjectedFile
            LineControlDirectiveFound = True
            continue
        elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
            # skip lines that belong to some other injected (included) file
            continue

        if LineIndexOfOriginalFile is None:
            #
            # Any non-empty lines must be from original preprocessed file.
            # And this must be the first one.
            #
            LineIndexOfOriginalFile = Index
            EdkLogger.verbose("Found original file content starting from line %d" % (LineIndexOfOriginalFile + 1))

        if TrimLong:
            Line = gLongNumberPattern.sub(r"\1", Line)
        # convert HEX number format if indicated
        if ConvertHex:
            Line = gHexNumberPattern.sub(r"0\2h", Line)
        else:
            Line = gHexNumberPattern.sub(r"\1\2", Line)

        # convert Decimal number format
        Line = gDecNumberPattern.sub(r"\1", Line)

        if LineNumber is not None:
            EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
            # in case preprocessor removed some lines, like blank or comment lines
            if LineNumber <= len(NewLines):
                # possible?
                NewLines[LineNumber - 1] = Line
            else:
                # pad with blank lines up to the directive's line number
                if LineNumber > (len(NewLines) + 1):
                    for LineIndex in range(len(NewLines), LineNumber - 1):
                        NewLines.append(TAB_LINE_BREAK)
                NewLines.append(Line)
            LineNumber = None
            EdkLogger.verbose("Now we have lines: %d" % len(NewLines))
        else:
            NewLines.append(Line)

    # in case there's no line directive or linemarker found:
    # fall back to stripping typedefs/pragmas textually
    if (not LineControlDirectiveFound) and NewLines == []:
        MulPatternFlag = False      # inside a multi-line typedef/struct/union
        SinglePatternFlag = False   # inside a single typedef statement
        Brace = 0                   # brace nesting depth of the current typedef
        for Index in range(len(Lines)):
            Line = Lines[Index]
            if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
                if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
                    # remove "#pragma pack" directive
                    if gPragmaPattern.search(Line) is None:
                        NewLines.append(Line)
                    continue
                elif SinglePatternFlag == False:
                    SinglePatternFlag = True
                if Line.find(";") >= 0:
                    SinglePatternFlag = False
            elif MulPatternFlag == False:
                # found "typedef struct, typedef union, union, struct", keep its position and set a flag
                MulPatternFlag = True

            # match { and } to find the end of typedef definition
            if Line.find("{") >= 0:
                Brace += 1
            elif Line.find("}") >= 0:
                Brace -= 1

            # "typedef struct, typedef union, union, struct" must end with a ";"
            if Brace == 0 and Line.find(";") >= 0:
                MulPatternFlag = False

    # save to file
    try:
        f = open(Target, 'w')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.writelines(NewLines)
    f.close()
def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
    """Parse a target.txt-style key/value file into self.TargetTxtDictionary.

    FileName          -- path of the configuration file to parse
    CommentCharacter  -- lines beginning with this character are skipped
    KeySplitCharacter -- separator between key and value (split once)

    Path-valued keys (TOOL_CHAIN_CONF, BUILD_RULE_CONF) are resolved
    relative to the file's Conf directory. Returns 0 on success; raises via
    EdkLogger.error on open failure or an invalid thread-count value.
    """
    F = None
    try:
        F = open(FileName, 'r')
        self.ConfDirectoryPath = os.path.dirname(FileName)
    except:
        # BUGFIX: the close used to run unconditionally after the try block,
        # so the loop below iterated over an already-closed file. Close only
        # when open succeeded but a later statement failed; EdkLogger.error
        # raises, ending the method here.
        if F is not None:
            F.close()
        EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)

    for Line in F:
        Line = Line.strip()
        # skip comments and blank lines
        if Line.startswith(CommentCharacter) or Line == '':
            continue

        LineList = Line.split(KeySplitCharacter, 1)
        Key = LineList[0].strip()
        if len(LineList) == 2:
            Value = LineList[1].strip()
        else:
            Value = ""

        if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                   DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
            # normalize to forward slashes for cross-platform paths
            self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
            if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
                if self.TargetTxtDictionary[Key].startswith("Conf/"):
                    Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
                        # If Conf/Conf does not exist, try just the Conf/ directory
                        Tools_Def = os.path.join(self.ConfDirectoryPath,
                                                 self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                else:
                    # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
                    Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                self.TargetTxtDictionary[Key] = Tools_Def
            if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
                if self.TargetTxtDictionary[Key].startswith("Conf/"):
                    Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
                        # If Conf/Conf does not exist, try just the Conf/ directory
                        Build_Rule = os.path.join(self.ConfDirectoryPath,
                                                  self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                else:
                    # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
                    Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                self.TargetTxtDictionary[Key] = Build_Rule
        elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                     DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
            # list-valued keys: whitespace-separated tokens
            self.TargetTxtDictionary[Key] = Value.split()
        elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
            # validate the value parses as a number, but store the raw string
            try:
                V = int(Value, 0)
            except:
                EdkLogger.error("build", FORMAT_INVALID,
                                "Invalid number of [%s]: %s." % (Key, Value), File=FileName)
            self.TargetTxtDictionary[Key] = Value
        #elif Key not in GlobalData.gGlobalDefines:
        #    GlobalData.gGlobalDefines[Key] = Value

    F.close()
    return 0
def CreateFile(Directory, FileName, Mode='w'):
    """Ensure Directory exists, then open FileName inside it with Mode."""
    CreateDirectory(Directory)
    FullPath = os.path.join(Directory, FileName)
    return open(FullPath, Mode)
def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None):
    """Recursively expand ASL/ASI include statements in Source.

    Source          -- file name to locate on the search paths and expand
    Indent          -- prefix prepended to every emitted line (accumulates per nesting level)
    IncludePathList -- directories to search for Source
    LocalSearchPath -- directory searched first (set for #include "..." style)
    Returns the expanded lines; returns [] on a circular include (after a warning).

    NOTE(review): IncludePathList=[] is a mutable default argument — safe only
    because the list is never mutated here; confirm before changing.
    Uses the module-global gIncludedAslFile as the active include stack.
    """
    NewFileContent = []

    try:
        #
        # Search LocalSearchPath first if it is specified.
        #
        if LocalSearchPath:
            SearchPathList = [LocalSearchPath] + IncludePathList
        else:
            SearchPathList = IncludePathList
        for IncludePath in SearchPathList:
            IncludeFile = os.path.join(IncludePath, Source)
            if os.path.isfile(IncludeFile):
                # fall back to explicit UTF-8 decoding if the default-encoding read fails
                try:
                    with open(IncludeFile, "r") as File:
                        F = File.readlines()
                except:
                    with codecs.open(IncludeFile, "r", encoding='utf-8') as File:
                        F = File.readlines()
                break
        else:
            # for-else: no search path contained the file
            EdkLogger.error("Trim", "Failed to find include file %s" % Source)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)

    # avoid A "include" B and B "include" A
    IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
    if IncludeFile in gIncludedAslFile:
        EdkLogger.warn("Trim", "Circular include",
                       ExtraData="%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
        return []
    gIncludedAslFile.append(IncludeFile)

    for Line in F:
        LocalSearchPath = None
        Result = gAslIncludePattern.findall(Line)
        if len(Result) == 0:
            # not an ASL-style include; try the C-style #include form
            Result = gAslCIncludePattern.findall(Line)
            if len(Result) == 0 or os.path.splitext(Result[0][1])[1].lower() not in [".asl", ".asi"]:
                # plain line (or a C include of a non-ASL file): emit as-is with indent
                NewFileContent.append("%s%s" % (Indent, Line))
                continue
            #
            # We should first search the local directory if current file are using pattern #include "XXX"
            #
            if Result[0][2] == '"':
                LocalSearchPath = os.path.dirname(IncludeFile)
        CurrentIndent = Indent + Result[0][0]
        IncludedFile = Result[0][1]
        # recurse into the included file, accumulating indentation
        NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList, LocalSearchPath))
        NewFileContent.append("\n")

    # pop this file off the include stack before returning to the parent
    gIncludedAslFile.pop()

    return NewFileContent
def SetEnv(FdfParser, WorkSpace, ArchList, GlobalData):
    """Initialize GenFdsGlobalVariable state from the workspace and globals.

    Populates per-arch output directories, platform name, creates the FV/Ffs
    output directories, and writes the FvAddress.inf file containing the boot
    and runtime driver base addresses taken from the platform description.

    FdfParser  -- parsed FDF description
    WorkSpace  -- workspace object (provides .Db build database and .ModuleFile)
    ArchList   -- list of architectures to build for (first entry picks FvDir)
    GlobalData -- module with gGlobalDefines and other global build settings
    """
    GenFdsGlobalVariable.ModuleFile = WorkSpace.ModuleFile
    GenFdsGlobalVariable.FdfParser = FdfParser
    GenFdsGlobalVariable.WorkSpace = WorkSpace.Db
    GenFdsGlobalVariable.ArchList = ArchList
    GenFdsGlobalVariable.ToolChainTag = GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]
    GenFdsGlobalVariable.TargetName = GlobalData.gGlobalDefines["TARGET"]
    GenFdsGlobalVariable.ActivePlatform = GlobalData.gActivePlatform
    GenFdsGlobalVariable.ConfDir = GlobalData.gConfDirectory
    GenFdsGlobalVariable.EnableGenfdsMultiThread = GlobalData.gEnableGenfdsMultiThread
    for Arch in ArchList:
        # <workspace>/<platform output dir>/<TARGET>_<TOOLCHAIN>
        GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.normpath(
            os.path.join(GlobalData.gWorkspace,
                         WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                                  GlobalData.gGlobalDefines['TARGET'],
                                                  GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory,
                         GlobalData.gGlobalDefines['TARGET'] + '_' + GlobalData.gGlobalDefines['TOOLCHAIN']))
        GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = os.path.normpath(
            WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                     GlobalData.gGlobalDefines['TARGET'],
                                     GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory)
        # NOTE(review): PlatformName is overwritten each iteration; the last
        # arch wins — presumably identical across arches, confirm.
        GenFdsGlobalVariable.PlatformName = WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                                                     GlobalData.gGlobalDefines['TARGET'],
                                                                     GlobalData.gGlobalDefines['TOOLCHAIN']].PlatformName
    GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], DataType.TAB_FV_DIRECTORY)
    if not os.path.exists(GenFdsGlobalVariable.FvDir):
        os.makedirs(GenFdsGlobalVariable.FvDir)
    GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
    if not os.path.exists(GenFdsGlobalVariable.FfsDir):
        os.makedirs(GenFdsGlobalVariable.FfsDir)

    #
    # Create FV Address inf file
    #
    GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
    FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
    #
    # Add [Options]
    #
    FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
    # first arch that declares a boot-services base address wins
    BsAddress = '0'
    for Arch in ArchList:
        BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                                               GlobalData.gGlobalDefines['TARGET'],
                                                               GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].BsBaseAddress
        if BsAddress:
            break
    FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
                             BsAddress + \
                             DataType.TAB_LINE_BREAK)
    # last arch (searched in reverse) that declares a runtime base address wins
    RtAddress = '0'
    for Arch in reversed(ArchList):
        temp = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                                          GlobalData.gGlobalDefines['TARGET'],
                                                          GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].RtBaseAddress
        if temp:
            RtAddress = temp
            break
    FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
                             RtAddress + \
                             DataType.TAB_LINE_BREAK)
    FvAddressFile.close()
def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
    """Generate the VFR binary section file for a module.

    Collects the symbol names of every generated VFR binary (and the module's
    strings table), looks up their offsets in the module's .map/.efi files,
    and writes (GUID, 64-bit offset) records to OutputFile.

    ModuleName -- base name of the module
    DebugDir   -- module DEBUG directory holding the .efi/.map and generated .c files
    OutputFile -- destination file for the packed offset records
    Returns early (writes nothing) when no offsets are found.

    NOTE(review): Python-2-era code — BytesIO('') and the chr()-joined GUID
    byte lists only work with Python 2 byte strings; confirm interpreter.
    """
    VfrNameList = []
    if os.path.isdir(DebugDir):
        # every generated .c (except AutoGen.c) corresponds to a <Name>Bin symbol
        for CurrentDir, Dirs, Files in os.walk(DebugDir):
            for FileName in Files:
                Name, Ext = os.path.splitext(FileName)
                if Ext == '.c' and Name != 'AutoGen':
                    VfrNameList.append(Name + 'Bin')

    VfrNameList.append(ModuleName + 'Strings')

    EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
    MapFileName = os.path.join(DebugDir, ModuleName + '.map')
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)

    if not VfrUniOffsetList:
        # nothing to emit
        return

    try:
        fInputfile = open(OutputFile, "wb+", 0)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)

    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO('')

    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
            UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
            fStringIO.write(''.join(UniGuid))
            UniValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
            VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
            fStringIO.write(''.join(VfrGuid))
            # NOTE(review): the next call is a no-op (result discarded) —
            # looks like leftover debugging; harmless.
            type (Item[1])
            VfrValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (VfrValue)

    #
    # write data into file.
    #
    try :
        fInputfile.write (fStringIO.getvalue())
    except:
        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %OutputFile, None)

    fStringIO.close ()
    fInputfile.close ()
def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
    """Patch a PCD value into a binary file at a given offset.

    FileName    -- binary file to patch in place
    ValueOffset -- byte offset of the PCD value inside the file
    TypeName    -- PCD type name (BOOLEAN, UINT8/16/32/64, VOID*)
    ValueString -- value text: number, 'TRUE'/'FALSE', L"...", "...", or {0x..,..}
    MaxSize     -- required byte size for VOID* PCDs

    Returns (0, message) on success or (error-code, message) on failure.
    The file is rewritten only when the patched bytes actually differ.
    """
    #
    # Length of Binary File
    #
    FileHandle = open(FileName, 'rb')
    FileHandle.seek(0, 2)
    FileLength = FileHandle.tell()
    FileHandle.close()
    #
    # Unify string to upper string
    #
    TypeName = TypeName.upper()
    #
    # Get PCD value data length
    #
    ValueLength = 0
    if TypeName == 'BOOLEAN':
        ValueLength = 1
    elif TypeName == TAB_UINT8:
        ValueLength = 1
    elif TypeName == TAB_UINT16:
        ValueLength = 2
    elif TypeName == TAB_UINT32:
        ValueLength = 4
    elif TypeName == TAB_UINT64:
        ValueLength = 8
    elif TypeName == TAB_VOID:
        if MaxSize == 0:
            return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
        ValueLength = int(MaxSize)
    else:
        return PARAMETER_INVALID, "PCD type %s is not valid." % (CommandOptions.PcdTypeName)
    #
    # Check PcdValue is in the input binary file.
    #
    if ValueOffset + ValueLength > FileLength:
        return PARAMETER_INVALID, "PcdOffset + PcdMaxSize(DataType) is larger than the input file size."
    #
    # Read binary file into array
    #
    FileHandle = open(FileName, 'rb')
    ByteArray = array.array('B')
    ByteArray.fromfile(FileHandle, FileLength)
    FileHandle.close()
    OrigByteList = ByteArray.tolist()
    ByteList = ByteArray.tolist()
    #
    # Clear the data in file
    #
    for Index in range(ValueLength):
        ByteList[ValueOffset + Index] = 0
    #
    # Patch value into offset
    #
    SavedStr = ValueString
    ValueString = ValueString.upper()
    ValueNumber = 0
    if TypeName == 'BOOLEAN':
        #
        # Get PCD value for BOOLEAN data type
        #
        try:
            if ValueString == 'TRUE':
                ValueNumber = 1
            elif ValueString == 'FALSE':
                ValueNumber = 0
            else:
                # BUGFIX: this branch was previously reached even for
                # 'TRUE'/'FALSE', making int() raise and the valid literals
                # be rejected as "not valid dec or hex string".
                ValueNumber = int(ValueString, 0)
            # normalize any non-zero numeric value to 1
            if ValueNumber != 0:
                ValueNumber = 1
        except:
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
        #
        # Set PCD value into binary data
        #
        ByteList[ValueOffset] = ValueNumber
    elif TypeName in TAB_PCD_CLEAN_NUMERIC_TYPES:
        #
        # Get PCD value for UINT* data type
        #
        try:
            ValueNumber = int(ValueString, 0)
        except:
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
        #
        # Set PCD value into binary data, little-endian byte by byte
        #
        for Index in range(ValueLength):
            ByteList[ValueOffset + Index] = ValueNumber % 0x100
            ValueNumber = ValueNumber // 0x100
    elif TypeName == TAB_VOID:
        ValueString = SavedStr
        if ValueString.startswith('L"'):
            #
            # Patch Unicode String (UTF-16LE: one char per 2 bytes)
            #
            Index = 0
            for ByteString in ValueString[2:-1]:
                #
                # Reserve zero as unicode tail
                #
                if Index + 2 >= ValueLength:
                    break
                #
                # Set string value one by one/ 0x100
                #
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 2
        elif ValueString.startswith("{") and ValueString.endswith("}"):
            #
            # Patch {0x1, 0x2, ...} byte by byte
            #
            ValueList = ValueString[1:len(ValueString) - 1].split(',')
            Index = 0
            try:
                for ByteString in ValueList:
                    ByteString = ByteString.strip()
                    if ByteString.upper().startswith('0X'):
                        ByteValue = int(ByteString, 16)
                    else:
                        ByteValue = int(ByteString)
                    ByteList[ValueOffset + Index] = ByteValue % 0x100
                    Index = Index + 1
                    if Index >= ValueLength:
                        break
            except:
                return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
        else:
            #
            # Patch ascii string
            #
            Index = 0
            for ByteString in ValueString[1:-1]:
                #
                # Reserve zero as string tail
                #
                if Index + 1 >= ValueLength:
                    break
                #
                # Set string value one by one
                #
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 1
    #
    # Update new data into input file only when something changed.
    #
    if ByteList != OrigByteList:
        ByteArray = array.array('B')
        ByteArray.fromlist(ByteList)
        FileHandle = open(FileName, 'wb')
        ByteArray.tofile(FileHandle)
        FileHandle.close()
    return 0, "Patch Value into File %s successfully." % (FileName)
def ParseGuidList(self, GuidList):
    """Load GUID name/value pairs from a whitespace-separated file into
    EotGlobalData.gGuidDict; does nothing when the file does not exist."""
    GuidListPath = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
    if not os.path.isfile(GuidListPath):
        return
    for Entry in open(GuidListPath):
        Name, Value = Entry.split()
        EotGlobalData.gGuidDict[Name] = Value
def AddToBuffer(self, Buffer, BaseAddress=None, BlockSize=None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict={}):
    """Generate this FV's image (via the GenFv tool) and append it to Buffer.

    Generates FFS files for the Apriori sections and module list, writes the
    FV .inf control file, invokes GenFv (twice when child FV base addresses
    were discovered), then copies the result into Buffer and derives
    self.FvAlignment from the FV header. Returns the FV output file path;
    results are cached in GenFds.ImageBinDict keyed on '<FVNAME>fv'.

    NOTE(review): MacroDict={} is a mutable default and is mutated via
    update(); state leaks across calls that rely on the default — confirm
    this is intended. ord(FvHeaderBuffer[0x2E]) and the '/' divisions are
    Python-2 semantics.
    """
    # Return the cached image when this FV was already generated and the
    # caller did not force a base address
    if BaseAddress == None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
        return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']

    #
    # Check whether FV in Capsule is in FD flash region.
    # If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
    #
    if self.CapsuleName != None:
        for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
            FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
            for RegionObj in FdObj.RegionList:
                if RegionObj.RegionType == 'FV':
                    for RegionData in RegionObj.RegionDataList:
                        if RegionData.endswith(".fv"):
                            continue
                        elif RegionData.upper() + 'fv' in GenFds.ImageBinDict.keys():
                            continue
                        elif self.UiFvName.upper() == RegionData.upper():
                            GenFdsGlobalVariable.ErrorLogger(
                                "Capsule %s in FD region can't contain a FV %s in FD region."
                                % (self.CapsuleName, self.UiFvName.upper()))

    GenFdsGlobalVariable.InfLogger("\nGenerating %s FV" % self.UiFvName)
    GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
    FFSGuid = None

    if self.FvBaseAddress != None:
        BaseAddress = self.FvBaseAddress

    self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
    #
    # First Process the Apriori section
    #
    MacroDict.update(self.DefineVarDict)

    GenFdsGlobalVariable.VerboseLogger('First generate Apriori file !')
    FfsFileList = []
    for AprSection in self.AprioriSectionList:
        FileName = AprSection.GenFfs(self.UiFvName, MacroDict)
        FfsFileList.append(FileName)
        # Add Apriori file name to Inf file
        self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                  FileName + \
                                  T_CHAR_LF)

    # Process Modules in FfsList
    for FfsFile in self.FfsList:
        FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress)
        FfsFileList.append(FileName)
        self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                  FileName + \
                                  T_CHAR_LF)

    SaveFileOnChange(self.InfFileName, self.FvInfFile.getvalue(), False)
    self.FvInfFile.close()
    #
    # Call GenFv tool
    #
    FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
    FvOutputFile = FvOutputFile + '.Fv'
    # BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
    if self.CreateFileName != None:
        FvOutputFile = self.CreateFileName

    FvInfoFileName = os.path.join(GenFdsGlobalVariable.FfsDir, self.UiFvName + '.inf')
    CopyLongFilePath(GenFdsGlobalVariable.FvAddressFileName, FvInfoFileName)
    OrigFvInfo = None
    if os.path.exists(FvInfoFileName):
        OrigFvInfo = open(FvInfoFileName, 'r').read()
    if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
        FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
    GenFdsGlobalVariable.GenerateFirmwareVolume(
        FvOutputFile,
        [self.InfFileName],
        AddressFile=FvInfoFileName,
        FfsList=FfsFileList,
        ForceRebase=self.FvForceRebase,
        FileSystemGuid=FFSGuid)

    # If GenFv updated the address file, child FVs got base addresses:
    # regenerate the FFS files and rerun GenFv with those addresses.
    NewFvInfo = None
    if os.path.exists(FvInfoFileName):
        NewFvInfo = open(FvInfoFileName, 'r').read()
    if NewFvInfo != None and NewFvInfo != OrigFvInfo:
        FvChildAddr = []
        AddFileObj = open(FvInfoFileName, 'r')
        AddrStrings = AddFileObj.readlines()
        AddrKeyFound = False
        for AddrString in AddrStrings:
            if AddrKeyFound:
                #get base address for the inside FvImage
                FvChildAddr.append(AddrString)
            elif AddrString.find("[FV_BASE_ADDRESS]") != -1:
                AddrKeyFound = True
        AddFileObj.close()

        if FvChildAddr != []:
            # Update Ffs again
            for FfsFile in self.FfsList:
                FileName = FfsFile.GenFfs(MacroDict, FvChildAddr, BaseAddress)

            if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
                FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
            #Update GenFv again
            GenFdsGlobalVariable.GenerateFirmwareVolume(
                FvOutputFile,
                [self.InfFileName],
                AddressFile=FvInfoFileName,
                FfsList=FfsFileList,
                ForceRebase=self.FvForceRebase,
                FileSystemGuid=FFSGuid)

    #
    # Write the Fv contents to Buffer
    #
    FvFileObj = open(FvOutputFile, 'r+b')

    GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)
    GenFdsGlobalVariable.SharpCounter = 0

    Buffer.write(FvFileObj.read())
    FvFileObj.seek(0)
    # PI FvHeader is 0x48 byte
    FvHeaderBuffer = FvFileObj.read(0x48)
    # FV alignment position: low 5 bits of the attributes byte at offset 0x2E
    FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
    # FvAlignmentValue is larger than or equal to 1K
    if FvAlignmentValue >= 0x400:
        if FvAlignmentValue >= 0x10000:
            #The max alignment supported by FFS is 64K.
            self.FvAlignment = "64K"
        else:
            self.FvAlignment = str(FvAlignmentValue / 0x400) + "K"
    else:
        # FvAlignmentValue is less than 1K
        self.FvAlignment = str(FvAlignmentValue)
    FvFileObj.close()
    GenFds.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
    GenFdsGlobalVariable.LargeFileInFvFlags.pop()
    return FvOutputFile
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}):
    """Generate a GUID-defined section, optionally via an external GUIDed tool.

    Generates all child sections, then either produces a default CRC32
    section (no NameGuid), errors out (GUID with no registered tool), or
    feeds the concatenated child data through the external tool and wraps
    the result with a GUIDED section header.

    OutputPath/ModuleName/SecNum -- locate and name the output section file
    KeyStringList -- tool-chain key strings used to find the external tool
    FfsInf        -- owning INF object; used to expand macros in attributes
    Dict          -- macro dictionary passed down to child sections
    Returns (OutputFileList, Alignment).

    NOTE(review): Dict={} is a mutable default argument — it appears to be
    only passed through, not mutated here; confirm.
    """
    #
    # Generate all section
    #
    self.KeyStringList = KeyStringList
    self.CurrentArchList = GenFdsGlobalVariable.ArchList
    if FfsInf != None:
        self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
        self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
        self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
        self.CurrentArchList = [FfsInf.CurrentArch]

    SectFile = tuple()
    SectAlign = []
    Index = 0
    MaxAlign = None
    # remember whether FV addresses were supplied before they may be cleared below
    if self.FvAddr != []:
        FvAddrIsSet = True
    else:
        FvAddrIsSet = False

    if self.ProcessRequired in ("TRUE", "1"):
        if self.FvAddr != []:
            #no use FvAddr when the image is processed.
            self.FvAddr = []
        if self.FvParentAddr != None:
            #no use Parent Addr when the image is processed.
            self.FvParentAddr = None

    for Sect in self.SectionList:
        Index = Index + 1
        SecIndex = '%s.%d' % (SecNum, Index)
        # set base address for inside FvImage
        if isinstance(Sect, FvImageSection):
            if self.FvAddr != []:
                Sect.FvAddr = self.FvAddr.pop(0)
            self.IncludeFvSection = True
        elif isinstance(Sect, GuidSection):
            Sect.FvAddr = self.FvAddr
            Sect.FvParentAddr = self.FvParentAddr
        ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict)
        if isinstance(Sect, GuidSection):
            if Sect.IncludeFvSection:
                self.IncludeFvSection = Sect.IncludeFvSection

        # track the maximum child alignment
        if align != None:
            if MaxAlign == None:
                MaxAlign = align
            if GenFdsGlobalVariable.GetAlignment(align) > GenFdsGlobalVariable.GetAlignment(MaxAlign):
                MaxAlign = align

        if ReturnSectList != []:
            if align == None:
                align = "1"
            for file in ReturnSectList:
                SectFile += (file, )
                SectAlign.append(align)

    # propagate the largest child alignment to this section
    if MaxAlign != None:
        if self.Alignment == None:
            self.Alignment = MaxAlign
        else:
            if GenFdsGlobalVariable.GetAlignment(MaxAlign) > GenFdsGlobalVariable.GetAlignment(self.Alignment):
                self.Alignment = MaxAlign

    OutputFile = OutputPath + \
                 os.sep + \
                 ModuleName + \
                 'SEC' + \
                 SecNum + \
                 Ffs.SectionSuffix['GUIDED']
    OutputFile = os.path.normpath(OutputFile)

    ExternalTool = None
    ExternalOption = None
    if self.NameGuid != None:
        ExternalTool, ExternalOption = FindExtendTool(self.KeyStringList, self.CurrentArchList, self.NameGuid)

    #
    # If not have GUID , call default
    # GENCRC32 section
    #
    if self.NameGuid == None:
        GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
        GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign)
        OutputFileList = []
        OutputFileList.append(OutputFile)
        return OutputFileList, self.Alignment
    #or GUID not in External Tool List
    elif ExternalTool == None:
        EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
    else:
        DummyFile = OutputFile + ".dummy"
        #
        # Call GenSection with DUMMY section type.
        #
        GenFdsGlobalVariable.GenerateSection(DummyFile, SectFile, InputAlign=SectAlign)
        #
        # Use external tool process the Output
        #
        TempFile = OutputPath + \
                   os.sep + \
                   ModuleName + \
                   'SEC' + \
                   SecNum + \
                   '.tmp'
        TempFile = os.path.normpath(TempFile)
        #
        # Remove temp file if its time stamp is older than dummy file
        # Just in case the external tool fails at this time but succeeded before
        # Error should be reported if the external tool does not generate a new output based on new input
        #
        if os.path.exists(TempFile) and os.path.exists(DummyFile) and os.path.getmtime(TempFile) < os.path.getmtime(DummyFile):
            os.remove(TempFile)

        FirstCall = False
        CmdOption = '-e'
        if ExternalOption != None:
            CmdOption = CmdOption + ' ' + ExternalOption
        if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr != None:
            #FirstCall is only set for the encapsulated flash FV image without process required attribute.
            FirstCall = True
        #
        # Call external tool
        #
        ReturnValue = [1]
        if FirstCall:
            #first try to call the guided tool with -z option and CmdOption for the no process required guided tool.
            GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, '-z' + ' ' + CmdOption, ReturnValue)

        #
        # when no call or first call failed, ReturnValue are not 1.
        # Call the guided tool with CmdOption
        #
        if ReturnValue[0] != 0:
            FirstCall = False
            ReturnValue[0] = 0
            GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)
        #
        # There is external tool which does not follow standard rule which return nonzero if tool fails
        # The output file has to be checked
        #
        if not os.path.exists(TempFile):
            EdkLogger.error("GenFds", COMMAND_FAILURE, 'Fail to call %s, no output file was generated' % ExternalTool)

        FileHandleIn = open(DummyFile, 'rb')
        FileHandleIn.seek(0, 2)
        InputFileSize = FileHandleIn.tell()

        FileHandleOut = open(TempFile, 'rb')
        FileHandleOut.seek(0, 2)
        TempFileSize = FileHandleOut.tell()

        Attribute = []
        HeaderLength = None
        if self.ExtraHeaderSize != -1:
            HeaderLength = str(self.ExtraHeaderSize)

        if self.ProcessRequired == "NONE" and HeaderLength == None:
            # auto-detect a prepended header: if the tool output ends with the
            # unmodified input, the difference in size is the header length
            if TempFileSize > InputFileSize:
                FileHandleIn.seek(0)
                BufferIn = FileHandleIn.read()
                FileHandleOut.seek(0)
                BufferOut = FileHandleOut.read()
                if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
                    HeaderLength = str(TempFileSize - InputFileSize)
            #auto sec guided attribute with process required
            if HeaderLength == None:
                Attribute.append('PROCESSING_REQUIRED')

        FileHandleIn.close()
        FileHandleOut.close()

        if FirstCall and 'PROCESSING_REQUIRED' in Attribute:
            # Guided data by -z option on first call is the process required data. Call the guided tool with the real option.
            GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile], ExternalTool, CmdOption)
        #
        # Call Gensection Add Section Header
        #
        if self.ProcessRequired in ("TRUE", "1"):
            if 'PROCESSING_REQUIRED' not in Attribute:
                Attribute.append('PROCESSING_REQUIRED')

        if self.AuthStatusValid in ("TRUE", "1"):
            Attribute.append('AUTH_STATUS_VALID')
        GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
                                             Guid=self.NameGuid, GuidAttr=Attribute, GuidHdrLen=HeaderLength)
        OutputFileList = []
        OutputFileList.append(OutputFile)
        if 'PROCESSING_REQUIRED' in Attribute:
            # reset guided section alignment to none for the processed required guided data
            self.Alignment = None
            self.IncludeFvSection = False
            self.ProcessRequired = "TRUE"
        return OutputFileList, self.Alignment
def AddToBuffer(self, Buffer, BaseAddress=None, BlockSize=None, BlockNum=None, ErasePloarity='1', MacroDict={}, Flag=False):
    """Generate this FV's image and append it to Buffer (newer variant).

    Flag=True runs in makefile-generation mode: FFS generation is delegated
    to make rules, the .inf control file is not written, and the method
    returns the would-be output path without invoking GenFv. Results are
    cached in GenFdsGlobalVariable.ImageBinDict keyed on '<FVNAME>fv'.

    NOTE(review): MacroDict={} is a mutable default mutated via update() —
    confirm the cross-call leakage is intended. The FV-header signature test
    and ord(FvHeaderBuffer[0x2E]) assume str file contents (Python 2);
    '/' division is also Python-2 semantics.
    """
    # Return the cached image when already generated and no base address forced
    if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
        return GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv']

    #
    # Check whether FV in Capsule is in FD flash region.
    # If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
    #
    if self.CapsuleName is not None:
        for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
            for RegionObj in FdObj.RegionList:
                if RegionObj.RegionType == BINARY_FILE_TYPE_FV:
                    for RegionData in RegionObj.RegionDataList:
                        if RegionData.endswith(".fv"):
                            continue
                        elif RegionData.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
                            continue
                        elif self.UiFvName.upper() == RegionData.upper():
                            GenFdsGlobalVariable.ErrorLogger(
                                "Capsule %s in FD region can't contain a FV %s in FD region."
                                % (self.CapsuleName, self.UiFvName.upper()))
    if not Flag:
        GenFdsGlobalVariable.InfLogger("\nGenerating %s FV" % self.UiFvName)
    GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
    FFSGuid = None

    if self.FvBaseAddress is not None:
        BaseAddress = self.FvBaseAddress
    if not Flag:
        self._InitializeInf(BaseAddress, BlockSize, BlockNum, ErasePloarity)
    #
    # First Process the Apriori section
    #
    MacroDict.update(self.DefineVarDict)

    GenFdsGlobalVariable.VerboseLogger('First generate Apriori file !')
    FfsFileList = []
    for AprSection in self.AprioriSectionList:
        FileName = AprSection.GenFfs(self.UiFvName, MacroDict, IsMakefile=Flag)
        FfsFileList.append(FileName)
        # Add Apriori file name to Inf file
        if not Flag:
            self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                      FileName + \
                                      TAB_LINE_BREAK)

    # Process Modules in FfsList
    for FfsFile in self.FfsList:
        if Flag:
            # makefile mode: FILE statements are handled elsewhere
            if isinstance(FfsFile, FfsFileStatement.FileStatement):
                continue
        # multi-thread mode: only process the module this worker owns
        if GenFdsGlobalVariable.EnableGenfdsMultiThread and GenFdsGlobalVariable.ModuleFile and GenFdsGlobalVariable.ModuleFile.Path.find(os.path.normpath(FfsFile.InfFileName)) == -1:
            continue
        FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
        FfsFileList.append(FileName)
        if not Flag:
            self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                      FileName + \
                                      TAB_LINE_BREAK)
    if not Flag:
        SaveFileOnChange(self.InfFileName, self.FvInfFile.getvalue(), False)
        self.FvInfFile.close()
    #
    # Call GenFv tool
    #
    FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
    FvOutputFile = FvOutputFile + '.Fv'
    # BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
    if self.CreateFileName is not None:
        FvOutputFile = self.CreateFileName

    if Flag:
        # makefile mode: record and return without running GenFv
        GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
        return FvOutputFile

    FvInfoFileName = os.path.join(GenFdsGlobalVariable.FfsDir, self.UiFvName + '.inf')
    if not Flag:
        CopyLongFilePath(GenFdsGlobalVariable.FvAddressFileName, FvInfoFileName)
        OrigFvInfo = None
        if os.path.exists(FvInfoFileName):
            OrigFvInfo = open(FvInfoFileName, 'r').read()
        if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
            FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
        GenFdsGlobalVariable.GenerateFirmwareVolume(
            FvOutputFile,
            [self.InfFileName],
            AddressFile=FvInfoFileName,
            FfsList=FfsFileList,
            ForceRebase=self.FvForceRebase,
            FileSystemGuid=FFSGuid)

        # If GenFv updated the address file, child FVs got base addresses:
        # regenerate the FFS files and rerun GenFv with those addresses.
        NewFvInfo = None
        if os.path.exists(FvInfoFileName):
            NewFvInfo = open(FvInfoFileName, 'r').read()
        if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
            FvChildAddr = []
            AddFileObj = open(FvInfoFileName, 'r')
            AddrStrings = AddFileObj.readlines()
            AddrKeyFound = False
            for AddrString in AddrStrings:
                if AddrKeyFound:
                    #get base address for the inside FvImage
                    FvChildAddr.append(AddrString)
                elif AddrString.find("[FV_BASE_ADDRESS]") != -1:
                    AddrKeyFound = True
            AddFileObj.close()

            if FvChildAddr != []:
                # Update Ffs again
                for FfsFile in self.FfsList:
                    FileName = FfsFile.GenFfs(MacroDict, FvChildAddr, BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
                if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
                    FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
                #Update GenFv again
                GenFdsGlobalVariable.GenerateFirmwareVolume(
                    FvOutputFile,
                    [self.InfFileName],
                    AddressFile=FvInfoFileName,
                    FfsList=FfsFileList,
                    ForceRebase=self.FvForceRebase,
                    FileSystemGuid=FFSGuid)

        #
        # Write the Fv contents to Buffer
        #
        if os.path.isfile(FvOutputFile) and os.path.getsize(FvOutputFile) >= 0x48:
            FvFileObj = open(FvOutputFile, 'rb')
            # PI FvHeader is 0x48 byte
            FvHeaderBuffer = FvFileObj.read(0x48)
            Signature = FvHeaderBuffer[0x28:0x32]
            if Signature and Signature.startswith('_FVH'):
                GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)
                GenFdsGlobalVariable.SharpCounter = 0

                FvFileObj.seek(0)
                Buffer.write(FvFileObj.read())
                # FV alignment position: low 5 bits of the attributes byte at 0x2E
                FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
                if FvAlignmentValue >= 0x400:
                    if FvAlignmentValue >= 0x100000:
                        if FvAlignmentValue >= 0x1000000:
                            #The max alignment supported by FFS is 16M.
                            self.FvAlignment = "16M"
                        else:
                            self.FvAlignment = str(FvAlignmentValue / 0x100000) + "M"
                    else:
                        self.FvAlignment = str(FvAlignmentValue / 0x400) + "K"
                else:
                    # FvAlignmentValue is less than 1K
                    self.FvAlignment = str(FvAlignmentValue)
                FvFileObj.close()
                GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
                GenFdsGlobalVariable.LargeFileInFvFlags.pop()
            else:
                GenFdsGlobalVariable.ErrorLogger("Invalid FV file %s." % self.UiFvName)
        else:
            GenFdsGlobalVariable.ErrorLogger("Failed to generate %s FV file." % self.UiFvName)
    return FvOutputFile
def _InitializeInf(self, BaseAddress=None, BlockSize=None, BlockNum=None, ErasePloarity='1'):
    """Write the GenFv input INF file for this FV into self.FvInfFile.

    Emits the [options] section (base address, block geometry, boot/runtime
    driver base addresses), the [attributes] section (erase polarity, FDF FV
    attributes, alignment), the optional FV extension header file reference,
    and finally opens the [files] section (file entries are appended later).

    @param BaseAddress   optional FV base address string (written verbatim)
    @param BlockSize     optional block size (int, written as hex)
    @param BlockNum      optional block count (only written when BlockSize given)
    @param ErasePloarity erase polarity value, defaults to '1'
    """
    #
    # Create FV inf file
    #
    self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.inf')
    self.FvInfFile = BytesIO()
    #
    # Add [Options]
    #
    # NOTE: writelines() is called with a single string throughout; a str is
    # an iterable of characters, so this writes the string content as-is.
    self.FvInfFile.writelines("[options]" + TAB_LINE_BREAK)
    if BaseAddress is not None:
        self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
                                  BaseAddress + \
                                  TAB_LINE_BREAK)
    if BlockSize is not None:
        self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
                                  '0x%X' %BlockSize + \
                                  TAB_LINE_BREAK)
        # EFI_NUM_BLOCKS is only meaningful together with an explicit block size
        if BlockNum is not None:
            self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
                                      ' 0x%X' %BlockNum + \
                                      TAB_LINE_BREAK)
    else:
        # No explicit geometry given: fall back to the FV's BlockSizeList
        if self.BlockSizeList == []:
            if not self._GetBlockSize():
                #set default block size is 1
                self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
        for BlockSize in self.BlockSizeList:
            if BlockSize[0] is not None:
                self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
                                          '0x%X' %BlockSize[0] + \
                                          TAB_LINE_BREAK)
            if BlockSize[1] is not None:
                self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
                                          ' 0x%X' %BlockSize[1] + \
                                          TAB_LINE_BREAK)
    if self.BsBaseAddress is not None:
        self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
                                  '0x%X' %self.BsBaseAddress)
    if self.RtBaseAddress is not None:
        self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
                                  '0x%X' %self.RtBaseAddress)
    #
    # Add attribute
    #
    self.FvInfFile.writelines("[attributes]" + TAB_LINE_BREAK)
    self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \
                              ' %s' %ErasePloarity + \
                              TAB_LINE_BREAK)
    if not (self.FvAttributeDict is None):
        for FvAttribute in self.FvAttributeDict.keys():
            # FvUsedSizeEnable is consumed here (sets a flag for the ext
            # header below) and is deliberately NOT written to the INF file
            if FvAttribute == "FvUsedSizeEnable":
                if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'):
                    self.UsedSizeEnable = True
                continue
            self.FvInfFile.writelines("EFI_" + \
                                      FvAttribute + \
                                      ' = ' + \
                                      self.FvAttributeDict[FvAttribute] + \
                                      TAB_LINE_BREAK )
    if self.FvAlignment is not None:
        self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \
                                  self.FvAlignment.strip() + \
                                  " = TRUE" + \
                                  TAB_LINE_BREAK)
    #
    # Generate FV extension header file
    #
    if not self.FvNameGuid:
        # Extension entries require a name GUID to hang the ext header on
        if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
            GenFdsGlobalVariable.ErrorLogger(
                "FV Extension Header Entries declared for %s with no FvNameGuid declaration."
                % (self.UiFvName))
    else:
        # TotalSize starts at the EFI_FIRMWARE_VOLUME_EXT_HEADER size:
        # 16-byte GUID + 4-byte ExtHeaderSize
        TotalSize = 16 + 4
        Buffer = ''
        if self.UsedSizeEnable:
            TotalSize += (4 + 4)
            ## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
            #typedef  struct
            # {
            #    EFI_FIRMWARE_VOLUME_EXT_ENTRY Hdr;
            #    UINT32 UsedSize;
            # } EFI_FIRMWARE_VOLUME_EXT_ENTRY_USED_SIZE_TYPE;
            Buffer += pack('HHL', 8, 3, 0)
        if self.FvNameString == 'TRUE':
            #
            # Create EXT entry for FV UI name
            # This GUID is used: A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C
            #
            FvUiLen = len(self.UiFvName)
            TotalSize += (FvUiLen + 16 + 4)
            Guid = FV_UI_EXT_ENTY_GUID.split('-')
            #
            # Layout:
            #   EFI_FIRMWARE_VOLUME_EXT_ENTRY: size 4
            #   GUID: size 16
            #   FV UI name
            #
            Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
                       + PackGUID(Guid)
                       + self.UiFvName)
        for Index in range(0, len(self.FvExtEntryType)):
            if self.FvExtEntryType[Index] == 'FILE':
                # check if the path is absolute or relative
                if os.path.isabs(self.FvExtEntryData[Index]):
                    FileFullPath = os.path.normpath(self.FvExtEntryData[Index])
                else:
                    FileFullPath = os.path.normpath(
                        os.path.join(GenFdsGlobalVariable.WorkSpaceDir, self.FvExtEntryData[Index]))
                # check if the file path exists or not
                if not os.path.isfile(FileFullPath):
                    GenFdsGlobalVariable.ErrorLogger(
                        "Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index]))
                FvExtFile = open(FileFullPath, 'rb')
                # seek to end to learn the file size
                FvExtFile.seek(0, 2)
                Size = FvExtFile.tell()
                # entry size field is 16 bits, so payload must stay below 0x10000
                if Size >= 0x10000:
                    GenFdsGlobalVariable.ErrorLogger(
                        "The size of FV Extension Header Entry file %s exceeds 0x10000."
                        % (self.FvExtEntryData[Index]))
                TotalSize += (Size + 4)
                FvExtFile.seek(0)
                Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
                Buffer += FvExtFile.read()
                FvExtFile.close()
            if self.FvExtEntryType[Index] == 'DATA':
                # DATA entries are comma-separated hex byte values
                ByteList = self.FvExtEntryData[Index].split(',')
                Size = len(ByteList)
                if Size >= 0x10000:
                    GenFdsGlobalVariable.ErrorLogger(
                        "The size of FV Extension Header Entry data %s exceeds 0x10000."
                        % (self.FvExtEntryData[Index]))
                TotalSize += (Size + 4)
                Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
                for Index1 in range(0, Size):
                    Buffer += pack('B', int(ByteList[Index1], 16))
        # Prepend the EFI_FIRMWARE_VOLUME_EXT_HEADER (FvName GUID + total size)
        Guid = self.FvNameGuid.split('-')
        Buffer = PackGUID(Guid) + pack('=L', TotalSize) + Buffer
        #
        # Generate FV extension header file if the total size is not zero
        #
        if TotalSize > 0:
            FvExtHeaderFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.ext')
            FvExtHeaderFile = BytesIO()
            FvExtHeaderFile.write(Buffer)
            Changed = SaveFileOnChange(FvExtHeaderFileName, FvExtHeaderFile.getvalue(), True)
            FvExtHeaderFile.close()
            if Changed:
                # force the INF (and hence the FV) to regenerate
                if os.path.exists(self.InfFileName):
                    os.remove(self.InfFileName)
            self.FvInfFile.writelines("EFI_FV_EXT_HEADER_FILE_NAME = " + \
                                      FvExtHeaderFileName + \
                                      TAB_LINE_BREAK)
    #
    # Add [Files]
    #
    self.FvInfFile.writelines("[files]" + TAB_LINE_BREAK)
def GenerateSection(Output, Input, Type=None, CompressionType=None, Guid=None,
                    GuidHdrLen=None, GuidAttr=None, Ui=None, Ver=None, InputAlign=None,
                    BuildNumber=None, DummyFile=None, IsMakefile=False):
    """Generate a FFS section, normally by invoking the GenSec tool.

    UI sections (when Ui is given and IsMakefile is False) are packed
    directly in Python instead of invoking GenSec.  In makefile mode the
    assembled GenSec command line is recorded in
    GenFdsGlobalVariable.SecCmdList instead of being executed.

    @param Output           path of the section file to produce
    @param Input            list of input file paths
    @param Type             section type passed to GenSec -s
    @param CompressionType  compression type passed to GenSec -c
    @param Guid             GUID for GUIDed sections (GenSec -g)
    @param GuidHdrLen       GUIDed section header length (GenSec -l)
    @param GuidAttr         list of GUIDed section attributes (GenSec -r)
    @param Ui               UI string for a UI section (GenSec -n)
    @param Ver              version string for a version section (GenSec -n)
    @param InputAlign       list of alignments for dummy sections (--sectionalign)
    @param BuildNumber      build number for version sections (GenSec -j)
    @param DummyFile        dummy input file (GenSec --dummy)
    @param IsMakefile       True to record the command instead of running it
    """
    # Fix: the original declared GuidAttr=[] and InputAlign=[] — mutable
    # default arguments are shared across calls; use a None sentinel instead.
    if GuidAttr is None:
        GuidAttr = []
    if InputAlign is None:
        InputAlign = []
    Cmd = ["GenSec"]
    if Type:
        Cmd += ("-s", Type)
    if CompressionType:
        Cmd += ("-c", CompressionType)
    if Guid:
        Cmd += ("-g", Guid)
    if DummyFile:
        Cmd += ("--dummy", DummyFile)
    if GuidHdrLen:
        Cmd += ("-l", GuidHdrLen)
    #Add each guided attribute
    for Attr in GuidAttr:
        Cmd += ("-r", Attr)
    #Section Align is only for dummy section without section type
    for SecAlign in InputAlign:
        Cmd += ("--sectionalign", SecAlign)

    CommandFile = Output + '.txt'
    if Ui:
        if IsMakefile:
            # $(MODULE_NAME) is a makefile variable and must not be quoted
            if Ui == "$(MODULE_NAME)":
                Cmd += ('-n', Ui)
            else:
                Cmd += ("-n", '"' + Ui + '"')
            Cmd += ("-o", Output)
            if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
                GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
        else:
            # Pack the UI section in-process: 4-byte common section header
            # followed by the UTF-16LE name and a terminating NUL.
            SectionData = array('B', [0, 0, 0, 0])
            SectionData.fromlist(array('B', Ui.encode('utf-16-le')).tolist())
            SectionData.append(0)
            SectionData.append(0)
            Len = len(SectionData)
            # 0x15 is the UI section type; size is a 24-bit little-endian field
            GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff,
                                                         (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15)

            DirName = os.path.dirname(Output)
            if not CreateDirectory(DirName):
                EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
            else:
                if DirName == '':
                    DirName = os.getcwd()
                if not os.access(DirName, os.W_OK):
                    EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)

            try:
                with open(Output, "wb") as Fd:
                    SectionData.tofile(Fd)
                    Fd.flush()
            except IOError as X:
                EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
    elif Ver:
        Cmd += ("-n", Ver)
        if BuildNumber:
            Cmd += ("-j", BuildNumber)
        Cmd += ("-o", Output)
        SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
        if IsMakefile:
            if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
                GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
        else:
            if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
                return
            GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
    else:
        Cmd += ("-o", Output)
        Cmd += Input
        SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
        if IsMakefile:
            # guard the command so make skips it when the input is absent
            if sys.platform == "win32":
                Cmd = ['if', 'exist', Input[0]] + Cmd
            else:
                Cmd = ['-test', '-e', Input[0], "&&"] + Cmd
            if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
                GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
        elif GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
            GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5,
                                             "%s needs update because of newer %s" % (Output, Input))
            GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
            # remember that this FV now contains a large file so FFSv3 is used
            if (os.path.getsize(Output) >= GenFdsGlobalVariable.LARGE_FILE_SIZE and
                    GenFdsGlobalVariable.LargeFileInFvFlags):
                GenFdsGlobalVariable.LargeFileInFvFlags[-1] = True
def LoadInfFile(self, Filename):
    """Parse an EDK2 INF file and load its records into the database tables.

    Strips EDK block comments and line comments, splits the file into
    sections delimited by [...] headers, validates section names and arch
    qualifiers, inserts each section's items via
    InsertSectionItemsIntoDatabase, and finally expands DEFINE macros.

    @param Filename path of the INF file to parse
    """
    #
    # Insert a record for file
    #
    Filename = NormPath(Filename)
    self.Identification.FileFullPath = Filename
    (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
    self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)

    #
    # Init InfTable
    #
    #self.TblInf.Table = "Inf%s" % self.FileID
    #self.TblInf.Create()

    #
    # Init common datas
    #
    # NOTE(review): IncludeFiles is initialized here but never used in this method.
    IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
        [], [], TAB_UNKNOWN, [], [], []
    LineNo = 0

    #
    # Parse file content
    #
    IsFindBlockComment = False
    ReservedLine = ''
    for Line in open(Filename, 'r'):
        LineNo = LineNo + 1
        #
        # Remove comment block
        #
        if Line.find(TAB_COMMENT_EDK_START) > -1:
            # keep the text before the block-comment start marker
            ReservedLine = GetSplitList(Line, TAB_COMMENT_EDK_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(TAB_COMMENT_EDK_END) > -1:
            # rejoin the saved prefix with the text after the end marker
            Line = ReservedLine + GetSplitList(Line, TAB_COMMENT_EDK_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            continue

        #
        # Remove comments at tail and remove spaces again
        #
        Line = CleanString(Line)
        if Line == '':
            continue

        #
        # Find a new section tab
        # First insert previous section items
        # And then parse the content of the new section
        #
        if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
            # skip section headers of the form [--...]
            if Line[1:3] == "--":
                continue
            Model = Section[CurrentSection.upper()]
            #
            # Insert items data of previous section
            #
            InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model,
                                           CurrentSection, SectionItemList, ArchList,
                                           ThirdList, IfDefList, self.RecordSet)
            #
            # Parse the new section
            #
            SectionItemList = []
            ArchList = []
            ThirdList = []

            CurrentSection = ''
            LineList = GetSplitValueList(
                Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)],
                TAB_COMMA_SPLIT)
            for Item in LineList:
                ItemList = GetSplitValueList(Item, TAB_SPLIT)
                if CurrentSection == '':
                    CurrentSection = ItemList[0]
                else:
                    # all comma-separated entries in one header must share a name
                    if CurrentSection != ItemList[0]:
                        EdkLogger.error(
                            "Parser", PARSER_ERROR,
                            "Different section names '%s' and '%s' are found in one section definition, this is not allowed."
                            % (CurrentSection, ItemList[0]),
                            File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError)
                if CurrentSection.upper() not in self.KeyList:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    CurrentSection = TAB_UNKNOWN
                    continue
                # pad so ItemList always has at least name/arch/third entries
                ItemList.append('')
                ItemList.append('')
                if len(ItemList) > 5:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                else:
                    if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                        EdkLogger.error(
                            "Parser", PARSER_ERROR,
                            "Invalid Arch definition '%s' found" % ItemList[1],
                            File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError)
                    ArchList.append(ItemList[1].upper())
                    ThirdList.append(ItemList[2])

            continue

        #
        # Not in any defined section
        #
        if CurrentSection == TAB_UNKNOWN:
            ErrorMsg = "%s is not in any defined section" % Line
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg,
                            File=Filename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError)

        #
        # Add a section item
        #
        SectionItemList.append([Line, LineNo])
        # End of parse
    #End of For

    #
    # Insert items data of last section
    #
    Model = Section[CurrentSection.upper()]
    InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model,
                                   CurrentSection, SectionItemList, ArchList,
                                   ThirdList, IfDefList, self.RecordSet)

    #
    # Replace all DEFINE macros with its actual values
    #
    ParseDefineMacro2(self.TblInf, self.RecordSet, GlobalData.gGlobalDefines)
def __InitializeInf__(self, BaseAddress=None, BlockSize=None, BlockNum=None, ErasePloarity='1', VtfDict=None):
    """Write the GenFv input INF file for this FV into self.FvInfFile (legacy variant).

    Emits the [options] section, the [attributes] section, the optional FV
    extension header file reference, the [files] section, and a VTF file
    entry when this FV appears in VtfDict.

    Fixes applied: the removed Python-2-only ``<>`` operator is replaced with
    ``!=``, and ``== None`` / ``!= None`` comparisons use ``is`` / ``is not``
    (PEP 8).  Behavior is unchanged.

    @param BaseAddress   optional FV base address string (written verbatim)
    @param BlockSize     optional block size (int, written as hex)
    @param BlockNum      optional block count (only written when BlockSize given)
    @param ErasePloarity erase polarity value, defaults to '1'
    @param VtfDict       optional map of FV name -> VTF file name
    """
    #
    # Create FV inf file
    #
    self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.inf')
    self.FvInfFile = StringIO.StringIO()

    #
    # Add [Options]
    #
    self.FvInfFile.writelines("[options]" + T_CHAR_LF)
    if BaseAddress is not None:
        self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
                                  BaseAddress + \
                                  T_CHAR_LF)
    if BlockSize is not None:
        self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
                                  '0x%X' %BlockSize + \
                                  T_CHAR_LF)
        if BlockNum is not None:
            self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
                                      ' 0x%X' %BlockNum + \
                                      T_CHAR_LF)
    else:
        if self.BlockSizeList == []:
            if not self._GetBlockSize():
                #set default block size is 1
                self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + T_CHAR_LF)
        for BlockSize in self.BlockSizeList:
            if BlockSize[0] is not None:
                self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
                                          '0x%X' %BlockSize[0] + \
                                          T_CHAR_LF)
            if BlockSize[1] is not None:
                self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
                                          ' 0x%X' %BlockSize[1] + \
                                          T_CHAR_LF)
    if self.BsBaseAddress is not None:
        self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
                                  '0x%X' %self.BsBaseAddress)
    if self.RtBaseAddress is not None:
        self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
                                  '0x%X' %self.RtBaseAddress)
    #
    # Add attribute
    #
    self.FvInfFile.writelines("[attributes]" + T_CHAR_LF)
    self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \
                              ' %s' %ErasePloarity + \
                              T_CHAR_LF)
    if not (self.FvAttributeDict is None):
        for FvAttribute in self.FvAttributeDict.keys():
            self.FvInfFile.writelines("EFI_" + \
                                      FvAttribute + \
                                      ' = ' + \
                                      self.FvAttributeDict[FvAttribute] + \
                                      T_CHAR_LF )
    if self.FvAlignment is not None:
        self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \
                                  self.FvAlignment.strip() + \
                                  " = TRUE" + \
                                  T_CHAR_LF)

    #
    # Generate FV extension header file
    #
    if self.FvNameGuid is None or self.FvNameGuid == '':
        # Extension entries require a name GUID to hang the ext header on
        if len(self.FvExtEntryType) > 0:
            GenFdsGlobalVariable.ErrorLogger(
                "FV Extension Header Entries declared for %s with no FvNameGuid declaration."
                % (self.UiFvName))

    if self.FvNameGuid is not None and self.FvNameGuid != '':
        # TotalSize starts at the EFI_FIRMWARE_VOLUME_EXT_HEADER size:
        # 16-byte GUID + 4-byte ExtHeaderSize
        TotalSize = 16 + 4
        Buffer = ''
        if self.FvNameString == 'TRUE':
            #
            # Create EXT entry for FV UI name
            # This GUID is used: A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C
            #
            FvUiLen = len(self.UiFvName)
            TotalSize += (FvUiLen + 16 + 4)
            Guid = FV_UI_EXT_ENTY_GUID.split('-')
            #
            # Layout:
            #   EFI_FIRMWARE_VOLUME_EXT_ENTRY : size 4
            #   GUID                          : size 16
            #   FV UI name
            #
            Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
                       + pack('=LHHBBBBBBBB', int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16),
                              int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16),
                              int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16),
                              int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16),
                              int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16))
                       + self.UiFvName)

        for Index in range(0, len(self.FvExtEntryType)):
            if self.FvExtEntryType[Index] == 'FILE':
                # check if the path is absolute or relative
                if os.path.isabs(self.FvExtEntryData[Index]):
                    FileFullPath = os.path.normpath(self.FvExtEntryData[Index])
                else:
                    FileFullPath = os.path.normpath(
                        os.path.join(GenFdsGlobalVariable.WorkSpaceDir, self.FvExtEntryData[Index]))
                # check if the file path exists or not
                if not os.path.isfile(FileFullPath):
                    GenFdsGlobalVariable.ErrorLogger(
                        "Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index]))
                FvExtFile = open(FileFullPath, 'rb')
                FvExtFile.seek(0, 2)
                Size = FvExtFile.tell()
                # entry size field is 16 bits, so payload must stay below 0x10000
                if Size >= 0x10000:
                    GenFdsGlobalVariable.ErrorLogger(
                        "The size of FV Extension Header Entry file %s exceeds 0x10000."
                        % (self.FvExtEntryData[Index]))
                TotalSize += (Size + 4)
                FvExtFile.seek(0)
                Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
                Buffer += FvExtFile.read()
                FvExtFile.close()
            if self.FvExtEntryType[Index] == 'DATA':
                # DATA entries are comma-separated hex byte values
                ByteList = self.FvExtEntryData[Index].split(',')
                Size = len(ByteList)
                if Size >= 0x10000:
                    GenFdsGlobalVariable.ErrorLogger(
                        "The size of FV Extension Header Entry data %s exceeds 0x10000."
                        % (self.FvExtEntryData[Index]))
                TotalSize += (Size + 4)
                Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
                for Index1 in range(0, Size):
                    Buffer += pack('B', int(ByteList[Index1], 16))

        # Prepend the EFI_FIRMWARE_VOLUME_EXT_HEADER (FvName GUID + total size)
        Guid = self.FvNameGuid.split('-')
        Buffer = pack('=LHHBBBBBBBBL',
                      int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16),
                      int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16),
                      int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16),
                      int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16),
                      int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16),
                      TotalSize) + Buffer

        #
        # Generate FV extension header file if the total size is not zero
        #
        if TotalSize > 0:
            FvExtHeaderFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.ext')
            FvExtHeaderFile = StringIO.StringIO()
            FvExtHeaderFile.write(Buffer)
            Changed = SaveFileOnChange(FvExtHeaderFileName, FvExtHeaderFile.getvalue(), True)
            FvExtHeaderFile.close()
            if Changed:
                # force the INF (and hence the FV) to regenerate
                if os.path.exists(self.InfFileName):
                    os.remove(self.InfFileName)
            self.FvInfFile.writelines("EFI_FV_EXT_HEADER_FILE_NAME = " + \
                                      FvExtHeaderFileName + \
                                      T_CHAR_LF)

    #
    # Add [Files]
    #
    self.FvInfFile.writelines("[files]" + T_CHAR_LF)
    if VtfDict is not None and self.UiFvName in VtfDict.keys():
        self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                  VtfDict.get(self.UiFvName) + \
                                  T_CHAR_LF)
def BuildMetaDataFileDatabase(self, SpecificDirs=None):
    """Walk the target tree and parse every meta-data file into the ECC database.

    Scans for DEC/DSC/INF/FDF/UNI files (skipping configured directories),
    parses each with the matching parser, records each parsed path in the
    generate-file list, and commits the database at the end.

    Fix applied: the directory/symlink normalization loop used to remove
    entries from ``Dirs`` while iterating ``Dirs`` itself, which skips the
    element following each removal; it now iterates a snapshot so every
    entry is examined.  Mutating ``Dirs`` in place is intentional — os.walk
    uses it to drive the traversal.

    @param SpecificDirs optional list of sub-directories (relative to the
                        target) to restrict the scan to; None scans the
                        whole target
    """
    ScanFolders = []
    if SpecificDirs is None:
        ScanFolders.append(EccGlobalData.gTarget)
    else:
        for specificDir in SpecificDirs:
            ScanFolders.append(os.path.join(EccGlobalData.gTarget, specificDir))
    EdkLogger.quiet("Building database for meta data files ...")
    Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
    #SkipDirs = Read from config file
    SkipDirs = EccGlobalData.gConfig.SkipDirList
    SkipDirString = '|'.join(SkipDirs)
    # p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
    # NOTE(review): the '^\S' spliced into the alternation below effectively
    # disables matching of the last skip-dir entry; kept as-is to preserve
    # current behavior — confirm against the intended skip-dir semantics.
    p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
    for scanFolder in ScanFolders:
        for Root, Dirs, Files in os.walk(scanFolder):
            if p.match(Root.upper()):
                continue
            # Iterate a snapshot: removing from Dirs while iterating it would
            # skip the entry that follows each removed element.
            for Dir in Dirs[:]:
                Dirname = os.path.join(Root, Dir)
                if os.path.islink(Dirname):
                    Dirname = os.path.realpath(Dirname)
                    if os.path.isdir(Dirname):
                        # symlinks to directories are treated as directories
                        Dirs.remove(Dir)
                        Dirs.append(Dirname)

            for File in Files:
                if len(File) > 4 and File[-4:].upper() == ".DEC":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    #Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    self.MetaFile = DecParser(Filename, MODEL_FILE_DEC, EccGlobalData.gDb.TblDec)
                    self.MetaFile.Start()
                    continue
                if len(File) > 4 and File[-4:].upper() == ".DSC":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    #Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    self.MetaFile = DscParser(PathClass(Filename, Root), MODEL_FILE_DSC,
                                              MetaFileStorage(EccGlobalData.gDb.TblDsc.Cur, Filename,
                                                              MODEL_FILE_DSC, True))
                    # always do post-process, in case of macros change
                    self.MetaFile.DoPostProcess()
                    self.MetaFile.Start()
                    self.MetaFile._PostProcess()
                    continue
                if len(File) > 4 and File[-4:].upper() == ".INF":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    #Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    self.MetaFile = InfParser(Filename, MODEL_FILE_INF, EccGlobalData.gDb.TblInf)
                    self.MetaFile.Start()
                    continue
                if len(File) > 4 and File[-4:].upper() == ".FDF":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    continue
                if len(File) > 4 and File[-4:].upper() == ".UNI":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    FileID = EccGlobalData.gDb.TblFile.InsertFile(Filename, MODEL_FILE_UNI)
                    EccGlobalData.gDb.TblReport.UpdateBelongsToItemByFile(FileID, File)
                    continue

    Op.close()

    # Commit to database
    EccGlobalData.gDb.Conn.commit()

    EdkLogger.quiet("Building database for meta data files done!")
def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
    """Recover the original source lines from a preprocessed C file.

    Uses the preprocessor's line-control directives (#line / linemarkers) to
    keep only lines that came from the preprocessed file itself, restoring
    their original line numbers, and optionally rewrites numeric literal
    formats for assemblers that need them.

    @param Source     path of the preprocessed input file
    @param Target     path of the trimmed output file
    @param ConvertHex True to convert 0x1234 hex literals to 1234h form
    @param TrimLong   True to strip L/UL suffixes from long number literals
    """
    CreateDirectory(os.path.dirname(Target))
    try:
        f = open(Source, 'r')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)

    # read whole file
    Lines = f.readlines()
    f.close()

    PreprocessedFile = ""
    InjectedFile = ""
    LineIndexOfOriginalFile = None
    NewLines = []
    LineControlDirectiveFound = False
    for Index in range(len(Lines)):
        Line = Lines[Index]
        #
        # Find out the name of files injected by preprocessor from the lines
        # with Line Control directive
        #
        MatchList = gLineControlDirective.findall(Line)
        if MatchList != []:
            MatchList = MatchList[0]
            if len(MatchList) == 2:
                # LineNumber tells where the NEXT source line belongs
                LineNumber = int(MatchList[0], 0)
                InjectedFile = MatchList[1]
                # The first injetcted file must be the preprocessed file itself
                if PreprocessedFile == "":
                    PreprocessedFile = InjectedFile
            LineControlDirectiveFound = True
            continue
        elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
            # content from an injected header (or before any directive): drop it.
            # This also guarantees LineNumber is always assigned before the
            # LineNumber check below is reached.
            continue

        if LineIndexOfOriginalFile == None:
            #
            # Any non-empty lines must be from original preprocessed file.
            # And this must be the first one.
            #
            LineIndexOfOriginalFile = Index
            EdkLogger.verbose("Found original file content starting from line %d"
                              % (LineIndexOfOriginalFile + 1))

        # convert HEX number format if indicated
        if ConvertHex:
            Line = gHexNumberPattern.sub(r"0\2h", Line)
        else:
            Line = gHexNumberPattern.sub(r"\1\2", Line)
        if TrimLong:
            Line = gLongNumberPattern.sub(r"\1", Line)
        # convert Decimal number format
        Line = gDecNumberPattern.sub(r"\1", Line)

        if LineNumber != None:
            EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
            # in case preprocessor removed some lines, like blank or comment lines
            if LineNumber <= len(NewLines):
                # possible?
                NewLines[LineNumber - 1] = Line
            else:
                # pad the gap with empty lines so line numbers stay aligned
                if LineNumber > (len(NewLines) + 1):
                    for LineIndex in range(len(NewLines), LineNumber - 1):
                        NewLines.append(os.linesep)
                NewLines.append(Line)
            # only the first line after a directive is repositioned
            LineNumber = None
            EdkLogger.verbose("Now we have lines: %d" % len(NewLines))
        else:
            NewLines.append(Line)

    # in case there's no line directive or linemarker found
    if (not LineControlDirectiveFound) and NewLines == []:
        NewLines = Lines

    # save to file
    try:
        f = open(Target, 'wb')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
    f.writelines(NewLines)
    f.close()
def LoadToolDefFile(self, FileName):
    """Parse a tools_def.txt file into the tool-definition dictionaries.

    Reads NAME=VALUE lines, expands DEF()/ENV() macros, records
    TARGET_TOOLCHAIN_ARCH_TOOLCODE_ATTRIBUTE definitions in
    self.ToolsDefTxtDictionary, collects the known targets / tool chains /
    arches / command types in self.ToolsDefTxtDatabase (including the FAMILY
    and BUILDRULEFAMILY maps), and finally expands every '*' wildcard key
    into concrete keys.

    Fix applied: the input file handle was opened and never closed; it is
    now managed with a ``with`` block inside the same try/except.

    @param FileName path of the tools_def.txt file
    """
    FileContent = []
    if os.path.isfile(FileName):
        try:
            # 'with' guarantees the handle is closed (the original leaked it)
            with open(FileName, 'r') as F:
                FileContent = F.readlines()
        except:
            EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
    else:
        EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

    self.ToolsDefTxtDatabase = {
        TAB_TOD_DEFINES_TARGET: [],
        TAB_TOD_DEFINES_TOOL_CHAIN_TAG: [],
        TAB_TOD_DEFINES_TARGET_ARCH: [],
        TAB_TOD_DEFINES_COMMAND_TYPE: []
    }

    for Index in range(len(FileContent)):
        Line = FileContent[Index].strip()
        # skip blank lines and comments
        if Line == "" or Line[0] == '#':
            continue
        NameValuePair = Line.split("=", 1)
        if len(NameValuePair) != 2:
            EdkLogger.warn("tools_def.txt parser",
                           "Line %d: not correct assignment statement, skipped" % (Index + 1))
            continue

        Name = NameValuePair[0].strip()
        Value = NameValuePair[1].strip()

        if Name == "IDENTIFIER":
            EdkLogger.debug(EdkLogger.DEBUG_8,
                            "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
            continue

        # DEFINE lines populate the macro dictionary instead of the tool DB
        MacroDefinition = gMacroDefPattern.findall(Name)
        if MacroDefinition != []:
            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index + 1)
            MacroName = MacroDefinition[0].strip()
            self.MacroDictionary["DEF(%s)" % MacroName] = Value
            EdkLogger.debug(EdkLogger.DEBUG_8,
                            "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
            continue

        Done, Value = self.ExpandMacros(Value)
        if not Done:
            EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                            "Macro or Environment has not been defined",
                            ExtraData=Value[4:-1], File=FileName, Line=Index + 1)

        # keys must have exactly 5 fields: TARGET_TAG_ARCH_TOOLCODE_ATTRIBUTE
        List = Name.split('_')
        if len(List) != 5:
            EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
            continue
        elif List[4] == '*':
            EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
            continue
        else:
            self.ToolsDefTxtDictionary[Name] = Value
            if List[0] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
            if List[1] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
            if List[2] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
            if List[3] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]

            # FAMILY entries (TAG_*_*_FAMILY) seed both FAMILY and
            # BUILDRULEFAMILY; a family may not be overridden once set
            if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == '*' and List[3] == '*':
                if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                    EdkLogger.verbose(
                        "Line %d: No override allowed for the family of a tool chain: %s"
                        % ((Index + 1), Name))
            # BUILDRULEFAMILY may override, but only for a known tool chain
            if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == '*' and List[3] == '*':
                if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                   or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    EdkLogger.verbose(
                        "Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s"
                        % ((Index + 1), Name))
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value

    # de-duplicate and sort the collected key spaces
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(
        set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(
        set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(
        set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(
        set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))

    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()

    # expand '*' wildcards field by field, from the last field to the first,
    # into one concrete key per known value; iterate a dict copy because
    # entries are added/removed while expanding
    KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG,
               TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
    for Index in range(3, -1, -1):
        for Key in dict(self.ToolsDefTxtDictionary):
            List = Key.split('_')
            if List[Index] == '*':
                for String in self.ToolsDefTxtDatabase[KeyList[Index]]:
                    List[Index] = String
                    NewKey = '%s_%s_%s_%s_%s' % tuple(List)
                    # an explicit key always wins over a wildcard expansion
                    if NewKey not in self.ToolsDefTxtDictionary:
                        self.ToolsDefTxtDictionary[NewKey] = self.ToolsDefTxtDictionary[Key]
                    continue
                del self.ToolsDefTxtDictionary[Key]
            elif List[Index] not in self.ToolsDefTxtDatabase[KeyList[Index]]:
                # key references a value that never appears: drop it
                del self.ToolsDefTxtDictionary[Key]
def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, MacroDict=None, Flag=False):
    """Generate this FD region's content and append it to Buffer.

    Dispatches on self.RegionType (FV / CAPSULE / FILE / INF / DATA / None),
    generating images where needed, checking each against the region size,
    and padding the remainder with the erase-polarity fill byte.

    Fix applied: MacroDict defaulted to a mutable ``{}`` shared across
    calls; it now uses a None sentinel (behavior otherwise unchanged).

    @param Buffer        output buffer the region content is written to
    @param BaseAddress   FD base address (hex string)
    @param BlockSizeList FD block size list, used to set FV block info
    @param ErasePolarity erase polarity, selects the pad byte
    @param ImageBinDict  map of already-generated FV/capsule image files
    @param MacroDict     optional macro dictionary for path expansion
    @param Flag          True for makefile-generation mode (no real output
                         except FV command collection)
    """
    # Avoid mutable default argument (shared dict across calls)
    if MacroDict is None:
        MacroDict = {}
    Size = self.Size
    if not Flag:
        GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
        GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
    GenFdsGlobalVariable.SharpCounter = 0
    # in makefile mode only FV regions need processing
    if Flag and (self.RegionType != BINARY_FILE_TYPE_FV):
        return

    if self.RegionType == BINARY_FILE_TYPE_FV:
        #
        # Get Fv from FvDict
        #
        self.FvAddress = int(BaseAddress, 16) + self.Offset
        FvBaseAddress = '0x%X' % self.FvAddress
        FvOffset = 0
        for RegionData in self.RegionDataList:
            FileName = None
            if RegionData.endswith(".fv"):
                # a pre-built FV image file given by path
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                if not Flag:
                    GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
                FileName = RegionData
            elif RegionData.upper() + 'fv' in ImageBinDict:
                # FV image already generated earlier in this run
                if not Flag:
                    GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                FileName = ImageBinDict[RegionData.upper() + 'fv']
            else:
                #
                # Generate FvImage.
                #
                FvObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
                    FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[RegionData.upper()]
                if FvObj is not None:
                    if not Flag:
                        GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                    #
                    # Call GenFv tool
                    #
                    self.BlockInfoOfRegion(BlockSizeList, FvObj)
                    self.FvAddress = self.FvAddress + FvOffset
                    FvAlignValue = GenFdsGlobalVariable.GetAlignment(FvObj.FvAlignment)
                    if self.FvAddress % FvAlignValue != 0:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
                    FvBuffer = BytesIO()
                    FvBaseAddress = '0x%X' % self.FvAddress
                    BlockSize = None
                    BlockNum = None
                    FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, Flag=Flag)
                    if Flag:
                        continue

                    FvBufferLen = len(FvBuffer.getvalue())
                    if FvBufferLen > Size:
                        FvBuffer.close()
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV (%s) is larger than Region Size 0x%X specified."
                                        % (RegionData, Size))
                    #
                    # Put the generated image into FD buffer.
                    #
                    Buffer.write(FvBuffer.getvalue())
                    FvBuffer.close()
                    FvOffset = FvOffset + FvBufferLen
                    Size = Size - FvBufferLen
                    continue
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "FV (%s) is NOT described in FDF file!" % (RegionData))
            #
            # Add the exist Fv image into FD buffer
            #
            if not Flag:
                if FileName is not None:
                    FileLength = os.stat(FileName)[ST_SIZE]
                    if FileLength > Size:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV File (%s) is larger than Region Size 0x%X specified." \
                                        % (RegionData, Size))
                    BinFile = open(FileName, 'rb')
                    Buffer.write(BinFile.read())
                    BinFile.close()
                    Size = Size - FileLength
        #
        # Pad the left buffer
        #
        if not Flag:
            self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType == 'CAPSULE':
        #
        # Get Capsule from Capsule Dict
        #
        for RegionData in self.RegionDataList:
            if RegionData.endswith(".cap"):
                # a pre-built capsule image file given by path
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
                FileName = RegionData
            elif RegionData.upper() + 'cap' in ImageBinDict:
                GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                FileName = ImageBinDict[RegionData.upper() + 'cap']
            else:
                #
                # Generate Capsule image and Put it into FD buffer
                #
                CapsuleObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict:
                    CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
                if CapsuleObj is not None:
                    CapsuleObj.CapsuleName = RegionData.upper()
                    GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                    #
                    # Call GenFv tool to generate Capsule Image
                    #
                    FileName = CapsuleObj.GenCapsule()
                    CapsuleObj.CapsuleName = None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "Capsule (%s) is NOT described in FDF file!" % (RegionData))
            #
            # Add the capsule image into FD buffer
            #
            FileLength = os.stat(FileName)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
                                % (FileLength, RegionData, Size))
            BinFile = open(FileName, 'rb')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType in ('FILE', 'INF'):
        for RegionData in self.RegionDataList:
            if self.RegionType == 'INF':
                # the INF must produce exactly one binary; patch it in place
                RegionData.__InfParse__(None)
                if len(RegionData.BinFileList) != 1:
                    EdkLogger.error('GenFds', GENFDS_ERROR,
                                    'INF in FD region can only contain one binary: %s' % RegionData)
                File = RegionData.BinFileList[0]
                RegionData = RegionData.PatchEfiFile(File.Path, File.Type)
            else:
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
            #
            # Add the file image into FD buffer
            #
            FileLength = os.stat(RegionData)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size of File (%s) is larger than Region Size 0x%X specified." \
                                % (RegionData, Size))
            GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
            BinFile = open(RegionData, 'rb')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType == 'DATA':
        GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
        DataSize = 0
        for RegionData in self.RegionDataList:
            # DATA regions are comma-separated hex byte values
            Data = RegionData.split(',')
            DataSize = DataSize + len(Data)
            if DataSize > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Size of DATA is larger than Region Size ")
            else:
                for item in Data:
                    Buffer.write(pack('B', int(item, 16)))
        Size = Size - DataSize
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType is None:
        GenFdsGlobalVariable.InfLogger(' Region Name = None')
        self.PadBuffer(Buffer, ErasePolarity, Size)
def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict=None, MacroDict={}):
    ## Generate the image of this FD region and append it to Buffer.
    #
    #  The content depends on self.RegionType:
    #    'FV'      - generate a firmware volume image (or reuse a pre-built
    #                .fv file / an image cached in ImageBinDict)
    #    'CAPSULE' - generate a capsule image (or reuse a pre-built .cap file
    #                / a cached image)
    #    'FILE'    - copy a raw file into the region
    #    'DATA'    - emit the literal comma-separated byte list from the FDF
    #    None      - empty region, pad only
    #  Any space left in the region is padded with 0xFF when ErasePolarity is
    #  '1', otherwise with 0x00.
    #
    #  @param  Buffer        file-like object the region image is written into
    #  @param  BaseAddress   base address of the parent FD, as a hex string
    #  @param  BlockSizeList block (size, number) info of the parent FD
    #  @param  ErasePolarity erase polarity of the parent FD ('1' or '0')
    #  @param  ImageBinDict  cache of already generated FV/capsule image paths
    #  @param  vtfDict       VTF information forwarded to FV generation
    #  @param  MacroDict     macros used to expand region data values.
    #                        NOTE(review): mutable default argument - safe only
    #                        while nothing mutates it; confirm callers.
    Size = self.Size
    GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
    GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
    GenFdsGlobalVariable.SharpCounter = 0

    if self.RegionType == 'FV':
        #
        # Get Fv from FvDict
        #
        self.FvAddress = int(BaseAddress, 16) + self.Offset
        FvBaseAddress = '0x%X' % self.FvAddress
        FvOffset = 0  # running offset of generated FVs inside this region
        for RegionData in self.RegionDataList:
            FileName = None
            if RegionData.endswith(".fv"):
                # Region refers to a pre-built .fv file on disk.
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
                if RegionData[1] != ':':
                    # not a drive-letter absolute path: anchor at the workspace
                    RegionData = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
                FileName = RegionData
            elif RegionData.upper() + 'fv' in ImageBinDict.keys():
                # FV image was already generated earlier in this build.
                GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                FileName = ImageBinDict[RegionData.upper() + 'fv']
            else:
                #
                # Generate FvImage.
                #
                FvObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
                    FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
                if FvObj != None:
                    GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                    #
                    # Call GenFv tool
                    #
                    self.BlockInfoOfRegion(BlockSizeList, FvObj)
                    self.FvAddress = self.FvAddress + FvOffset
                    FvAlignValue = self.GetFvAlignValue(FvObj.FvAlignment)
                    if self.FvAddress % FvAlignValue != 0:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
                    FvBuffer = StringIO.StringIO('')
                    FvBaseAddress = '0x%X' % self.FvAddress
                    BlockSize = None
                    BlockNum = None
                    FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
                    if FvBuffer.len > Size:
                        FvBuffer.close()
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
                    #
                    # Put the generated image into FD buffer.
                    #
                    Buffer.write(FvBuffer.getvalue())
                    FvBuffer.close()
                    # NOTE(review): FvBuffer.len is read after close(); this works
                    # with StringIO.StringIO but would fail with io.BytesIO -
                    # confirm before any Python 3 migration.
                    FvOffset = FvOffset + FvBuffer.len
                    Size = Size - FvBuffer.len
                    continue
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "FV (%s) is NOT described in FDF file!" % (RegionData))
            #
            # Add the exist Fv image into FD buffer
            #
            if FileName != None:
                FileLength = os.stat(FileName)[ST_SIZE]
                if FileLength > Size:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "Size of FV File (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
                BinFile = open(FileName, 'r+b')
                Buffer.write(BinFile.read())
                BinFile.close()
                Size = Size - FileLength
        #
        # Pad the left buffer
        #
        if Size > 0:
            if (ErasePolarity == '1'):
                PadData = 0xFF
            else:
                PadData = 0
            for i in range(0, Size):
                Buffer.write(pack('B', PadData))

    if self.RegionType == 'CAPSULE':
        #
        # Get Capsule from Capsule Dict
        #
        for RegionData in self.RegionDataList:
            if RegionData.endswith(".cap"):
                # Region refers to a pre-built .cap file on disk.
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
                if RegionData[1] != ':':
                    RegionData = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
                FileName = RegionData
            elif RegionData.upper() + 'cap' in ImageBinDict.keys():
                # Capsule image was already generated earlier in this build.
                GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                FileName = ImageBinDict[RegionData.upper() + 'cap']
            else:
                #
                # Generate Capsule image and Put it into FD buffer
                #
                CapsuleObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
                    CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
                if CapsuleObj != None:
                    CapsuleObj.CapsuleName = RegionData.upper()
                    GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                    #
                    # Call GenFv tool to generate Capsule Image
                    #
                    FileName = CapsuleObj.GenCapsule()
                    CapsuleObj.CapsuleName = None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "Capsule (%s) is NOT described in FDF file!" % (RegionData))
            #
            # Add the capsule image into FD buffer
            #
            FileLength = os.stat(FileName)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." % (FileLength, RegionData, Size))
            BinFile = open(FileName, 'r+b')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        if Size > 0:
            if (ErasePolarity == '1'):
                PadData = 0xFF
            else:
                PadData = 0
            for i in range(0, Size):
                Buffer.write(pack('B', PadData))

    if self.RegionType == 'FILE':
        for RegionData in self.RegionDataList:
            RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
            if RegionData[1] != ':':
                RegionData = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
            if not os.path.exists(RegionData):
                EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
            #
            # Add the file image into FD buffer
            #
            FileLength = os.stat(RegionData)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size of File (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
            GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
            BinFile = open(RegionData, 'rb')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        if Size > 0:
            if (ErasePolarity == '1'):
                PadData = 0xFF
            else:
                PadData = 0
            for i in range(0, Size):
                Buffer.write(pack('B', PadData))

    if self.RegionType == 'DATA':
        GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
        DataSize = 0
        for RegionData in self.RegionDataList:
            # Each entry is a comma-separated list of hex byte values.
            Data = RegionData.split(',')
            DataSize = DataSize + len(Data)
            if DataSize > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Size of DATA is larger than Region Size ")
            else:
                for item in Data:
                    Buffer.write(pack('B', int(item, 16)))
        Size = Size - DataSize
        #
        # Pad the left buffer
        #
        if Size > 0:
            if (ErasePolarity == '1'):
                PadData = 0xFF
            else:
                PadData = 0
            for i in range(0, Size):
                Buffer.write(pack('B', PadData))

    if self.RegionType == None:
        # Empty region: fill it entirely with the erase-polarity pad byte.
        GenFdsGlobalVariable.InfLogger(' Region Name = None')
        if (ErasePolarity == '1'):
            PadData = 0xFF
        else:
            PadData = 0
        for i in range(0, Size):
            Buffer.write(pack('B', PadData))
def IncludeToolDefFile(self, FileName):
    ## Parse one tools_def.txt-style file, recursing into !include files.
    #
    #  Populates self.MacroDictionary (DEF(...) macros), self.ToolsDefTxtDictionary
    #  (TARGET_TAG_ARCH_TOOL_ATTR assignments) and self.ToolsDefTxtDatabase
    #  (known targets, tool-chain tags, arches, command types, FAMILY and
    #  BUILDRULEFAMILY per tag).
    #
    #  @param  FileName  path of the tool definition file to parse
    FileContent = []
    if os.path.isfile(FileName):
        try:
            F = open(FileName, 'r')
            try:
                FileContent = F.readlines()
            finally:
                # BUGFIX: the original never closed the handle (leak); release
                # it even when readlines() raises.
                F.close()
        except Exception:
            EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
    else:
        EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

    for Index in range(len(FileContent)):
        Line = FileContent[Index].strip()
        # skip blank lines and comments
        if Line == "" or Line[0] == '#':
            continue

        if Line.startswith("!include"):
            IncFile = Line[8:].strip()
            Done, IncFile = self.ExpandMacros(IncFile)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=IncFile[4:-1], File=FileName, Line=Index+1)
            IncFile = NormPath(IncFile)

            if not os.path.isabs(IncFile):
                #
                # try WORKSPACE
                #
                IncFileTmp = PathClass(IncFile, GlobalData.gWorkspace)
                ErrorCode = IncFileTmp.Validate()[0]
                if ErrorCode != 0:
                    #
                    # try PACKAGES_PATH
                    #
                    IncFileTmp = mws.join(GlobalData.gWorkspace, IncFile)
                    if not os.path.exists(IncFileTmp):
                        #
                        # try directory of current file
                        #
                        IncFileTmp = PathClass(IncFile, os.path.dirname(FileName))
                        ErrorCode = IncFileTmp.Validate()[0]
                        if ErrorCode != 0:
                            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=IncFile)

                # mws.join returns a plain string; PathClass carries .Path
                if isinstance(IncFileTmp, PathClass):
                    IncFile = IncFileTmp.Path
                else:
                    IncFile = IncFileTmp

            self.IncludeToolDefFile(IncFile)
            continue

        NameValuePair = Line.split("=", 1)
        if len(NameValuePair) != 2:
            EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
            continue

        Name = NameValuePair[0].strip()
        Value = NameValuePair[1].strip()

        if Name == "IDENTIFIER":
            # informational only, not a tool definition
            EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
            continue

        MacroDefinition = gMacroDefPattern.findall(Name)
        if MacroDefinition != []:
            # "DEFINE name = value" - record the macro for later expansion
            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index+1)
            MacroName = MacroDefinition[0].strip()
            self.MacroDictionary["DEF(%s)" % MacroName] = Value
            EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
            continue

        Done, Value = self.ExpandMacros(Value)
        if not Done:
            EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                            "Macro or Environment has not been defined",
                            ExtraData=Value[4:-1], File=FileName, Line=Index+1)

        # A definition name has exactly 5 '_'-separated fields:
        # TARGET_TAG_ARCH_COMMANDTYPE_ATTRIBUTE; '*' wildcards all but the last.
        List = Name.split('_')
        if len(List) != 5:
            EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
            continue
        elif List[4] == '*':
            EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
            continue
        else:
            self.ToolsDefTxtDictionary[Name] = Value
            if List[0] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
            if List[1] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
            if List[2] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
            if List[3] != '*':
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]

            # FAMILY may be set once per tool-chain tag; it seeds
            # BUILDRULEFAMILY with the same value and cannot be overridden.
            if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == '*' and List[3] == '*':
                if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                    EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))

            # BUILDRULEFAMILY requires FAMILY to have been declared first.
            if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == '*' and List[3] == '*':
                if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                   or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
# Global macro gMACRO = {} gMACRO['EFI_SOURCE'] = gEFI_SOURCE gMACRO['EDK_SOURCE'] = gEDK_SOURCE gMACRO['SHELL_INF'] = gSHELL_INF gMACRO['CAPSULE_INF'] = '' gNOT_FOUND_FILES = [] gSOURCE_FILES = [] gINF_FILES = {} gDEC_FILES = [] # Log file for unmatched variables gUN_MATCHED_LOG = 'Log_UnMatched.log' gOP_UN_MATCHED = open(gUN_MATCHED_LOG, 'w+') # Log file for all INF files gINF_FILES = 'Log_Inf_File.log' gOP_INF = open(gINF_FILES, 'w+') # Log file for not dispatched PEIM/DRIVER gUN_DISPATCHED_LOG = 'Log_UnDispatched.log' gOP_UN_DISPATCHED = open(gUN_DISPATCHED_LOG, 'w+') # Log file for unmatched variables in function calling gUN_MATCHED_IN_LIBRARY_CALLING_LOG = 'Log_UnMatchedInLibraryCalling.log' gOP_UN_MATCHED_IN_LIBRARY_CALLING = open(gUN_MATCHED_IN_LIBRARY_CALLING_LOG, 'w+') # Log file for order of dispatched PEIM/DRIVER
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict={}, IsMakefile=False):
    ## Generate an FFS section file for this section rule.
    #
    #  Handles 'VERSION' sections, BINARY_FILE_TYPE_UI sections, and the
    #  generic case (PE32/TE/raw etc.) where each input file is converted to
    #  a section via GenFdsGlobalVariable.GenerateSection.
    #
    #  @param  OutputPath    directory the section file(s) are written into
    #  @param  ModuleName    module name used to build the output file name
    #  @param  SecNum        section number used to build the output file name
    #  @param  KeyStringList tool key strings (unused in this method)
    #  @param  FfsInf        FfsInfStatement providing module context (required;
    #                        a None value is reported as a fatal error)
    #  @param  Dict          macro dictionary for file-name expansion.
    #                        NOTE(review): mutable default argument - confirm
    #                        it is never mutated.
    #  @param  IsMakefile    True when emitting makefile actions instead of
    #                        running the tools immediately
    #  @retval (OutputFileList, Align)  generated section files and alignment
    if self.FileName is not None and self.FileName.startswith('PCD('):
        # file name given as a PCD reference: resolve it to the PCD value
        self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
    """Prepare the parameter of GenSection"""
    if FfsInf is not None:
        InfFileName = FfsInf.InfFileName
        SectionType = FfsInf.__ExtendMacro__(self.SectionType)
        Filename = FfsInf.__ExtendMacro__(self.FileName)
        BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
        StringData = FfsInf.__ExtendMacro__(self.StringData)
        ModuleNameStr = FfsInf.__ExtendMacro__('$(MODULE_NAME)')
        NoStrip = True
        # Relocation stripping only applies to XIP module types carrying
        # TE/PE32 sections; precedence: INF override, rule override, section,
        # shadow setting.
        if FfsInf.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE) and SectionType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
            if FfsInf.KeepReloc is not None:
                NoStrip = FfsInf.KeepReloc
            elif FfsInf.KeepRelocFromRule is not None:
                NoStrip = FfsInf.KeepRelocFromRule
            elif self.KeepReloc is not None:
                NoStrip = self.KeepReloc
            elif FfsInf.ShadowFromInfFile is not None:
                NoStrip = FfsInf.ShadowFromInfFile
    else:
        EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" % ModuleName)

    """If the file name was pointed out, add it in FileList"""
    FileList = []
    if Filename is not None:
        Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
        # check if the path is absolute or relative
        if os.path.isabs(Filename):
            Filename = os.path.normpath(Filename)
        else:
            Filename = os.path.normpath(os.path.join(FfsInf.EfiOutputPath, Filename))

        if not self.Optional:
            FileList.append(Filename)
        elif os.path.exists(Filename):
            FileList.append(Filename)
        elif IsMakefile:
            # optional file may not exist yet at makefile-generation time;
            # include it if a .depex target will produce it
            SuffixMap = FfsInf.GetFinalTargetSuffixMap()
            if '.depex' in SuffixMap:
                FileList.append(Filename)
    else:
        FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FileType, self.FileExtension, Dict, IsMakefile=IsMakefile)
        if IsSect:
            # files are already section files; nothing to convert
            return FileList, self.Alignment

    Index = 0
    Align = self.Alignment

    """ If Section type is 'VERSION'"""
    OutputFileList = []
    if SectionType == 'VERSION':
        InfOverrideVerString = False
        if FfsInf.Version is not None:
            # version from the INF takes precedence over rule/file data
            #StringData = FfsInf.Version
            BuildNum = FfsInf.Version
            InfOverrideVerString = True

        if InfOverrideVerString:
            #VerTuple = ('-n', '"' + StringData + '"')
            if BuildNum is not None and BuildNum != '':
                BuildNumTuple = ('-j', BuildNum)
            else:
                BuildNumTuple = tuple()
            Num = SecNum
            OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                 #Ui=StringData,
                                                 Ver=BuildNum, IsMakefile=IsMakefile)
            OutputFileList.append(OutputFile)
        elif FileList != []:
            # read the version string from each listed file
            for File in FileList:
                Index = Index + 1
                Num = '%s.%d' % (SecNum, Index)
                OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
                f = open(File, 'r')
                VerString = f.read()
                f.close()
                BuildNum = VerString
                if BuildNum is not None and BuildNum != '':
                    BuildNumTuple = ('-j', BuildNum)
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                     #Ui=VerString,
                                                     Ver=BuildNum, IsMakefile=IsMakefile)
                OutputFileList.append(OutputFile)
        else:
            # fall back to the literal string from the rule
            BuildNum = StringData
            if BuildNum is not None and BuildNum != '':
                BuildNumTuple = ('-j', BuildNum)
            else:
                BuildNumTuple = tuple()
            BuildNumString = ' ' + ' '.join(BuildNumTuple)

            #if VerString == '' and
            if BuildNumString == '':
                if self.Optional == True:
                    GenFdsGlobalVariable.VerboseLogger("Optional Section don't exist!")
                    return [], None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss Version Section value" % InfFileName)
            Num = SecNum
            OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                 #Ui=VerString,
                                                 Ver=BuildNum, IsMakefile=IsMakefile)
            OutputFileList.append(OutputFile)

    #
    # If Section Type is BINARY_FILE_TYPE_UI
    #
    elif SectionType == BINARY_FILE_TYPE_UI:
        InfOverrideUiString = False
        if FfsInf.Ui is not None:
            # UI string from the INF takes precedence
            StringData = FfsInf.Ui
            InfOverrideUiString = True

        if InfOverrideUiString:
            Num = SecNum
            if IsMakefile and StringData == ModuleNameStr:
                # keep the makefile generic: defer expansion to make time
                StringData = "$(MODULE_NAME)"
            OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                 Ui=StringData, IsMakefile=IsMakefile)
            OutputFileList.append(OutputFile)
        elif FileList != []:
            # read the UI string from each listed file
            for File in FileList:
                Index = Index + 1
                Num = '%s.%d' % (SecNum, Index)
                OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
                f = open(File, 'r')
                UiString = f.read()
                f.close()
                if IsMakefile and UiString == ModuleNameStr:
                    UiString = "$(MODULE_NAME)"
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                     Ui=UiString, IsMakefile=IsMakefile)
                OutputFileList.append(OutputFile)
        else:
            if StringData is not None and len(StringData) > 0:
                UiTuple = ('-n', '"' + StringData + '"')
            else:
                UiTuple = tuple()

                if self.Optional == True:
                    GenFdsGlobalVariable.VerboseLogger("Optional Section don't exist!")
                    return '', None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss UI Section value" % InfFileName)
            Num = SecNum
            if IsMakefile and StringData == ModuleNameStr:
                StringData = "$(MODULE_NAME)"
            OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                 Ui=StringData, IsMakefile=IsMakefile)
            OutputFileList.append(OutputFile)

    else:
        """If File List is empty"""
        if FileList == []:
            if self.Optional == True:
                GenFdsGlobalVariable.VerboseLogger("Optional Section don't exist!")
                return [], None
            else:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
        else:
            """Convert the File to Section file one by one """
            for File in FileList:
                """ Copy Map file to FFS output path """
                Index = Index + 1
                Num = '%s.%d' % (SecNum, Index)
                OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
                File = GenFdsGlobalVariable.MacroExtend(File, Dict)

                #Get PE Section alignment when align is set to AUTO
                if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
                    ImageObj = PeImageClass(File)
                    # express alignment in the GenSec unit convention (bytes/K/M)
                    if ImageObj.SectionAlignment < 0x400:
                        Align = str(ImageObj.SectionAlignment)
                    elif ImageObj.SectionAlignment < 0x100000:
                        Align = str(ImageObj.SectionAlignment // 0x400) + 'K'
                    else:
                        Align = str(ImageObj.SectionAlignment // 0x100000) + 'M'

                if File[(len(File) - 4):] == '.efi':
                    # keep the .map beside the FFS output for debugging
                    MapFile = File.replace('.efi', '.map')
                    CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
                    if IsMakefile:
                        if GenFdsGlobalVariable.CopyList == []:
                            GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
                        else:
                            GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
                    else:
                        if os.path.exists(MapFile):
                            if not os.path.exists(CopyMapFile) or \
                               (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
                                CopyLongFilePath(MapFile, CopyMapFile)

                if not NoStrip:
                    # preserve the unstripped image, then strip relocations
                    FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
                    if IsMakefile:
                        if GenFdsGlobalVariable.CopyList == []:
                            GenFdsGlobalVariable.CopyList = [(File, FileBeforeStrip)]
                        else:
                            GenFdsGlobalVariable.CopyList.append((File, FileBeforeStrip))
                    else:
                        if not os.path.exists(FileBeforeStrip) or \
                           (os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
                            CopyLongFilePath(File, FileBeforeStrip)
                    StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
                    GenFdsGlobalVariable.GenerateFirmwareImage(StrippedFile, [File], Strip=True, IsMakefile=IsMakefile)
                    File = StrippedFile

                """For TE Section call GenFw to generate TE image"""
                if SectionType == BINARY_FILE_TYPE_TE:
                    TeFile = os.path.join(OutputPath, ModuleName + 'Te.raw')
                    GenFdsGlobalVariable.GenerateFirmwareImage(TeFile, [File], Type='te', IsMakefile=IsMakefile)
                    File = TeFile

                """Call GenSection"""
                GenFdsGlobalVariable.GenerateSection(OutputFile, [File], Section.Section.SectionType.get(SectionType), IsMakefile=IsMakefile)
                OutputFileList.append(OutputFile)

    return OutputFileList, Align
def GenBsfInf(self): FvList = self.GetFvList() self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf') BsfInf = open(self.BsfInfName, 'w+') if self.ResetBin is not None: BsfInf.writelines("[OPTIONS]" + T_CHAR_LF) BsfInf.writelines ("IA32_RST_BIN" + \ " = " + \ GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.ResetBin)) + \ T_CHAR_LF) BsfInf.writelines(T_CHAR_LF) BsfInf.writelines("[COMPONENTS]" + T_CHAR_LF) for ComponentObj in self.ComponentStatementList: BsfInf.writelines ("COMP_NAME" + \ " = " + \ ComponentObj.CompName + \ T_CHAR_LF) if ComponentObj.CompLoc.upper() == 'NONE': BsfInf.writelines ("COMP_LOC" + \ " = " + \ 'N' + \ T_CHAR_LF) elif ComponentObj.FilePos is not None: BsfInf.writelines ("COMP_LOC" + \ " = " + \ ComponentObj.FilePos + \ T_CHAR_LF) else: Index = FvList.index(ComponentObj.CompLoc.upper()) if Index == 0: BsfInf.writelines ("COMP_LOC" + \ " = " + \ 'F' + \ T_CHAR_LF) elif Index == 1: BsfInf.writelines ("COMP_LOC" + \ " = " + \ 'S' + \ T_CHAR_LF) BsfInf.writelines ("COMP_TYPE" + \ " = " + \ ComponentObj.CompType + \ T_CHAR_LF) BsfInf.writelines ("COMP_VER" + \ " = " + \ ComponentObj.CompVer + \ T_CHAR_LF) BsfInf.writelines ("COMP_CS" + \ " = " + \ ComponentObj.CompCs + \ T_CHAR_LF) BinPath = ComponentObj.CompBin if BinPath != '-': BinPath = GenFdsGlobalVariable.MacroExtend( GenFdsGlobalVariable.ReplaceWorkspaceMacro(BinPath)) BsfInf.writelines ("COMP_BIN" + \ " = " + \ BinPath + \ T_CHAR_LF) SymPath = ComponentObj.CompSym if SymPath != '-': SymPath = GenFdsGlobalVariable.MacroExtend( GenFdsGlobalVariable.ReplaceWorkspaceMacro(SymPath)) BsfInf.writelines ("COMP_SYM" + \ " = " + \ SymPath + \ T_CHAR_LF) BsfInf.writelines ("COMP_SIZE" + \ " = " + \ ComponentObj.CompSize + \ T_CHAR_LF) BsfInf.writelines(T_CHAR_LF) BsfInf.close()