def ParseConfig(self):
    Filepath = os.path.normpath(self.Filename)
    if not os.path.isfile(Filepath):
        ErrorMsg = "Can't find configuration file '%s'" % Filepath
        EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)

    LineNo = 0
    for Line in open(Filepath, 'r'):
        LineNo = LineNo + 1
        Line = CleanString(Line)
        if Line != '':
            List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
            if List[0] not in self.__dict__:
                ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
                EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
            if List[0] == 'ModifierList':
                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
            if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
                continue
            if List[0] == 'SkipDirList':
                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
            if List[0] == 'SkipFileList':
                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
            if List[0] == 'BinaryExtList':
                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
            if List[0] == 'Copyright':
                List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
            self.__dict__[List[0]] = List[1]
def GetValidExpression(self, TokenSpaceGuid, PcdCName):
    SqlCommand = "select Value1,StartLine from %s WHERE Value2='%s' and Value3='%s'" % (self.Table, TokenSpaceGuid, PcdCName)
    self.Cur.execute(SqlCommand)
    validateranges = []
    validlists = []
    expressions = []
    try:
        for row in self.Cur:
            comment = row[0]
            LineNum = row[1]
            comment = comment.strip("#")
            comment = comment.strip()
            oricomment = comment
            if comment.startswith("@ValidRange"):
                comment = comment.replace("@ValidRange", "", 1)
                validateranges.append(comment.split("|")[1].strip())
            if comment.startswith("@ValidList"):
                comment = comment.replace("@ValidList", "", 1)
                validlists.append(comment.split("|")[1].strip())
            if comment.startswith("@Expression"):
                comment = comment.replace("@Expression", "", 1)
                expressions.append(comment.split("|")[1].strip())
    except Exception, Exc:
        ValidType = ""
        if oricomment.startswith("@ValidRange"):
            ValidType = "@ValidRange"
        if oricomment.startswith("@ValidList"):
            ValidType = "@ValidList"
        if oricomment.startswith("@Expression"):
            ValidType = "@Expression"
        EdkLogger.error('Parser', FORMAT_INVALID,
                        "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName),
                        ExtraData=oricomment, File=self.MetaFile, Line=LineNum)
        return set(), set(), set()
def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
    if Str == None:
        return None

    Dict = {'$(WORKSPACE)'      : GenFdsGlobalVariable.WorkSpaceDir,
            '$(EDK_SOURCE)'     : GenFdsGlobalVariable.EdkSourceDir,
#            '$(OUTPUT_DIRECTORY)': GenFdsGlobalVariable.OutputDirFromDsc,
            '$(TARGET)'         : GenFdsGlobalVariable.TargetName,
            '$(TOOL_CHAIN_TAG)' : GenFdsGlobalVariable.ToolChainTag,
            '$(SPACE)'          : ' '
           }

    OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[GenFdsGlobalVariable.ArchList[0]]
    if Arch != 'COMMON' and Arch in GenFdsGlobalVariable.ArchList:
        OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[Arch]
    Dict['$(OUTPUT_DIRECTORY)'] = OutputDir

    if MacroDict != None and len(MacroDict) != 0:
        Dict.update(MacroDict)

    for key in Dict.keys():
        if Str.find(key) >= 0:
            Str = Str.replace(key, Dict[key])

    if Str.find('$(ARCH)') >= 0:
        if len(GenFdsGlobalVariable.ArchList) == 1:
            Str = Str.replace('$(ARCH)', GenFdsGlobalVariable.ArchList[0])
        else:
            EdkLogger.error("GenFds", GENFDS_ERROR, "No way to determine $(ARCH) for %s" % Str)

    return Str
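# Illustrative sketch (not part of the build tools): the same replace-loop
# technique applied to a plain macro dictionary.  The macro names and values
# below are made up for the example.
def _ExpandMacros(Text, Macros):
    # substitute every known "$(NAME)" style key that occurs in the string
    for Key, Value in Macros.items():
        if Text.find(Key) >= 0:
            Text = Text.replace(Key, Value)
    return Text

# _ExpandMacros('$(WORKSPACE)/Build', {'$(WORKSPACE)': '/work/edk2'})
# -> '/work/edk2/Build'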
def Options():
    OptionList = [
        make_option("-f", "--offset", dest="PcdOffset", action="store", type="int",
                    help="Start offset to the image is used to store PCD value."),
        make_option("-u", "--value", dest="PcdValue", action="store",
                    help="PCD value will be updated into the image."),
        make_option("-t", "--type", dest="PcdTypeName", action="store",
                    help="The name of PCD data type may be one of VOID*, BOOLEAN, UINT8, UINT16, UINT32, UINT64."),
        make_option("-s", "--maxsize", dest="PcdMaxSize", action="store", type="int",
                    help="Max size of data buffer is taken by PCD value. It must be set when PCD type is VOID*."),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]

    # use clearer usage to override default usage message
    UsageString = "%prog -f Offset -u Value -t Type [-s MaxSize] <input_file>"

    Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)

    Options, Args = Parser.parse_args()

    # error check
    if len(Args) == 0:
        EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData=Parser.get_usage())

    InputFile = Args[len(Args) - 1]
    return Options, InputFile
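# Illustrative invocation (hypothetical file name and values, shown only to
# make the options above concrete): patch a BOOLEAN PCD at offset 256 of an
# image file.
#
#   PatchPcdValue -f 256 -u 0x1 -t BOOLEAN input.fd
#
# The last positional argument is taken as the input file, which matches
# InputFile = Args[len(Args) - 1] above.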
def GetStringObject(self, Item):
    Language = ''
    Value = ''

    Name = Item.split()[1]
    # Check that the string token name uses only upper case characters
    if Name != '':
        MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
        if MatchString == None or MatchString.end(0) != len(Name):
            EdkLogger.error('Unicode File Parser', FORMAT_INVALID,
                            'The string token name %s defined in UNI file %s contains invalid lower case characters.' % (Name, self.File))
    LanguageList = Item.split(u'#language ')
    for IndexI in range(len(LanguageList)):
        if IndexI == 0:
            continue
        else:
            Language = LanguageList[IndexI].split()[0]
            Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
            Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
            self.AddStringToList(Name, Language, Value)
def Query(self, Model):
    SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine
                    from %s
                    where Model = %s
                    and Enabled > -1""" % (self.Table, Model)
    EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
    self.Cur.execute(SqlCommand)
    return self.Cur.fetchall()
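# Illustrative sketch (a hypothetical helper, not part of the original table
# classes): the Model value can be bound with a sqlite3 "?" placeholder
# instead of string formatting; the table name still has to be interpolated,
# since identifiers cannot be bound as parameters.
def _QueryByModel(Cursor, Table, Model):
    SqlCommand = ("select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine "
                  "from %s where Model = ? and Enabled > -1") % Table
    Cursor.execute(SqlCommand, (Model,))
    return Cursor.fetchall()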
def CreateCCodeDB(FileNameList):
    FileObjList = []
    ParseErrorFileList = []
    ParsedFiles = {}
    for FullName in FileNameList:
        if os.path.splitext(FullName)[1] in ('.h', '.c'):
            if FullName.lower() in ParsedFiles:
                continue
            ParsedFiles[FullName.lower()] = 1
            EdkLogger.info("Parsing " + FullName)
            model = FullName.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
            collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
            try:
                collector.ParseFile()
            except:
                ParseErrorFileList.append(FullName)
            BaseName = os.path.basename(FullName)
            DirName = os.path.dirname(FullName)
            Ext = os.path.splitext(BaseName)[1].lstrip('.')
            ModifiedTime = os.path.getmtime(FullName)
            FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
            FileObjList.append(FileObj)
            collector.CleanFileProfileBuffer()

    if len(ParseErrorFileList) > 0:
        EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))

    Db = EotGlobalData.gDb
    for file in FileObjList:
        Db.InsertOneFile(file)

    Db.UpdateIdentifierBelongsToFunction()
def InitDatabase(self, NewDatabase = True):
    EdkLogger.verbose("\nInitialize ECC database started ...")
    #
    # Drop all old existing tables
    #
    if NewDatabase:
        if os.path.exists(self.DbPath):
            os.remove(self.DbPath)
    self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
    self.Conn.execute("PRAGMA page_size=4096")
    self.Conn.execute("PRAGMA synchronous=OFF")
    # to avoid non-ascii character conversion error
    self.Conn.text_factory = str
    self.Cur = self.Conn.cursor()

    self.TblDataModel = TableDataModel(self.Cur)
    self.TblFile = TableFile(self.Cur)
    self.TblFunction = TableFunction(self.Cur)
    self.TblIdentifier = TableIdentifier(self.Cur)
    self.TblPcd = TablePcd(self.Cur)
    self.TblReport = TableReport(self.Cur)
    self.TblInf = ModuleTable(self.Cur)
    self.TblDec = PackageTable(self.Cur)
    self.TblDsc = PlatformTable(self.Cur)
    self.TblFdf = TableFdf(self.Cur)

    #
    # Create new tables
    #
    if NewDatabase:
        self.TblDataModel.Create()
        self.TblFile.Create()
        self.TblFunction.Create()
        self.TblPcd.Create()
        self.TblReport.Create()
        self.TblInf.Create()
        self.TblDec.Create()
        self.TblDsc.Create()
        self.TblFdf.Create()

    #
    # Init each table's ID
    #
    self.TblDataModel.InitID()
    self.TblFile.InitID()
    self.TblFunction.InitID()
    self.TblPcd.InitID()
    self.TblReport.InitID()
    self.TblInf.InitID()
    self.TblDec.InitID()
    self.TblDsc.InitID()
    self.TblFdf.InitID()

    #
    # Initialize table DataModel
    #
    if NewDatabase:
        self.TblDataModel.InitTable()

    EdkLogger.verbose("Initialize ECC database ... DONE!")
def Generate(self, File=None):
    Buffer = BytesIO()
    if len(self.PostfixNotation) == 0:
        return False

    for Item in self.PostfixNotation:
        if Item in self.Opcode[self.Phase]:
            Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
        elif Item in self.SupportedOpcode:
            EdkLogger.error("GenDepex", FORMAT_INVALID,
                            "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
                            ExtraData=self.ExpressionString)
        else:
            Buffer.write(self.GetGuidValue(Item))

    FilePath = ""
    FileChangeFlag = True
    if File is None:
        sys.stdout.write(Buffer.getvalue())
        FilePath = "STDOUT"
    else:
        FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)

    Buffer.close()
    return FileChangeFlag
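# Illustrative sketch (not GenDepex itself): how a postfix token stream can be
# lowered to one opcode byte per operator with struct.pack, the same packing
# primitive used above.  The opcode values here are made up for the example.
from io import BytesIO
from struct import pack

_DEMO_OPCODES = {"PUSH": 0x02, "AND": 0x03, "END": 0x08}   # hypothetical values

def _EncodePostfix(Tokens):
    Buf = BytesIO()
    for Token in Tokens:
        # each operator becomes a single unsigned byte in the output stream
        Buf.write(pack("B", _DEMO_OPCODES[Token]))
    return Buf.getvalue()

# _EncodePostfix(["PUSH", "PUSH", "AND", "END"]) -> b'\x02\x02\x03\x08'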
def VerifyUcs2Data(self, FileIn, FileName, Encoding):
    Ucs2Info = codecs.lookup('ucs-2')
    #
    # Convert to unicode
    #
    try:
        FileDecoded = codecs.decode(FileIn, Encoding)
        Ucs2Info.encode(FileDecoded)
    except:
        UniFile = StringIO.StringIO(FileIn)
        Info = codecs.lookup(Encoding)
        (Reader, Writer) = (Info.streamreader, Info.streamwriter)
        File = codecs.StreamReaderWriter(UniFile, Reader, Writer)
        LineNumber = 0
        ErrMsg = lambda Encoding, LineNumber: \
                 '%s contains invalid %s characters on line %d.' % \
                 (FileName, Encoding, LineNumber)
        while True:
            LineNumber = LineNumber + 1
            try:
                Line = File.readline()
                if Line == '':
                    EdkLogger.error('Unicode File Parser', PARSER_ERROR,
                                    ErrMsg(Encoding, LineNumber))
                Ucs2Info.encode(Line)
            except:
                EdkLogger.error('Unicode File Parser', PARSER_ERROR,
                                ErrMsg('UCS-2', LineNumber))
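# Illustrative sketch (standalone, not the parser): the 'ucs-2' codec looked up
# above comes from a codec registration elsewhere in the tools.  Conceptually,
# UCS-2 text is UTF-16 text restricted to the Basic Multilingual Plane, which
# the helper below checks directly.
def _IsUcs2(Text):
    # every code point must fit in 16 bits; anything above 0xFFFF would need
    # a surrogate pair and is therefore not plain UCS-2
    return all(ord(Char) <= 0xFFFF for Char in Text)

# _IsUcs2(u'abc')          -> True
# _IsUcs2(u'\U0001F600')   -> False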
def GetLanguageCode(LangName, IsCompatibleMode, File):
    global LangConvTable

    length = len(LangName)
    if IsCompatibleMode:
        if length == 3 and LangName.isalpha():
            TempLangName = LangConvTable.get(LangName.lower())
            if TempLangName != None:
                return TempLangName
            return LangName
        else:
            EdkLogger.error("Unicode File Parser", FORMAT_INVALID,
                            "Invalid ISO 639-2 language code : %s" % LangName, File)

    if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
        return LangName
    if length == 2:
        if LangName.isalpha():
            return LangName
    elif length == 3:
        if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
            return LangName
    elif length == 5:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
    elif length >= 6:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
        if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
            return LangName

    EdkLogger.error("Unicode File Parser", FORMAT_INVALID,
                    "Invalid RFC 4646 language code : %s" % LangName, File)
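# Worked examples (assuming RFC 4646 mode, i.e. IsCompatibleMode is False) of
# what the checks above accept or reject:
#
#   "en"        -> accepted (two-letter primary subtag)
#   "en-US"     -> accepted (length 5, letters plus '-' at position 2)
#   "x-custom"  -> accepted (private-use tag starting with "x-")
#   "engl"      -> rejected, reported as an invalid RFC 4646 language code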
def Main():
    try:
        #
        # Check input parameter
        #
        EdkLogger.Initialize()
        CommandOptions, InputFile = Options()
        if CommandOptions.LogLevel < EdkLogger.DEBUG_9:
            EdkLogger.SetLevel(CommandOptions.LogLevel + 1)
        else:
            EdkLogger.SetLevel(CommandOptions.LogLevel)

        if not os.path.exists(InputFile):
            EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
            return 1

        if CommandOptions.PcdOffset == None or CommandOptions.PcdValue == None or CommandOptions.PcdTypeName == None:
            EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset, PcdValue or PcdTypeName is not specified.")
            return 1
        if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
            EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
            return 1
        if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
            EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
            return 1
        #
        # Patch value into binary image.
        #
        ReturnValue, ErrorInfo = PatchBinaryFile(InputFile, CommandOptions.PcdOffset, CommandOptions.PcdTypeName, CommandOptions.PcdValue, CommandOptions.PcdMaxSize)
        if ReturnValue != 0:
            EdkLogger.error("PatchPcdValue", ReturnValue, ExtraData=ErrorInfo)
            return 1
        return 0
    except:
        return 1
def GetFileList(SourceFileList, IncludeList, SkipList):
    if IncludeList is None:
        EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")

    FileList = []
    if SkipList is None:
        SkipList = []

    for File in SourceFileList:
        for Dir in IncludeList:
            if not os.path.exists(Dir):
                continue
            File = os.path.join(Dir, File.Path)
            #
            # Ignore Dir
            #
            if os.path.isfile(File) != True:
                continue
            #
            # Ignore file listed in skip list
            #
            IsSkip = False
            for Skip in SkipList:
                if os.path.splitext(File)[1].upper() == Skip.upper():
                    EdkLogger.verbose("Skipped %s for string token uses search" % File)
                    IsSkip = True
                    break

            if not IsSkip:
                FileList.append(File)

            break

    return FileList
def PackFile(self, File, ArcName=None):
    try:
        print "packing ...", File
        self._ZipFile.write(File, ArcName)
    except BaseException, X:
        EdkLogger.error("PackagingTool", FILE_COMPRESS_FAILURE,
                        ExtraData="%s (%s)" % (File, str(X)))
def _ParserInf(self):
    FileLinesList = []
    UserExtFind = False
    FindEnd = True
    FileLastLine = False
    SectionLine = ''
    SectionData = []

    try:
        FileLinesList = open(self._FilePath, "r", 0).readlines()
    except BaseException:
        EdkLogger.error("build", AUTOGEN_ERROR, 'Failed to open file %s.' % self._FilePath)

    for Index in range(0, len(FileLinesList)):
        line = str(FileLinesList[Index]).strip()
        if Index + 1 == len(FileLinesList):
            FileLastLine = True
            NextLine = ''
        else:
            NextLine = str(FileLinesList[Index + 1]).strip()

        if UserExtFind and FindEnd == False:
            if line:
                SectionData.append(line)

        if line.startswith(TAB_SECTION_START) and line.endswith(TAB_SECTION_END):
            SectionLine = line
            UserExtFind = True
            FindEnd = False

        if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
            NextLine[-1] == TAB_SECTION_END) or FileLastLine:
            UserExtFind = False
            FindEnd = True
            self._FileSectionDataList.append({SectionLine: SectionData[:]})
            del SectionData[:]
            SectionLine = ''
def ToCSV(self, Filename='Report.csv'):
    try:
        File = open(Filename, 'w+')
        File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
        RecordSet = self.Query()
        Index = 0
        for Record in RecordSet:
            Index = Index + 1
            ErrorID = Record[1]
            OtherMsg = Record[2]
            BelongsToTable = Record[3]
            BelongsToItem = Record[4]
            IsCorrected = Record[5]
            SqlCommand = ''
            if BelongsToTable == 'File':
                SqlCommand = """select 1, FullPath from %s where ID = %s
                             """ % (BelongsToTable, BelongsToItem)
            else:
                SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
                                where A.ID = %s and B.ID = A.BelongsToFile
                             """ % (BelongsToTable, BelongsToItem)
            NewRecord = self.Exec(SqlCommand)
            if NewRecord != []:
                File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
                EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
        File.close()
    except IOError:
        NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
        EdkLogger.warn("ECC", "The report file %s is locked by another process, use %s instead!" % (Filename, NewFilename))
        self.ToCSV(NewFilename)
def LoadFvInfo(self):
    EdkLogger.quiet("Parsing FV file ... ")
    EotGlobalData.gFV = MultipleFv(EotGlobalData.gFV_FILE)
    EotGlobalData.gFV.Dispatch(EotGlobalData.gDb)

    for Protocol in EotGlobalData.gProtocolList:
        EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write("%s\n" % Protocol)
def MyOptionParser():
    #
    # Process command line firstly.
    #
    parser = OptionParser(version="%s - Version %s" % (PROJECT_NAME, VERSION),
                          description='',
                          prog='BPDG',
                          usage=st.LBL_BPDG_USAGE
                          )
    parser.add_option('-d', '--debug', action='store', type="int", dest='debug_level',
                      help=st.MSG_OPTION_DEBUG_LEVEL)
    parser.add_option('-v', '--verbose', action='store_true', dest='opt_verbose',
                      help=st.MSG_OPTION_VERBOSE)
    parser.add_option('-q', '--quiet', action='store_true', dest='opt_quiet', default=False,
                      help=st.MSG_OPTION_QUIET)
    parser.add_option('-o', '--vpd-filename', action='store', dest='bin_filename',
                      help=st.MSG_OPTION_VPD_FILENAME)
    parser.add_option('-m', '--map-filename', action='store', dest='filename',
                      help=st.MSG_OPTION_MAP_FILENAME)
    parser.add_option('-f', '--force', action='store_true', dest='opt_force',
                      help=st.MSG_OPTION_FORCE)

    (options, args) = parser.parse_args()
    if len(args) == 0:
        EdkLogger.info("Please specify the filename.txt file which contains the VPD PCD info!")
        EdkLogger.info(parser.usage)
        sys.exit(1)
    return options, args
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):
    self.RuleFile = File
    # Read build rules from file if it's not none
    if File != None:
        try:
            self.RuleContent = open(File, 'r').readlines()
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
    elif Content != None:
        self.RuleContent = Content
    else:
        EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")

    self.SupportedToolChainFamilyList = SupportedFamily
    self.RuleDatabase = tdict(True, 4)      # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
    self.Ext2FileType = {}                  # {ext : file-type}
    self.FileTypeList = set()

    self._LineIndex = LineIndex
    self._State = ""
    self._RuleInfo = tdict(True, 2)         # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
    self._FileType = ''
    self._BuildTypeList = []
    self._ArchList = []
    self._FamilyList = []
    self._TotalToolChainFamilySet = set()
    self._RuleObjectList = []               # FileBuildRule object list
    self._FileVersion = ""

    self.Parse()

    # some intrinsic rules
    self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, "COMMON", "COMMON", "COMMON"] = self._BinaryFileRule
    self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
def Capacity(self):
    if self._Capacity is None:
        self._Capacity = []
        dimension = ArrayIndex.findall(self._DatumType)
        for item in dimension:
            maxsize = item.lstrip("[").rstrip("]").strip()
            if not maxsize:
                maxsize = "-1"
            maxsize = str(int(maxsize, 16)) if maxsize.startswith(("0x", "0X")) else maxsize
            self._Capacity.append(maxsize)
        if hasattr(self, "SkuOverrideValues"):
            for sku in self.SkuOverrideValues:
                for defaultstore in self.SkuOverrideValues[sku]:
                    fields = self.SkuOverrideValues[sku][defaultstore]
                    for demesionattr in fields:
                        fieldinfo = fields[demesionattr]
                        deme = ArrayIndex.findall(demesionattr)
                        for i in range(len(deme)):
                            if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
                                if self._Capacity[i] != "-1":
                                    firstfieldinfo = list(fieldinfo.values())[0]
                                    EdkLogger.error('Build', OPTION_VALUE_INVALID,
                                                    "For Pcd %s, Array Index exceeds the Array size. From %s Line %s \n " %
                                                    (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1], firstfieldinfo[2]))
        if hasattr(self, "DefaultValues"):
            for demesionattr in self.DefaultValues:
                fieldinfo = self.DefaultValues[demesionattr]
                deme = ArrayIndex.findall(demesionattr)
                for i in range(len(deme)):
                    if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
                        if self._Capacity[i] != "-1":
                            firstfieldinfo = list(fieldinfo.values())[0]
                            EdkLogger.error('Build', OPTION_VALUE_INVALID,
                                            "For Pcd %s, Array Index exceeds the Array size. From %s Line %s \n " %
                                            (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1], firstfieldinfo[2]))
    return self._Capacity
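# Illustrative sketch (ArrayIndex is defined elsewhere in the build tools; the
# pattern below is an assumption made for this example): how the bracketed
# dimensions of a C-style array declaration can be pulled out with a regular
# expression and normalized the way Capacity() does.
import re

_ARRAY_INDEX = re.compile(r'\[\s*[0-9a-fA-FxX]*\s*\]')   # assumed pattern

def _GetDimensions(DatumType):
    Sizes = []
    for Item in _ARRAY_INDEX.findall(DatumType):
        Size = Item.lstrip("[").rstrip("]").strip()
        if not Size:
            Size = "-1"                                   # open array, size unknown
        Sizes.append(str(int(Size, 16)) if Size.startswith(("0x", "0X")) else Size)
    return Sizes

# _GetDimensions("UINT8[2][0x10]") -> ['2', '16']
# _GetDimensions("VOID*[]")        -> ['-1']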
def __GenSimpleFileFfs__(self, Rule, InputFileList):
    FfsOutput = self.OutputPath + \
                os.sep + \
                self.__ExtendMacro__(Rule.NameGuid) + \
                '.ffs'

    GenFdsGlobalVariable.VerboseLogger(self.__ExtendMacro__(Rule.NameGuid))
    InputSection = []
    SectionAlignments = []
    for InputFile in InputFileList:
        InputSection.append(InputFile)
        SectionAlignments.append(Rule.SectAlignment)

    if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
        PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
        if len(PcdValue) == 0:
            EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
                            % (Rule.NameGuid))
        if PcdValue.startswith('{'):
            PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
        RegistryGuidStr = PcdValue
        if len(RegistryGuidStr) == 0:
            EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
                            % (Rule.NameGuid))
        self.ModuleGuid = RegistryGuidStr

    GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputSection,
                                     Ffs.Ffs.FdfFvFileTypeToFileType[Rule.FvFileType],
                                     self.ModuleGuid, Fixed=Rule.Fixed,
                                     CheckSum=Rule.CheckSum, Align=Rule.Alignment,
                                     SectionAlign=SectionAlignments
                                     )
    return FfsOutput
def DoInclude(Source, Indent=''):
    NewFileContent = []
    # avoid "A includes B and B includes A" loops
    if Source in gIncludedAslFile:
        EdkLogger.warn("Trim", "Circular include",
                       ExtraData="%s -> %s" % (" -> ".join(gIncludedAslFile), Source))
        return []
    gIncludedAslFile.append(Source)

    try:
        F = open(Source, 'r')
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)

    for Line in F:
        Result = gAslIncludePattern.findall(Line)
        if len(Result) == 0:
            NewFileContent.append("%s%s" % (Indent, Line))
            continue
        CurrentIndent = Indent + Result[0][0]
        IncludedFile = Result[0][1]
        NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent))

    gIncludedAslFile.pop()
    F.close()

    return NewFileContent
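# Illustrative sketch (the real pattern, gAslIncludePattern, is defined
# elsewhere in Trim; the regex below is an assumption for the example): an ASL
# include line is expected to yield (leading-indent, file-name) tuples, which
# is what Result[0][0] and Result[0][1] above consume.
import re

_ASL_INCLUDE = re.compile(r'^(\s*)[Ii]nclude\s*\("?([^"\(\)]+)"?\)')   # assumed

# _ASL_INCLUDE.findall('  Include ("Common.asl")')
# -> [('  ', 'Common.asl')]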
def __init__(self):
    # Version and Copyright
    self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
    self.Version = "%prog Version " + self.VersionNumber
    self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."

    self.InitDefaultConfigIni()
    self.OutputFile = 'output.txt'
    self.ReportFile = 'Report.csv'
    self.ExceptionFile = 'exception.xml'
    self.IsInit = True
    self.ScanSourceCode = True
    self.ScanMetaData = True
    self.MetaFile = ''
    self.OnlyScan = None

    # Parse the options and args
    self.ParseOption()
    EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")

    WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
    os.environ["WORKSPACE"] = WorkspaceDir

    # set multiple workspace
    PackagesPath = os.getenv("PACKAGES_PATH")
    mws.setWs(WorkspaceDir, PackagesPath)

    GlobalData.gWorkspace = WorkspaceDir
    GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir

    EdkLogger.info("Loading ECC configuration ... done")

    # Generate checkpoints list
    EccGlobalData.gConfig = Configuration(self.ConfigFile)

    # Generate exception list
    EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)

    # Init Ecc database
    EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
    EccGlobalData.gDb.InitDatabase(self.IsInit)

    #
    # Get files real name in workspace dir
    #
    GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)

    # Build ECC database
    self.BuildDatabase()
    self.DetectOnlyScanDirs()

    # Start to check
    self.Check()

    # Show report
    self.GenReport()

    # Close Database
    EccGlobalData.gDb.Close()
def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
    VfrNameList = []
    if os.path.isdir(DebugDir):
        for CurrentDir, Dirs, Files in os.walk(DebugDir):
            for FileName in Files:
                Name, Ext = os.path.splitext(FileName)
                if Ext == '.c' and Name != 'AutoGen':
                    VfrNameList.append(Name + 'Bin')

    VfrNameList.append(ModuleName + 'Strings')

    EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
    MapFileName = os.path.join(DebugDir, ModuleName + '.map')
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)

    if not VfrUniOffsetList:
        return

    try:
        fInputfile = open(OutputFile, "wb+")
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" % OutputFile, None)

    # Use an instance of BytesIO to cache data
    fStringIO = BytesIO()

    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
            fStringIO.write(UniGuid)
            UniValue = pack('Q', int(Item[1], 16))
            fStringIO.write(UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
            fStringIO.write(VfrGuid)
            VfrValue = pack('Q', int(Item[1], 16))
            fStringIO.write(VfrValue)

    #
    # write data into file.
    #
    try:
        fInputfile.write(fStringIO.getvalue())
    except:
        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file has been locked or is in use by other applications." % OutputFile, None)

    fStringIO.close()
    fInputfile.close()
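# Illustrative sketch (standalone, not the Trim tool): how a GUID prefix and a
# 64-bit little-endian offset are concatenated with struct.pack, the same
# layout written into fStringIO above.  The GUID bytes and offset below are
# made up for the example.
from io import BytesIO
from struct import pack

def _PackGuidAndOffset(GuidBytes, OffsetHex):
    Buf = BytesIO()
    Buf.write(GuidBytes)                       # 16-byte GUID, already byte-swapped
    Buf.write(pack('Q', int(OffsetHex, 16)))   # 8-byte unsigned offset
    return Buf.getvalue()

# len(_PackGuidAndOffset(b'\x00' * 16, "0x1a40")) -> 24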
def GetLangDef(self, File, Line):
    Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
    if len(Lang) != 3:
        try:
            FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
        except UnicodeError, X:
            EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File)
        except:
def InitTable(self):
    EdkLogger.verbose("\nInitialize table DataModel started ...")
    for Item in DataClass.MODEL_LIST:
        CrossIndex = Item[1]
        Name = Item[0]
        Description = Item[0]
        self.Insert(CrossIndex, Name, Description)
    EdkLogger.verbose("Initialize table DataModel ... DONE!")
def PreProcess(self, File):
    if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
        EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)

    try:
        FileIn = self.OpenUniFile(LongFilePath(File.Path))
    except UnicodeError, X:
        EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path)
def PreProcess(self, File):
    if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
        EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)

    try:
        FileIn = codecs.open(LongFilePath(File.Path), mode='rb', encoding='utf-16').readlines()
    except UnicodeError, X:
        EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path)
def PackFiles(self, Files):
    for F in Files:
        try:
            print "packing ...", F
            self._ZipFile.write(F)
        except BaseException, X:
            EdkLogger.error("PackagingTool", FILE_COMPRESS_FAILURE,
                            ExtraData="%s (%s)" % (F, str(X)))
def GetLangDef(self, File, Line):
    Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
    if len(Lang) != 3:
        try:
            FileIn = codecs.open(LongFilePath(File.Path), mode='rb', encoding='utf-16').read()
        except UnicodeError, X:
            EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File)
        except:
def ErrorLogger (msg, File = None, Line = None, ExtraData = None):
    EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
action="store_true", type=None, help= "Only scanning specified folders which are recorded in config.ini file." ) (Opt, Args) = Parser.parse_args() return (Opt, Args) ## # # This acts like the main() function for the script, unless it is 'import'ed into another # script. # if __name__ == '__main__': # Initialize log system EdkLogger.Initialize() EdkLogger.IsRaiseError = False StartTime = time.clock() Ecc = Ecc() FinishTime = time.clock() BuildDuration = time.strftime( "%M:%S", time.gmtime(int(round(FinishTime - StartTime)))) EdkLogger.quiet( "\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
def VerboseLogger (msg):
    EdkLogger.verbose(msg)
def DebugLogger (Level, msg):
    EdkLogger.debug(Level, msg)
class GenFdsGlobalVariable: FvDir = '' OutputDirDict = {} BinDir = '' # will be FvDir + os.sep + 'Ffs' FfsDir = '' FdfParser = None LibDir = '' WorkSpace = None WorkSpaceDir = '' ConfDir = '' EdkSourceDir = '' OutputDirFromDscDict = {} TargetName = '' ToolChainTag = '' RuleDict = {} ArchList = None VtfDict = {} ActivePlatform = None FvAddressFileName = '' VerboseMode = False DebugLevel = -1 SharpCounter = 0 SharpNumberPerLine = 40 FdfFile = '' FdfFileTimeStamp = 0 FixedLoadAddress = False PlatformName = '' BuildRuleFamily = "MSFT" ToolChainFamily = "MSFT" __BuildRuleDatabase = None # # The list whose element are flags to indicate if large FFS or SECTION files exist in FV. # At the beginning of each generation of FV, false flag is appended to the list, # after the call to GenerateSection returns, check the size of the output file, # if it is greater than 0xFFFFFF, the tail flag in list is set to true, # and EFI_FIRMWARE_FILE_SYSTEM3_GUID is passed to C GenFv. # At the end of generation of FV, pop the flag. # List is used as a stack to handle nested FV generation. # LargeFileInFvFlags = [] EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A' LARGE_FILE_SIZE = 0x1000000 SectionHeader = struct.Struct("3B 1B") ## LoadBuildRule # @staticmethod def __LoadBuildRule(): if GenFdsGlobalVariable.__BuildRuleDatabase: return GenFdsGlobalVariable.__BuildRuleDatabase BuildConfigurationFile = os.path.normpath(os.path.join(GenFdsGlobalVariable.ConfDir, "target.txt")) TargetTxt = TargetTxtClassObject() if os.path.isfile(BuildConfigurationFile) == True: TargetTxt.LoadTargetTxtFile(BuildConfigurationFile) if DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF in TargetTxt.TargetTxtDictionary: BuildRuleFile = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF] if BuildRuleFile in [None, '']: BuildRuleFile = 'Conf/build_rule.txt' GenFdsGlobalVariable.__BuildRuleDatabase = BuildRule(BuildRuleFile) ToolDefinitionFile = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF] if ToolDefinitionFile == '': ToolDefinitionFile = "Conf/tools_def.txt" if os.path.isfile(ToolDefinitionFile): ToolDef = ToolDefClassObject() ToolDef.LoadToolDefFile(ToolDefinitionFile) ToolDefinition = ToolDef.ToolsDefTxtDatabase if DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDefinition \ and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY] \ and ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]: GenFdsGlobalVariable.BuildRuleFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag] if DataType.TAB_TOD_DEFINES_FAMILY in ToolDefinition \ and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY] \ and ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]: GenFdsGlobalVariable.ToolChainFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag] return GenFdsGlobalVariable.__BuildRuleDatabase ## GetBuildRules # @param Inf: object of InfBuildData # @param Arch: current arch # @staticmethod def GetBuildRules(Inf, Arch): if not Arch: Arch = 'COMMON' if not Arch in GenFdsGlobalVariable.OutputDirDict: return {} BuildRuleDatabase = GenFdsGlobalVariable.__LoadBuildRule() if not BuildRuleDatabase: return {} PathClassObj = PathClass(Inf.MetaFile.File, GenFdsGlobalVariable.WorkSpaceDir) Macro = {} Macro["WORKSPACE" ] = GenFdsGlobalVariable.WorkSpaceDir Macro["MODULE_NAME" ] = Inf.BaseName 
Macro["MODULE_GUID" ] = Inf.Guid Macro["MODULE_VERSION" ] = Inf.Version Macro["MODULE_TYPE" ] = Inf.ModuleType Macro["MODULE_FILE" ] = str(PathClassObj) Macro["MODULE_FILE_BASE_NAME" ] = PathClassObj.BaseName Macro["MODULE_RELATIVE_DIR" ] = PathClassObj.SubDir Macro["MODULE_DIR" ] = PathClassObj.SubDir Macro["BASE_NAME" ] = Inf.BaseName Macro["ARCH" ] = Arch Macro["TOOLCHAIN" ] = GenFdsGlobalVariable.ToolChainTag Macro["TOOLCHAIN_TAG" ] = GenFdsGlobalVariable.ToolChainTag Macro["TOOL_CHAIN_TAG" ] = GenFdsGlobalVariable.ToolChainTag Macro["TARGET" ] = GenFdsGlobalVariable.TargetName Macro["BUILD_DIR" ] = GenFdsGlobalVariable.OutputDirDict[Arch] Macro["BIN_DIR" ] = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], Arch) Macro["LIB_DIR" ] = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], Arch) BuildDir = os.path.join( GenFdsGlobalVariable.OutputDirDict[Arch], Arch, PathClassObj.SubDir, PathClassObj.BaseName ) Macro["MODULE_BUILD_DIR" ] = BuildDir Macro["OUTPUT_DIR" ] = os.path.join(BuildDir, "OUTPUT") Macro["DEBUG_DIR" ] = os.path.join(BuildDir, "DEBUG") BuildRules = {} for Type in BuildRuleDatabase.FileTypeList: #first try getting build rule by BuildRuleFamily RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.BuildRuleFamily] if not RuleObject: # build type is always module type, but ... if Inf.ModuleType != Inf.BuildType: RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.BuildRuleFamily] #second try getting build rule by ToolChainFamily if not RuleObject: RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.ToolChainFamily] if not RuleObject: # build type is always module type, but ... if Inf.ModuleType != Inf.BuildType: RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.ToolChainFamily] if not RuleObject: continue RuleObject = RuleObject.Instantiate(Macro) BuildRules[Type] = RuleObject for Ext in RuleObject.SourceFileExtList: BuildRules[Ext] = RuleObject return BuildRules ## GetModuleCodaTargetList # # @param Inf: object of InfBuildData # @param Arch: current arch # @staticmethod def GetModuleCodaTargetList(Inf, Arch): BuildRules = GenFdsGlobalVariable.GetBuildRules(Inf, Arch) if not BuildRules: return [] TargetList = set() FileList = [] if not Inf.IsBinaryModule: for File in Inf.Sources: if File.TagName in ("", "*", GenFdsGlobalVariable.ToolChainTag) and \ File.ToolChainFamily in ("", "*", GenFdsGlobalVariable.ToolChainFamily): FileList.append((File, DataType.TAB_UNKNOWN_FILE)) for File in Inf.Binaries: if File.Target in ['COMMON', '*', GenFdsGlobalVariable.TargetName]: FileList.append((File, File.Type)) for File, FileType in FileList: LastTarget = None RuleChain = [] SourceList = [File] Index = 0 while Index < len(SourceList): Source = SourceList[Index] Index = Index + 1 if File.IsBinary and File == Source and Inf.Binaries != None and File in Inf.Binaries: # Skip all files that are not binary libraries if not Inf.LibraryClass: continue RuleObject = BuildRules[DataType.TAB_DEFAULT_BINARY_FILE] elif FileType in BuildRules: RuleObject = BuildRules[FileType] elif Source.Ext in BuildRules: RuleObject = BuildRules[Source.Ext] else: # stop at no more rules if LastTarget: TargetList.add(str(LastTarget)) break FileType = RuleObject.SourceFileType # stop at STATIC_LIBRARY for library if Inf.LibraryClass and FileType == DataType.TAB_STATIC_LIBRARY: if LastTarget: TargetList.add(str(LastTarget)) break Target = RuleObject.Apply(Source) if not Target: if LastTarget: 
TargetList.add(str(LastTarget)) break elif not Target.Outputs: # Only do build for target with outputs TargetList.add(str(Target)) # to avoid cyclic rule if FileType in RuleChain: break RuleChain.append(FileType) SourceList.extend(Target.Outputs) LastTarget = Target FileType = DataType.TAB_UNKNOWN_FILE return list(TargetList) ## SetDir() # # @param OutputDir Output directory # @param FdfParser FDF contents parser # @param Workspace The directory of workspace # @param ArchList The Arch list of platform # def SetDir (OutputDir, FdfParser, WorkSpace, ArchList): GenFdsGlobalVariable.VerboseLogger( "GenFdsGlobalVariable.OutputDir :%s" %OutputDir) # GenFdsGlobalVariable.OutputDirDict = OutputDir GenFdsGlobalVariable.FdfParser = FdfParser GenFdsGlobalVariable.WorkSpace = WorkSpace GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], 'FV') if not os.path.exists(GenFdsGlobalVariable.FvDir) : os.makedirs(GenFdsGlobalVariable.FvDir) GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs') if not os.path.exists(GenFdsGlobalVariable.FfsDir) : os.makedirs(GenFdsGlobalVariable.FfsDir) if ArchList != None: GenFdsGlobalVariable.ArchList = ArchList T_CHAR_LF = '\n' # # Create FV Address inf file # GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf') FvAddressFile = open (GenFdsGlobalVariable.FvAddressFileName, 'w') # # Add [Options] # FvAddressFile.writelines("[options]" + T_CHAR_LF) BsAddress = '0' for Arch in ArchList: if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress: BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress break FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \ BsAddress + \ T_CHAR_LF) RtAddress = '0' for Arch in ArchList: if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress: RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \ RtAddress + \ T_CHAR_LF) FvAddressFile.close() ## ReplaceWorkspaceMacro() # # @param String String that may contain macro # def ReplaceWorkspaceMacro(String): String = mws.handleWsMacro(String) Str = String.replace('$(WORKSPACE)', GenFdsGlobalVariable.WorkSpaceDir) if os.path.exists(Str): if not os.path.isabs(Str): Str = os.path.abspath(Str) else: Str = mws.join(GenFdsGlobalVariable.WorkSpaceDir, String) return os.path.normpath(Str) ## Check if the input files are newer than output files # # @param Output Path of output file # @param Input Path list of input files # # @retval True if Output doesn't exist, or any Input is newer # @retval False if all Input is older than Output # @staticmethod def NeedsUpdate(Output, Input): if not os.path.exists(Output): return True # always update "Output" if no "Input" given if Input == None or len(Input) == 0: return True # if fdf file is changed after the 'Output" is generated, update the 'Output' OutputTime = os.path.getmtime(Output) if GenFdsGlobalVariable.FdfFileTimeStamp > OutputTime: return True for F in Input: # always update "Output" if any "Input" doesn't 
exist if not os.path.exists(F): return True # always update "Output" if any "Input" is newer than "Output" if os.path.getmtime(F) > OutputTime: return True return False @staticmethod def GenerateSection(Output, Input, Type=None, CompressionType=None, Guid=None, GuidHdrLen=None, GuidAttr=[], Ui=None, Ver=None, InputAlign=None, BuildNumber=None): Cmd = ["GenSec"] if Type not in [None, '']: Cmd += ["-s", Type] if CompressionType not in [None, '']: Cmd += ["-c", CompressionType] if Guid != None: Cmd += ["-g", Guid] if GuidHdrLen not in [None, '']: Cmd += ["-l", GuidHdrLen] if len(GuidAttr) != 0: #Add each guided attribute for Attr in GuidAttr: Cmd += ["-r", Attr] if InputAlign != None: #Section Align is only for dummy section without section type for SecAlign in InputAlign: Cmd += ["--sectionalign", SecAlign] CommandFile = Output + '.txt' if Ui not in [None, '']: #Cmd += ["-n", '"' + Ui + '"'] SectionData = array.array('B', [0,0,0,0]) SectionData.fromstring(Ui.encode("utf_16_le")) SectionData.append(0) SectionData.append(0) Len = len(SectionData) GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15) SaveFileOnChange(Output, SectionData.tostring()) elif Ver not in [None, '']: Cmd += ["-n", Ver] if BuildNumber: Cmd += ["-j", BuildNumber] Cmd += ["-o", Output] SaveFileOnChange(CommandFile, ' '.join(Cmd), False) if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]): return GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section") else: Cmd += ["-o", Output] Cmd += Input SaveFileOnChange(CommandFile, ' '.join(Cmd), False) if GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]): GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section") if (os.path.getsize(Output) >= GenFdsGlobalVariable.LARGE_FILE_SIZE and GenFdsGlobalVariable.LargeFileInFvFlags): GenFdsGlobalVariable.LargeFileInFvFlags[-1] = True @staticmethod def GetAlignment (AlignString): if AlignString == None: return 0 if AlignString in ("1K", "2K", "4K", "8K", "16K", "32K", "64K"): return int (AlignString.rstrip('K')) * 1024 else: return int (AlignString) @staticmethod def GenerateFfs(Output, Input, Type, Guid, Fixed=False, CheckSum=False, Align=None, SectionAlign=None): Cmd = ["GenFfs", "-t", Type, "-g", Guid] if Fixed == True: Cmd += ["-x"] if CheckSum: Cmd += ["-s"] if Align not in [None, '']: Cmd += ["-a", Align] Cmd += ["-o", Output] for I in range(0, len(Input)): Cmd += ("-i", Input[I]) if SectionAlign not in [None, '', []] and SectionAlign[I] not in [None, '']: Cmd += ("-n", SectionAlign[I]) CommandFile = Output + '.txt' SaveFileOnChange(CommandFile, ' '.join(Cmd), False) if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FFS") @staticmethod def GenerateFirmwareVolume(Output, Input, BaseAddress=None, ForceRebase=None, Capsule=False, Dump=False, AddressFile=None, MapFile=None, FfsList=[], FileSystemGuid=None): if not GenFdsGlobalVariable.NeedsUpdate(Output, Input+FfsList): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) Cmd = ["GenFv"] if BaseAddress not in [None, '']: Cmd += ["-r", BaseAddress] if ForceRebase == False: Cmd 
+=["-F", "FALSE"] elif ForceRebase == True: Cmd +=["-F", "TRUE"] if Capsule: Cmd += ["-c"] if Dump: Cmd += ["-p"] if AddressFile not in [None, '']: Cmd += ["-a", AddressFile] if MapFile not in [None, '']: Cmd += ["-m", MapFile] if FileSystemGuid: Cmd += ["-g", FileSystemGuid] Cmd += ["-o", Output] for I in Input: Cmd += ["-i", I] GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FV") @staticmethod def GenerateVtf(Output, Input, BaseAddress=None, FvSize=None): if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) Cmd = ["GenVtf"] if BaseAddress not in [None, ''] and FvSize not in [None, ''] \ and len(BaseAddress) == len(FvSize): for I in range(0, len(BaseAddress)): Cmd += ["-r", BaseAddress[I], "-s", FvSize[I]] Cmd += ["-o", Output] for F in Input: Cmd += ["-f", F] GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate VTF") @staticmethod def GenerateFirmwareImage(Output, Input, Type="efi", SubType=None, Zero=False, Strip=False, Replace=False, TimeStamp=None, Join=False, Align=None, Padding=None, Convert=False): if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) Cmd = ["GenFw"] if Type.lower() == "te": Cmd += ["-t"] if SubType not in [None, '']: Cmd += ["-e", SubType] if TimeStamp not in [None, '']: Cmd += ["-s", TimeStamp] if Align not in [None, '']: Cmd += ["-a", Align] if Padding not in [None, '']: Cmd += ["-p", Padding] if Zero: Cmd += ["-z"] if Strip: Cmd += ["-l"] if Replace: Cmd += ["-r"] if Join: Cmd += ["-j"] if Convert: Cmd += ["-m"] Cmd += ["-o", Output] Cmd += Input GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate firmware image") @staticmethod def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None, Revision=None, DeviceId=None, VendorId=None): InputList = [] Cmd = ["EfiRom"] if len(EfiInput) > 0: if Compress: Cmd += ["-ec"] else: Cmd += ["-e"] for EfiFile in EfiInput: Cmd += [EfiFile] InputList.append (EfiFile) if len(BinaryInput) > 0: Cmd += ["-b"] for BinFile in BinaryInput: Cmd += [BinFile] InputList.append (BinFile) # Check List if not GenFdsGlobalVariable.NeedsUpdate(Output, InputList): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList)) if ClassCode != None: Cmd += ["-l", ClassCode] if Revision != None: Cmd += ["-r", Revision] if DeviceId != None: Cmd += ["-i", DeviceId] if VendorId != None: Cmd += ["-f", VendorId] Cmd += ["-o", Output] GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom") @staticmethod def GuidTool(Output, Input, ToolPath, Options='', returnValue=[]): if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): return GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) Cmd = [ToolPath, ] Cmd += Options.split(' ') Cmd += ["-o", Output] Cmd += Input GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to call " + ToolPath, returnValue) def CallExternalTool (cmd, errorMess, returnValue=[]): if type(cmd) not in (tuple, list): GenFdsGlobalVariable.ErrorLogger("ToolError! 
Invalid parameter type in call to CallExternalTool") if GenFdsGlobalVariable.DebugLevel != -1: cmd += ('--debug', str(GenFdsGlobalVariable.DebugLevel)) GenFdsGlobalVariable.InfLogger (cmd) if GenFdsGlobalVariable.VerboseMode: cmd += ('-v',) GenFdsGlobalVariable.InfLogger (cmd) else: sys.stdout.write ('#') sys.stdout.flush() GenFdsGlobalVariable.SharpCounter = GenFdsGlobalVariable.SharpCounter + 1 if GenFdsGlobalVariable.SharpCounter % GenFdsGlobalVariable.SharpNumberPerLine == 0: sys.stdout.write('\n') try: PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr= subprocess.PIPE, shell=True) except Exception, X: EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) (out, error) = PopenObject.communicate() while PopenObject.returncode == None : PopenObject.wait() if returnValue != [] and returnValue[0] != 0: #get command return value returnValue[0] = PopenObject.returncode return if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: GenFdsGlobalVariable.InfLogger ("Return Value = %d" %PopenObject.returncode) GenFdsGlobalVariable.InfLogger (out) GenFdsGlobalVariable.InfLogger (error) if PopenObject.returncode != 0: print "###", cmd EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)
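# Illustrative sketch (standalone, not GenFdsGlobalVariable.NeedsUpdate): the
# timestamp comparison used throughout the class above to decide whether an
# output file has to be regenerated from its inputs.
import os

def _NeedsRebuild(Output, Inputs):
    if not os.path.exists(Output):
        return True
    if not Inputs:
        return True
    OutputTime = os.path.getmtime(Output)
    for F in Inputs:
        # a missing or newer input both force a rebuild
        if not os.path.exists(F) or os.path.getmtime(F) > OutputTime:
            return True
    return False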
def InfLogger (msg):
    EdkLogger.info(msg)