def GetIncludeListOfFile(WorkSpace, Filepath, Db):
    IncludeList = []
    Filepath = os.path.normpath(Filepath)
    SqlCommand = """
                select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
                    select distinct B.BelongsToFile from File as A left join Inf as B
                        where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
                        and Inf.BelongsToFile = File.ID""" \
                % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
    RecordSet = Db.TblFile.Exec(SqlCommand)
    for Record in RecordSet:
        DecFullPath = os.path.normpath(mws.join(WorkSpace, Record[0]))
        InfFullPath = os.path.normpath(mws.join(WorkSpace, Record[1]))
        (DecPath, DecName) = os.path.split(DecFullPath)
        (InfPath, InfName) = os.path.split(InfFullPath)
        SqlCommand = """select Value1 from Dec where BelongsToFile =
                        (select ID from File where FullPath = '%s') and Model = %s""" \
                    % (DecFullPath, MODEL_EFI_INCLUDE)
        NewRecordSet = Db.TblDec.Exec(SqlCommand)
        if InfPath not in IncludeList:
            IncludeList.append(InfPath)
        for NewRecord in NewRecordSet:
            IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
            if IncludePath not in IncludeList:
                IncludeList.append(IncludePath)
    return IncludeList
def __init__(self):
    # Version and Copyright
    self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
    self.Version = "%prog Version " + self.VersionNumber
    self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."

    self.InitDefaultConfigIni()
    self.OutputFile = 'output.txt'
    self.ReportFile = 'Report.csv'
    self.ExceptionFile = 'exception.xml'
    self.IsInit = True
    self.ScanSourceCode = True
    self.ScanMetaData = True
    self.MetaFile = ''
    self.OnlyScan = None

    # Parse the options and args
    self.ParseOption()
    EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")

    WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
    os.environ["WORKSPACE"] = WorkspaceDir

    # set multiple workspace
    PackagesPath = os.getenv("PACKAGES_PATH")
    mws.setWs(WorkspaceDir, PackagesPath)

    GlobalData.gWorkspace = WorkspaceDir
    GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir

    EdkLogger.info("Loading ECC configuration ... done")
    # Generate checkpoints list
    EccGlobalData.gConfig = Configuration(self.ConfigFile)

    # Generate exception list
    EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)

    # Init Ecc database
    EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
    EccGlobalData.gDb.InitDatabase(self.IsInit)

    #
    # Get files real name in workspace dir
    #
    GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)

    #
    # Build ECC database
    #
    self.BuildDatabase()
    self.DetectOnlyScanDirs()

    # Start to check
    self.Check()

    # Show report
    self.GenReport()

    # Close Database
    EccGlobalData.gDb.Close()
def ReplaceWorkspaceMacro(String):
    String = mws.handleWsMacro(String)
    Str = String.replace('$(WORKSPACE)', GenFdsGlobalVariable.WorkSpaceDir)
    if os.path.exists(Str):
        if not os.path.isabs(Str):
            Str = os.path.abspath(Str)
    else:
        Str = mws.join(GenFdsGlobalVariable.WorkSpaceDir, String)
    return os.path.normpath(Str)
def ApplyBuildOption(self, module):
    PlatformOptions = self.DataPipe.Get("PLA_BO")
    ModuleBuildOptions = self.DataPipe.Get("MOL_BO")
    ModuleOptionFromDsc = ModuleBuildOptions.get((module.MetaFile.File, module.MetaFile.Root))
    if ModuleOptionFromDsc:
        ModuleTypeOptions, PlatformModuleOptions = ModuleOptionFromDsc["ModuleTypeOptions"], ModuleOptionFromDsc["PlatformModuleOptions"]
    else:
        ModuleTypeOptions, PlatformModuleOptions = {}, {}
    ToolDefinition = self.DataPipe.Get("TOOLDEF")
    ModuleOptions = self._ExpandBuildOption(module.BuildOptions)
    BuildRuleOrder = None
    for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
        for Tool in Options:
            for Attr in Options[Tool]:
                if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                    BuildRuleOrder = Options[Tool][Attr]

    AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
                   list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
                   list(ToolDefinition.keys()))
    BuildOptions = defaultdict(lambda: defaultdict(str))
    for Tool in AllTools:
        for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
            if Tool not in Options:
                continue
            for Attr in Options[Tool]:
                #
                # Do not generate it in Makefile
                #
                if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                    continue
                Value = Options[Tool][Attr]
                # check if override is indicated
                if Value.startswith('='):
                    BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])
                else:
                    if Attr != 'PATH':
                        BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)
                    else:
                        BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)
    return BuildOptions, BuildRuleOrder
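# Illustrative sketch of the override rule applied in the loop above (the
# helper name is local to this sketch, not part of BaseTools): a value that
# begins with '=' replaces the accumulated option string, a PATH attribute is
# always replaced, and any other value is appended after a space, mirroring
# the defaultdict(str) accumulation used by ApplyBuildOption().
def _merge_option(Accumulated, Attr, Value):
    if Value.startswith('='):
        return Value[1:]
    if Attr == 'PATH':
        return Value
    return Accumulated + " " + Value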
def LoadToolDefFile(self, FileName):
    # set multiple workspace
    PackagesPath = os.getenv("PACKAGES_PATH")
    mws.setWs(GlobalData.gWorkspace, PackagesPath)

    self.ToolsDefTxtDatabase = {
        TAB_TOD_DEFINES_TARGET:         [],
        TAB_TOD_DEFINES_TOOL_CHAIN_TAG: [],
        TAB_TOD_DEFINES_TARGET_ARCH:    [],
        TAB_TOD_DEFINES_COMMAND_TYPE:   []
    }

    self.IncludeToolDefFile(FileName)

    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))

    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()

    KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
    for Index in range(3, -1, -1):
        # make a copy of the keys to enumerate over to prevent issues when
        # adding/removing items from the original dict.
        for Key in list(self.ToolsDefTxtDictionary.keys()):
            List = Key.split('_')
            if List[Index] == TAB_STAR:
                for String in self.ToolsDefTxtDatabase[KeyList[Index]]:
                    List[Index] = String
                    NewKey = '%s_%s_%s_%s_%s' % tuple(List)
                    if NewKey not in self.ToolsDefTxtDictionary:
                        self.ToolsDefTxtDictionary[NewKey] = self.ToolsDefTxtDictionary[Key]
                del self.ToolsDefTxtDictionary[Key]
            elif List[Index] not in self.ToolsDefTxtDatabase[KeyList[Index]]:
                del self.ToolsDefTxtDictionary[Key]
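# Illustrative sketch (not the BaseTools implementation): tools_def entries are
# keyed as TARGET_TOOLCHAINTAG_ARCH_COMMANDTYPE_ATTRIBUTE, and the loop above
# expands a '*' in any of the first four fields into every concrete value
# recorded for that field.  A simplified stand-alone version of that expansion,
# with hypothetical local names:
def _expand_wildcard_keys(dictionary, known_values_per_field):
    # known_values_per_field: four lists, one per key field
    # (target, tool chain tag, arch, command type)
    for index in range(3, -1, -1):
        for key in list(dictionary.keys()):
            fields = key.split('_')
            if fields[index] == '*':
                for concrete in known_values_per_field[index]:
                    fields[index] = concrete
                    dictionary.setdefault('_'.join(fields), dictionary[key])
                del dictionary[key]
            elif fields[index] not in known_values_per_field[index]:
                del dictionary[key]
    return dictionary

# e.g. _expand_wildcard_keys({'*_GCC5_X64_CC_FLAGS': '-Os'},
#                            [['DEBUG', 'RELEASE'], ['GCC5'], ['X64'], ['CC']])
# yields {'DEBUG_GCC5_X64_CC_FLAGS': '-Os', 'RELEASE_GCC5_X64_CC_FLAGS': '-Os'}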
def _GenPackages(self, Skip):
    Logger.Debug(2, "Generate %s ..." % DT.TAB_PACKAGES)
    #
    # Get all Packages
    #
    PackageObj = self.Parser.InfPackageSection.Packages
    #
    # Go through each arch
    #
    for PackageItemObj in PackageObj:
        #
        # Need package information for dependency check usage
        #
        PackageDependency = PackageDependencyObject()
        PackageDependency.SetPackageFilePath(NormPath(PackageItemObj.GetPackageName()))
        PackageDependency.SetSupArchList(ConvertArchList(PackageItemObj.GetSupArchList()))
        PackageDependency.SetFeatureFlag(PackageItemObj.GetFeatureFlagExp())

        PkgInfo = GetPkgInfoFromDec(mws.join(self.WorkSpace, NormPath(PackageItemObj.GetPackageName())))
        if PkgInfo[1] and PkgInfo[2]:
            PackageDependency.SetGuid(PkgInfo[1])
            PackageDependency.SetVersion(PkgInfo[2])
        elif Skip:
            continue
        else:
            Logger.Error("\nUPT", PARSER_ERROR,
                         ST.ERR_INF_GET_PKG_DEPENDENCY_FAIL % PackageItemObj.GetPackageName(),
                         File=self.FullPath)

        PackageDependencyList = self.GetPackageDependencyList()
        PackageDependencyList.append(PackageDependency)
        self.SetPackageDependencyList(PackageDependencyList)
def NormPath(Path, Defines=None):
    IsRelativePath = False
    if Path:
        if Path[0] == '.':
            IsRelativePath = True
        #
        # Replace with Define
        #
        if Defines:
            Path = ReplaceMacro(Path, Defines)
        #
        # To local path format
        #
        Path = os.path.normpath(Path)
        if Path.startswith(GlobalData.gWorkspace) and not Path.startswith(GlobalData.gBuildDirectory) and not os.path.exists(Path):
            Path = Path[len(GlobalData.gWorkspace):]
            if Path[0] == os.path.sep:
                Path = Path[1:]
            Path = mws.join(GlobalData.gWorkspace, Path)

    if IsRelativePath and Path[0] != '.':
        Path = os.path.join('.', Path)

    return Path
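# Illustrative sketch only: NormPath() delegates '$(NAME)' expansion to
# ReplaceMacro(), which is defined elsewhere in the Common string utilities.
# A minimal stand-in under the assumption that Defines simply maps macro names
# to plain strings (the helper name below is local to this sketch):
def _expand_defines(Path, Defines):
    for Name, Value in (Defines or {}).items():
        Path = Path.replace('$(%s)' % Name, Value)
    return Path

# e.g. _expand_defines('$(WORKSPACE)/MdePkg/MdePkg.dec', {'WORKSPACE': '/work/edk2'})
# returns '/work/edk2/MdePkg/MdePkg.dec'.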
def CheckFileList(QualifiedExt, FileList, ErrorStringExt, ErrorStringFullPath):
    if not FileList:
        return
    WorkspaceDir = GlobalData.gWORKSPACE
    WorkspaceDir = os.path.normpath(WorkspaceDir)
    for Item in FileList:
        Ext = os.path.splitext(Item)[1]
        if Ext.upper() != QualifiedExt.upper():
            Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, ErrorStringExt % Item)

        Item = os.path.normpath(Item)
        Path = mws.join(WorkspaceDir, Item)
        if not os.path.exists(Path):
            Logger.Error("\nMkPkg", FILE_NOT_FOUND, ST.ERR_NOT_FOUND % Item)
        elif Item == Path:
            Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, ErrorStringFullPath % Item)
        elif not IsValidPath(Item, WorkspaceDir):
            Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, ErrorStringExt % Item)

        if not os.path.split(Item)[0]:
            Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, ST.ERR_INVALID_METAFILE_PATH % Item)
def GetWorkspace():
    #
    # check WORKSPACE
    #
    if "WORKSPACE" in environ:
        WorkspaceDir = os.path.normpath(environ["WORKSPACE"])
        if not os.path.exists(WorkspaceDir):
            Logger.Error("UPT",
                         ToolError.UPT_ENVIRON_MISSING_ERROR,
                         ST.ERR_WORKSPACE_NOTEXIST,
                         ExtraData="%s" % WorkspaceDir)
    else:
        WorkspaceDir = os.getcwd()

    if WorkspaceDir[-1] == ':':
        WorkspaceDir += os.sep

    PackagesPath = os.environ.get("PACKAGES_PATH")
    mws.setWs(WorkspaceDir, PackagesPath)

    return WorkspaceDir, mws.PACKAGES_PATH
def _GetTarget(self):
    Target = TargetTxtClassObject()
    ConfDirectory = GlobalData.gCmdConfDir
    if ConfDirectory:
        # Get alternate Conf location, if it is absolute, then just use the absolute directory name
        ConfDirectoryPath = os.path.normpath(ConfDirectory)

        if not os.path.isabs(ConfDirectoryPath):
            # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
            # This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
            ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], ConfDirectoryPath)
    else:
        if "CONF_PATH" in os.environ:
            ConfDirectoryPath = os.path.normcase(os.path.normpath(os.environ["CONF_PATH"]))
        else:
            # Get standard WORKSPACE/Conf use the absolute path to the WORKSPACE/Conf
            ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], 'Conf')
    GlobalData.gConfDirectory = ConfDirectoryPath
    targettxt = os.path.normpath(os.path.join(ConfDirectoryPath, gDefaultTargetTxtFile))
    if os.path.exists(targettxt):
        Target.LoadTargetTxtFile(targettxt)
    self.TxtTarget = Target
def IsValidPath(Path, Root):
    Path = Path.strip()
    OrigPath = Path.replace('\\', '/')

    Path = os.path.normpath(Path).replace('\\', '/')
    Root = os.path.normpath(Root).replace('\\', '/')
    FullPath = mws.join(Root, Path)

    if not os.path.exists(FullPath):
        return False

    #
    # If Path is absolute path.
    # It should be in Root.
    #
    if os.path.isabs(Path):
        if not Path.startswith(Root):
            return False
        return True

    #
    # Check illegal character
    #
    for Rel in ['/', './', '../']:
        if OrigPath.startswith(Rel):
            return False
    for Rel in ['//', '/./', '/../']:
        if Rel in OrigPath:
            return False
    for Rel in ['/.', '/..', '/']:
        if OrigPath.endswith(Rel):
            return False

    Path = Path.rstrip('/')

    #
    # Check relative path
    #
    for Word in Path.split('/'):
        if not IsValidWord(Word):
            return False

    return True
def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, MacroDict=None, Flag=False):
    Size = self.Size
    if MacroDict is None:
        MacroDict = {}
    if not Flag:
        GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
        GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" % Size)
    GenFdsGlobalVariable.SharpCounter = 0
    if Flag and (self.RegionType != BINARY_FILE_TYPE_FV):
        return

    if self.RegionType == BINARY_FILE_TYPE_FV:
        #
        # Get Fv from FvDict
        #
        self.FvAddress = int(BaseAddress, 16) + self.Offset
        FvBaseAddress = '0x%X' % self.FvAddress
        FvOffset = 0
        for RegionData in self.RegionDataList:
            FileName = None
            if RegionData.endswith(".fv"):
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                if not Flag:
                    GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s' % RegionData)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)

                FileName = RegionData
            elif RegionData.upper() + 'fv' in ImageBinDict:
                if not Flag:
                    GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                FileName = ImageBinDict[RegionData.upper() + 'fv']
            else:
                #
                # Generate FvImage.
                #
                FvObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
                    FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[RegionData.upper()]

                if FvObj is not None:
                    if not Flag:
                        GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                    #
                    # Call GenFv tool
                    #
                    self.BlockInfoOfRegion(BlockSizeList, FvObj)
                    self.FvAddress = self.FvAddress + FvOffset
                    FvAlignValue = GenFdsGlobalVariable.GetAlignment(FvObj.FvAlignment)
                    if self.FvAddress % FvAlignValue != 0:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
                    FvBuffer = BytesIO()
                    FvBaseAddress = '0x%X' % self.FvAddress
                    BlockSize = None
                    BlockNum = None
                    FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, Flag=Flag)
                    if Flag:
                        continue

                    FvBufferLen = len(FvBuffer.getvalue())
                    if FvBufferLen > Size:
                        FvBuffer.close()
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
                    #
                    # Put the generated image into FD buffer.
                    #
                    Buffer.write(FvBuffer.getvalue())
                    FvBuffer.close()
                    FvOffset = FvOffset + FvBufferLen
                    Size = Size - FvBufferLen
                    continue
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (RegionData))
            #
            # Add the exist Fv image into FD buffer
            #
            if not Flag:
                if FileName is not None:
                    FileLength = os.stat(FileName)[ST_SIZE]
                    if FileLength > Size:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV File (%s) is larger than Region Size 0x%X specified." \
                                        % (RegionData, Size))
                    BinFile = open(FileName, 'rb')
                    Buffer.write(BinFile.read())
                    BinFile.close()
                    Size = Size - FileLength
        #
        # Pad the left buffer
        #
        if not Flag:
            self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType == 'CAPSULE':
        #
        # Get Capsule from Capsule Dict
        #
        for RegionData in self.RegionDataList:
            if RegionData.endswith(".cap"):
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s' % RegionData)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)

                FileName = RegionData
            elif RegionData.upper() + 'cap' in ImageBinDict:
                GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                FileName = ImageBinDict[RegionData.upper() + 'cap']
            else:
                #
                # Generate Capsule image and Put it into FD buffer
                #
                CapsuleObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict:
                    CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]

                if CapsuleObj is not None:
                    CapsuleObj.CapsuleName = RegionData.upper()
                    GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
                    #
                    # Call GenFv tool to generate Capsule Image
                    #
                    FileName = CapsuleObj.GenCapsule()
                    CapsuleObj.CapsuleName = None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "Capsule (%s) is NOT described in FDF file!" % (RegionData))

            #
            # Add the capsule image into FD buffer
            #
            FileLength = os.stat(FileName)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
                                % (FileLength, RegionData, Size))
            BinFile = open(FileName, 'rb')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType in ('FILE', 'INF'):
        for RegionData in self.RegionDataList:
            if self.RegionType == 'INF':
                RegionData.__InfParse__(None)
                if len(RegionData.BinFileList) != 1:
                    EdkLogger.error('GenFds', GENFDS_ERROR, 'INF in FD region can only contain one binary: %s' % RegionData)
                File = RegionData.BinFileList[0]
                RegionData = RegionData.PatchEfiFile(File.Path, File.Type)
            else:
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                if RegionData[1] != ':':
                    RegionData = mws.join(GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
            #
            # Add the file image into FD buffer
            #
            FileLength = os.stat(RegionData)[ST_SIZE]
            if FileLength > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "Size of File (%s) is larger than Region Size 0x%X specified." \
                                % (RegionData, Size))
            GenFdsGlobalVariable.InfLogger(' Region File Name = %s' % RegionData)
            BinFile = open(RegionData, 'rb')
            Buffer.write(BinFile.read())
            BinFile.close()
            Size = Size - FileLength
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType == 'DATA':
        GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
        DataSize = 0
        for RegionData in self.RegionDataList:
            Data = RegionData.split(',')
            DataSize = DataSize + len(Data)
            if DataSize > Size:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Size of DATA is larger than Region Size ")
            else:
                for item in Data:
                    Buffer.write(pack('B', int(item, 16)))
        Size = Size - DataSize
        #
        # Pad the left buffer
        #
        self.PadBuffer(Buffer, ErasePolarity, Size)

    if self.RegionType is None:
        GenFdsGlobalVariable.InfLogger(' Region Name = None')
        self.PadBuffer(Buffer, ErasePolarity, Size)
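# Sketch of what the PadBuffer() calls above are expected to do (PadBuffer is
# defined elsewhere in Region.py; the behavior described here is an assumption
# for illustration, not the upstream implementation): the unused remainder of
# the region is filled with the erased-flash value, 0xFF when the erase
# polarity is '1' and 0x00 otherwise.
def _pad_region(Buffer, ErasePolarity, Size):
    if Size > 0:
        PadByte = b'\xFF' if ErasePolarity == '1' else b'\x00'
        Buffer.write(PadByte * Size)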
def WorkspaceFile(WorkspaceDir, Filename):
    return mws.join(NormPath(WorkspaceDir), NormPath(Filename))
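# Illustrative sketch (not the BaseTools implementation): mws.join(), used by
# WorkspaceFile() and throughout this code, resolves a workspace-relative path
# against WORKSPACE first and then against each PACKAGES_PATH entry, falling
# back to a plain WORKSPACE join when the file is nowhere on disk.  The helper
# below is a simplified local stand-in for that lookup order:
import os

def _join_across_workspaces(WorkspaceDir, PackagesPath, RelPath):
    Roots = [WorkspaceDir] + [P for P in (PackagesPath or '').split(os.pathsep) if P]
    for Root in Roots:
        Candidate = os.path.normpath(os.path.join(Root, RelPath))
        if os.path.exists(Candidate):
            return Candidate
    return os.path.normpath(os.path.join(WorkspaceDir, RelPath))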
def GetDistributionPackage(self, WorkspaceDir, PackageList, ModuleList):
    # Backup WorkspaceDir
    Root = WorkspaceDir

    #
    # Get Packages
    #
    if PackageList:
        for PackageFile in PackageList:
            PackageFileFullPath = mws.join(Root, PackageFile)
            WorkspaceDir = mws.getWs(Root, PackageFile)
            DecObj = DecPomAlignment(PackageFileFullPath, WorkspaceDir, CheckMulDec=True)
            PackageObj = DecObj
            #
            # Parse inf files one by one
            #
            ModuleInfFileList = PackageObj.GetModuleFileList()
            for File in ModuleInfFileList:
                WsRelPath = os.path.join(PackageObj.GetPackagePath(), File)
                WsRelPath = os.path.normpath(WsRelPath)
                if ModuleList and WsRelPath in ModuleList:
                    Logger.Error("UPT",
                                 OPTION_VALUE_INVALID,
                                 ST.ERR_NOT_STANDALONE_MODULE_ERROR % (WsRelPath, PackageFile))
                Filename = os.path.normpath(os.path.join(PackageObj.GetRelaPath(), File))
                os.path.splitext(Filename)
                #
                # Call INF parser to generate Inf Object.
                # Actually, this is not a direct call; it is wrapped by the
                # Inf class in InfPomAlignment.
                #
                try:
                    ModuleObj = InfPomAlignment(Filename, WorkspaceDir, PackageObj.GetPackagePath())

                    #
                    # Add module to package
                    #
                    ModuleDict = PackageObj.GetModuleDict()
                    ModuleDict[(ModuleObj.GetGuid(),
                                ModuleObj.GetVersion(),
                                ModuleObj.GetName(),
                                ModuleObj.GetCombinePath())] = ModuleObj
                    PackageObj.SetModuleDict(ModuleDict)
                except FatalError as ErrCode:
                    if ErrCode.message == EDK1_INF_ERROR:
                        Logger.Warn("UPT", ST.WRN_EDK1_INF_FOUND % Filename)
                    else:
                        raise

            self.PackageSurfaceArea[(PackageObj.GetGuid(),
                                     PackageObj.GetVersion(),
                                     PackageObj.GetCombinePath())] = PackageObj

    #
    # Get Modules
    #
    if ModuleList:
        for ModuleFile in ModuleList:
            ModuleFileFullPath = mws.join(Root, ModuleFile)
            WorkspaceDir = mws.getWs(Root, ModuleFile)
            try:
                ModuleObj = InfPomAlignment(ModuleFileFullPath, WorkspaceDir)
                ModuleKey = (ModuleObj.GetGuid(),
                             ModuleObj.GetVersion(),
                             ModuleObj.GetName(),
                             ModuleObj.GetCombinePath())
                self.ModuleSurfaceArea[ModuleKey] = ModuleObj
            except FatalError as ErrCode:
                if ErrCode.message == EDK1_INF_ERROR:
                    Logger.Error("UPT",
                                 EDK1_INF_ERROR,
                                 ST.WRN_EDK1_INF_FOUND % ModuleFileFullPath,
                                 ExtraData=ST.ERR_NOT_SUPPORTED_SA_MODULE)
                else:
                    raise

    # Recover WorkspaceDir
    WorkspaceDir = Root
def run(self):
    try:
        taskname = "Init"
        with self.file_lock:
            try:
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            except:
                self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
        EdkLogger.LogClientInitialize(self.log_q)
        loglevel = self.data_pipe.Get("LogLevel")
        if not loglevel:
            loglevel = EdkLogger.INFO
        EdkLogger.SetLevel(loglevel)
        target = self.data_pipe.Get("P_Info").get("Target")
        toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
        archlist = self.data_pipe.Get("P_Info").get("ArchList")

        active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
        workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(workspacedir, PackagesPath)
        self.Wa = WorkSpaceInfo(workspacedir, active_p, target, toolchain, archlist)
        self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
        GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
        GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
        GlobalData.gCommandMaxLength = self.data_pipe.Get('gCommandMaxLength')
        os.environ._data = self.data_pipe.Get("Env_Var")
        GlobalData.gWorkspace = workspacedir
        GlobalData.gDisableIncludePathCheck = False
        GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
        GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

        GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
        GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
        GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
        GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
        GlobalData.gModulePreMakeCacheStatus = dict()
        GlobalData.gModuleMakeCacheStatus = dict()
        GlobalData.gHashChainStatus = dict()
        GlobalData.gCMakeHashFile = dict()
        GlobalData.gModuleHashFile = dict()
        GlobalData.gFileHashDict = dict()
        GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
        GlobalData.gPlatformFinalPcds = self.data_pipe.Get("gPlatformFinalPcds")
        GlobalData.file_lock = self.file_lock
        CommandTarget = self.data_pipe.Get("CommandTarget")
        pcd_from_build_option = []
        for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
            pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
            if pcd_tuple[2].strip():
                pcd_id = ".".join((pcd_id, pcd_tuple[2]))
            pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
        GlobalData.BuildOptionPcd = pcd_from_build_option
        module_count = 0
        FfsCmd = self.data_pipe.Get("FfsCommand")
        if FfsCmd is None:
            FfsCmd = {}
        GlobalData.FfsCmd = FfsCmd
        PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                    self.data_pipe.Get("P_Info").get("WorkspaceDir"))
        while True:
            if self.error_event.is_set():
                break
            module_count += 1
            try:
                module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
            except Empty:
                EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                time.sleep(0.01)
                continue
            if module_file is None:
                EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                self.feedback_q.put("QueueEmpty")
                time.sleep(0.01)
                continue

            modulefullpath = os.path.join(module_root, module_file)
            taskname = " : ".join((modulefullpath, module_arch))
            module_metafile = PathClass(module_file, module_root)
            if module_path:
                module_metafile.Path = module_path
            if module_basename:
                module_metafile.BaseName = module_basename
            if module_originalpath:
                module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
            arch = module_arch
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain, arch, PlatformMetaFile, self.data_pipe)
            Ma.IsLibrary = IsLib
            # SourceFileList calling sequence impact the makefile string sequence.
            # Create cached SourceFileList here to unify its calling sequence for both
            # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
            RetVal = Ma.SourceFileList
            if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                try:
                    CacheResult = Ma.CanSkipbyPreMakeCache()
                except:
                    CacheResult = False
                    self.feedback_q.put(taskname)

                if CacheResult:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                    continue
                else:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

            Ma.CreateCodeFile(False)
            Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))
            Ma.CreateAsBuiltInf()
            if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                try:
                    CacheResult = Ma.CanSkipbyMakeCache()
                except:
                    CacheResult = False
                    self.feedback_q.put(taskname)

                if CacheResult:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                    continue
                else:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

    except Exception as e:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
        self.feedback_q.put(taskname)
    finally:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
        self.feedback_q.put("Done")
        self.cache_q.put("CacheDone")
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile=False):
    OutputFileList = []
    if Dict is None:
        Dict = {}
    if self.FvFileType is not None:
        FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
        if IsSect:
            return FileList, self.Alignment

        Num = SecNum

        MaxFvAlignment = 0
        for FvFileName in FileList:
            FvAlignmentValue = 0
            if os.path.isfile(FvFileName):
                FvFileObj = open(FvFileName, 'rb')
                FvFileObj.seek(0)
                # PI FvHeader is 0x48 byte
                FvHeaderBuffer = FvFileObj.read(0x48)
                # FV alignment position.
                if isinstance(FvHeaderBuffer[0x2E], str):
                    FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
                else:
                    FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
                FvFileObj.close()
            if FvAlignmentValue > MaxFvAlignment:
                MaxFvAlignment = FvAlignmentValue

            OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get("FV_IMAGE"))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [FvFileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE', IsMakefile=IsMakefile)
            OutputFileList.append(OutputFile)

        # MaxFvAlignment is larger than or equal to 1K
        if MaxFvAlignment >= 0x400:
            if MaxFvAlignment >= 0x100000:
                # The max alignment supported by FFS is 16M.
                if MaxFvAlignment >= 0x1000000:
                    self.Alignment = "16M"
                else:
                    self.Alignment = str(MaxFvAlignment // 0x100000) + "M"
            else:
                self.Alignment = str(MaxFvAlignment // 0x400) + "K"
        else:
            # MaxFvAlignment is less than 1K
            self.Alignment = str(MaxFvAlignment)

        return OutputFileList, self.Alignment
    #
    # Generate Fv
    #
    if self.FvName is not None:
        Buffer = BytesIO()
        Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
        if Fv is not None:
            self.Fv = Fv
            if not self.FvAddr and self.Fv.BaseAddress:
                self.FvAddr = self.Fv.BaseAddress
            FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict=Dict, Flag=IsMakefile)
            if Fv.FvAlignment is not None:
                if self.Alignment is None:
                    self.Alignment = Fv.FvAlignment
                else:
                    if GenFdsGlobalVariable.GetAlignment(Fv.FvAlignment) > GenFdsGlobalVariable.GetAlignment(self.Alignment):
                        self.Alignment = Fv.FvAlignment
        else:
            if self.FvFileName is not None:
                FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
                if os.path.isfile(FvFileName):
                    FvFileObj = open(FvFileName, 'rb')
                    FvFileObj.seek(0)
                    # PI FvHeader is 0x48 byte
                    FvHeaderBuffer = FvFileObj.read(0x48)
                    # FV alignment position.
                    if isinstance(FvHeaderBuffer[0x2E], str):
                        FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E]) & 0x1F)
                    else:
                        FvAlignmentValue = 1 << (FvHeaderBuffer[0x2E] & 0x1F)
                    # FvAlignmentValue is larger than or equal to 1K
                    if FvAlignmentValue >= 0x400:
                        if FvAlignmentValue >= 0x100000:
                            # The max alignment supported by FFS is 16M.
                            if FvAlignmentValue >= 0x1000000:
                                self.Alignment = "16M"
                            else:
                                self.Alignment = str(FvAlignmentValue // 0x100000) + "M"
                        else:
                            self.Alignment = str(FvAlignmentValue // 0x400) + "K"
                    else:
                        # FvAlignmentValue is less than 1K
                        self.Alignment = str(FvAlignmentValue)
                    FvFileObj.close()
                else:
                    if len(mws.getPkgPath()) == 0:
                        EdkLogger.error("GenFds", FILE_NOT_FOUND,
                                        "%s is not found in WORKSPACE: %s" % self.FvFileName,
                                        GenFdsGlobalVariable.WorkSpaceDir)
                    else:
                        EdkLogger.error("GenFds", FILE_NOT_FOUND,
                                        "%s is not found in packages path:\n\t%s" % (self.FvFileName, '\n\t'.join(mws.getPkgPath())))
            else:
                EdkLogger.error("GenFds", GENFDS_ERROR, "FvImageSection Failed! %s NOT found in FDF" % self.FvName)

        #
        # Prepare the parameter of GenSection
        #
        OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get("FV_IMAGE"))
        GenFdsGlobalVariable.GenerateSection(OutputFile, [FvFileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE', IsMakefile=IsMakefile)
        OutputFileList.append(OutputFile)

        return OutputFileList, self.Alignment
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
    GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
    GuidXRefFile = []
    PkgGuidDict = {}
    GuidDict = {}
    ModuleList = []
    FileGuidList = []
    VariableGuidSet = set()
    for Arch in ArchList:
        PlatformDataBase = BuildDb.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
        PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag)
        for P in PkgList:
            PkgGuidDict.update(P.Guids)
        for Name, Guid in PlatformDataBase.Pcds:
            Pcd = PlatformDataBase.Pcds[Name, Guid]
            if Pcd.Type in [TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX_HII]:
                for SkuId in Pcd.SkuInfoList:
                    Sku = Pcd.SkuInfoList[SkuId]
                    if Sku.VariableGuid in VariableGuidSet:
                        continue
                    VariableGuidSet.add(Sku.VariableGuid)
                    if Sku.VariableGuid and Sku.VariableGuid in PkgGuidDict.keys():
                        GuidDict[Sku.VariableGuid] = PkgGuidDict[Sku.VariableGuid]
        for ModuleFile in PlatformDataBase.Modules:
            Module = BuildDb.BuildObject[ModuleFile, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
            if Module in ModuleList:
                continue
            else:
                ModuleList.append(Module)
            if GlobalData.gGuidPattern.match(ModuleFile.BaseName):
                GuidXRefFile.append("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))
            else:
                GuidXRefFile.append("%s %s\n" % (Module.Guid, Module.BaseName))
            GuidDict.update(Module.Protocols)
            GuidDict.update(Module.Guids)
            GuidDict.update(Module.Ppis)
        for FvName in FdfParserObj.Profile.FvDict:
            for FfsObj in FdfParserObj.Profile.FvDict[FvName].FfsList:
                if not isinstance(FfsObj, FileStatement):
                    InfPath = PathClass(NormPath(mws.join(GenFdsGlobalVariable.WorkSpaceDir, FfsObj.InfFileName)))
                    FdfModule = BuildDb.BuildObject[InfPath, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
                    if FdfModule in ModuleList:
                        continue
                    else:
                        ModuleList.append(FdfModule)
                    GuidXRefFile.append("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))
                    GuidDict.update(FdfModule.Protocols)
                    GuidDict.update(FdfModule.Guids)
                    GuidDict.update(FdfModule.Ppis)
                else:
                    FileStatementGuid = FfsObj.NameGuid
                    if FileStatementGuid in FileGuidList:
                        continue
                    else:
                        FileGuidList.append(FileStatementGuid)
                    Name = []
                    FfsPath = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
                    FfsPath = glob(os.path.join(FfsPath, FileStatementGuid) + TAB_STAR)
                    if not FfsPath:
                        continue
                    if not os.path.exists(FfsPath[0]):
                        continue
                    MatchDict = {}
                    ReFileEnds = compile('\S+(.ui)$|\S+(fv.sec.txt)$|\S+(.pe32.txt)$|\S+(.te.txt)$|\S+(.pic.txt)$|\S+(.raw.txt)$|\S+(.ffs.txt)$')
                    FileList = os.listdir(FfsPath[0])
                    for File in FileList:
                        Match = ReFileEnds.search(File)
                        if Match:
                            for Index in range(1, 8):
                                if Match.group(Index) and Match.group(Index) in MatchDict:
                                    MatchDict[Match.group(Index)].append(File)
                                elif Match.group(Index):
                                    MatchDict[Match.group(Index)] = [File]
                    if not MatchDict:
                        continue
                    if '.ui' in MatchDict:
                        for File in MatchDict['.ui']:
                            with open(os.path.join(FfsPath[0], File), 'rb') as F:
                                F.read()
                                length = F.tell()
                                F.seek(4)
                                TmpStr = unpack('%dh' % ((length - 4) // 2), F.read())
                                Name = ''.join(chr(c) for c in TmpStr[:-1])
                    else:
                        FileList = []
                        if 'fv.sec.txt' in MatchDict:
                            FileList = MatchDict['fv.sec.txt']
                        elif '.pe32.txt' in MatchDict:
                            FileList = MatchDict['.pe32.txt']
                        elif '.te.txt' in MatchDict:
                            FileList = MatchDict['.te.txt']
                        elif '.pic.txt' in MatchDict:
                            FileList = MatchDict['.pic.txt']
                        elif '.raw.txt' in MatchDict:
                            FileList = MatchDict['.raw.txt']
                        elif '.ffs.txt' in MatchDict:
                            FileList = MatchDict['.ffs.txt']
                        else:
                            pass
                        for File in FileList:
                            with open(os.path.join(FfsPath[0], File), 'r') as F:
                                Name.append((F.read().split()[-1]))
                    if not Name:
                        continue

                    Name = ' '.join(Name) if isinstance(Name, type([])) else Name
                    GuidXRefFile.append("%s %s\n" % (FileStatementGuid, Name))

    # Append GUIDs, Protocols, and PPIs to the Xref file
    GuidXRefFile.append("\n")
    for key, item in GuidDict.items():
        GuidXRefFile.append("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))

    if GuidXRefFile:
        GuidXRefFile = ''.join(GuidXRefFile)
        SaveFileOnChange(GuidXRefFileName, GuidXRefFile, False)
        GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName)
    elif os.path.exists(GuidXRefFileName):
        os.remove(GuidXRefFileName)
def GenFdsApi(FdsCommandDict, WorkSpaceDataBase=None):
    global Workspace
    Workspace = ""
    ArchList = None
    ReturnCode = 0
    resetFdsGlobalVariable()

    try:
        if FdsCommandDict.get("verbose"):
            EdkLogger.SetLevel(EdkLogger.VERBOSE)
            GenFdsGlobalVariable.VerboseMode = True

        if FdsCommandDict.get("FixedAddress"):
            GenFdsGlobalVariable.FixedLoadAddress = True

        if FdsCommandDict.get("quiet"):
            EdkLogger.SetLevel(EdkLogger.QUIET)
        if FdsCommandDict.get("debug"):
            EdkLogger.SetLevel(FdsCommandDict.get("debug") + 1)
            GenFdsGlobalVariable.DebugLevel = FdsCommandDict.get("debug")
        else:
            EdkLogger.SetLevel(EdkLogger.INFO)

        if not FdsCommandDict.get("Workspace", os.environ.get('WORKSPACE')):
            EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
                            ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
        elif not os.path.exists(FdsCommandDict.get("Workspace", os.environ.get('WORKSPACE'))):
            EdkLogger.error("GenFds", PARAMETER_INVALID, "WORKSPACE is invalid",
                            ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
        else:
            Workspace = os.path.normcase(FdsCommandDict.get("Workspace", os.environ.get('WORKSPACE')))
            GenFdsGlobalVariable.WorkSpaceDir = Workspace
            if FdsCommandDict.get("debug"):
                GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
            if FdsCommandDict.get("GenfdsMultiThread"):
                GenFdsGlobalVariable.EnableGenfdsMultiThread = True
            else:
                GenFdsGlobalVariable.EnableGenfdsMultiThread = False
        os.chdir(GenFdsGlobalVariable.WorkSpaceDir)

        # set multiple workspace
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(GenFdsGlobalVariable.WorkSpaceDir, PackagesPath)

        if FdsCommandDict.get("fdf_file"):
            FdfFilename = FdsCommandDict.get("fdf_file")[0].Path
            FdfFilename = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdfFilename)

            if FdfFilename[0:2] == '..':
                FdfFilename = os.path.abspath(FdfFilename)
            if not os.path.isabs(FdfFilename):
                FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)
            if not os.path.exists(FdfFilename):
                EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)

            GenFdsGlobalVariable.FdfFile = FdfFilename
            GenFdsGlobalVariable.FdfFileTimeStamp = os.path.getmtime(FdfFilename)
        else:
            EdkLogger.error("GenFds", OPTION_MISSING, "Missing FDF filename")

        if FdsCommandDict.get("build_target"):
            GenFdsGlobalVariable.TargetName = FdsCommandDict.get("build_target")

        if FdsCommandDict.get("toolchain_tag"):
            GenFdsGlobalVariable.ToolChainTag = FdsCommandDict.get("toolchain_tag")

        if FdsCommandDict.get("active_platform"):
            ActivePlatform = FdsCommandDict.get("active_platform")
            ActivePlatform = GenFdsGlobalVariable.ReplaceWorkspaceMacro(ActivePlatform)

            if ActivePlatform[0:2] == '..':
                ActivePlatform = os.path.abspath(ActivePlatform)

            if not os.path.isabs(ActivePlatform):
                ActivePlatform = mws.join(GenFdsGlobalVariable.WorkSpaceDir, ActivePlatform)

            if not os.path.exists(ActivePlatform):
                EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist!")
        else:
            EdkLogger.error("GenFds", OPTION_MISSING, "Missing active platform")

        GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform))

        if FdsCommandDict.get("conf_directory"):
            # Get alternate Conf location, if it is absolute, then just use the absolute directory name
            ConfDirectoryPath = os.path.normpath(FdsCommandDict.get("conf_directory"))
            if ConfDirectoryPath.startswith('"'):
                ConfDirectoryPath = ConfDirectoryPath[1:]
            if ConfDirectoryPath.endswith('"'):
                ConfDirectoryPath = ConfDirectoryPath[:-1]
            if not os.path.isabs(ConfDirectoryPath):
                # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
                # This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
                ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ConfDirectoryPath)
        else:
            if "CONF_PATH" in os.environ:
                ConfDirectoryPath = os.path.normcase(os.environ["CONF_PATH"])
            else:
                # Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
                ConfDirectoryPath = mws.join(GenFdsGlobalVariable.WorkSpaceDir, 'Conf')
        GenFdsGlobalVariable.ConfDir = ConfDirectoryPath
        if not GlobalData.gConfDirectory:
            GlobalData.gConfDirectory = GenFdsGlobalVariable.ConfDir
        BuildConfigurationFile = os.path.normpath(os.path.join(ConfDirectoryPath, gDefaultTargetTxtFile))
        if os.path.isfile(BuildConfigurationFile) == True:
            # if no build target given in command line, get it from target.txt
            TargetObj = TargetTxtDict()
            TargetTxt = TargetObj.Target
            if not GenFdsGlobalVariable.TargetName:
                BuildTargetList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET]
                if len(BuildTargetList) != 1:
                    EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for Target.")
                GenFdsGlobalVariable.TargetName = BuildTargetList[0]

            # if no tool chain given in command line, get it from target.txt
            if not GenFdsGlobalVariable.ToolChainTag:
                ToolChainList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
                if ToolChainList is None or len(ToolChainList) == 0:
                    EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
                if len(ToolChainList) != 1:
                    EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
                GenFdsGlobalVariable.ToolChainTag = ToolChainList[0]
        else:
            EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile)

        # Set global flag for build mode
        GlobalData.gIgnoreSource = FdsCommandDict.get("IgnoreSources")

        if FdsCommandDict.get("macro"):
            for Pair in FdsCommandDict.get("macro"):
                if Pair.startswith('"'):
                    Pair = Pair[1:]
                if Pair.endswith('"'):
                    Pair = Pair[:-1]
                List = Pair.split('=')
                if len(List) == 2:
                    if not List[1].strip():
                        EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="No Value given for Macro %s" % List[0])
                    if List[0].strip() in ["WORKSPACE", "TARGET", "TOOLCHAIN"]:
                        GlobalData.gGlobalDefines[List[0].strip()] = List[1].strip()
                    else:
                        GlobalData.gCommandLineDefines[List[0].strip()] = List[1].strip()
                else:
                    GlobalData.gCommandLineDefines[List[0].strip()] = "TRUE"
        os.environ["WORKSPACE"] = Workspace

        # Use the -t and -b option as gGlobalDefines's TOOLCHAIN and TARGET if they are not defined
        if "TARGET" not in GlobalData.gGlobalDefines:
            GlobalData.gGlobalDefines["TARGET"] = GenFdsGlobalVariable.TargetName
        if "TOOLCHAIN" not in GlobalData.gGlobalDefines:
            GlobalData.gGlobalDefines["TOOLCHAIN"] = GenFdsGlobalVariable.ToolChainTag
        if "TOOL_CHAIN_TAG" not in GlobalData.gGlobalDefines:
            GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = GenFdsGlobalVariable.ToolChainTag

        """call Workspace build create database"""
        GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))

        if WorkSpaceDataBase:
            BuildWorkSpace = WorkSpaceDataBase
        else:
            BuildWorkSpace = WorkspaceDatabase()
        #
        # Get files real name in workspace dir
        #
        GlobalData.gAllFiles = DirCache(Workspace)
        GlobalData.gWorkspace = Workspace

        if FdsCommandDict.get("build_architecture_list"):
            ArchList = FdsCommandDict.get("build_architecture_list").split(',')
        else:
            ArchList = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON,
                                                  FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList

        TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON,
                                                        FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList) & set(ArchList)
        if len(TargetArchList) == 0:
            EdkLogger.error("GenFds", GENFDS_ERROR,
                            "Target ARCH %s not in platform supported ARCH %s"
                            % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON].SupArchList)))

        for Arch in ArchList:
            GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
                                                                                                  FdsCommandDict.get("build_target"),
                                                                                                  FdsCommandDict.get("toolchain_tag")].OutputDirectory)

        # assign platform name based on last entry in ArchList
        GenFdsGlobalVariable.PlatformName = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, ArchList[-1],
                                                                       FdsCommandDict.get("build_target"),
                                                                       FdsCommandDict.get("toolchain_tag")].PlatformName

        if FdsCommandDict.get("platform_build_directory"):
            OutputDirFromCommandLine = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdsCommandDict.get("platform_build_directory"))
            if not os.path.isabs(OutputDirFromCommandLine):
                OutputDirFromCommandLine = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, OutputDirFromCommandLine)
            for Arch in ArchList:
                GenFdsGlobalVariable.OutputDirDict[Arch] = OutputDirFromCommandLine
        else:
            for Arch in ArchList:
                GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.join(GenFdsGlobalVariable.OutputDirFromDscDict[Arch],
                                                                        GenFdsGlobalVariable.TargetName + '_' + GenFdsGlobalVariable.ToolChainTag)

        for Key in GenFdsGlobalVariable.OutputDirDict:
            OutputDir = GenFdsGlobalVariable.OutputDirDict[Key]
            if OutputDir[0:2] == '..':
                OutputDir = os.path.abspath(OutputDir)

            if OutputDir[1] != ':':
                OutputDir = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, OutputDir)

            if not os.path.exists(OutputDir):
                EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=OutputDir)
            GenFdsGlobalVariable.OutputDirDict[Key] = OutputDir

        """ Parse Fdf file, has to place after build Workspace as FDF may contain macros from DSC file """
        if WorkSpaceDataBase:
            FdfParserObj = GlobalData.gFdfParser
        else:
            FdfParserObj = FdfParser(FdfFilename)
            FdfParserObj.ParseFile()

        if FdfParserObj.CycleReferenceCheck():
            EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file")

        if FdsCommandDict.get("fd"):
            if FdsCommandDict.get("fd")[0].upper() in FdfParserObj.Profile.FdDict:
                GenFds.OnlyGenerateThisFd = FdsCommandDict.get("fd")[0]
            else:
                EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
                                "No such an FD in FDF file: %s" % FdsCommandDict.get("fd")[0])

        if FdsCommandDict.get("fv"):
            if FdsCommandDict.get("fv")[0].upper() in FdfParserObj.Profile.FvDict:
                GenFds.OnlyGenerateThisFv = FdsCommandDict.get("fv")[0]
            else:
                EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
                                "No such an FV in FDF file: %s" % FdsCommandDict.get("fv")[0])
        if FdsCommandDict.get("cap"):
            if FdsCommandDict.get("cap")[0].upper() in FdfParserObj.Profile.CapsuleDict:
                GenFds.OnlyGenerateThisCap = FdsCommandDict.get("cap")[0]
            else:
                EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
                                "No such a Capsule in FDF file: %s" % FdsCommandDict.get("cap")[0])

        GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
        if ArchList:
            GenFdsGlobalVariable.ArchList = ArchList

        # Dsc Build Data will handle Pcd Settings from CommandLine.

        """Modify images from build output if the feature of loading driver at fixed address is on."""
        if GenFdsGlobalVariable.FixedLoadAddress:
            GenFds.PreprocessImage(BuildWorkSpace, GenFdsGlobalVariable.ActivePlatform)

        # Record the FV Region info that may specific in the FD
        if FdfParserObj.Profile.FvDict and FdfParserObj.Profile.FdDict:
            for FvObj in FdfParserObj.Profile.FvDict.values():
                for FdObj in FdfParserObj.Profile.FdDict.values():
                    for RegionObj in FdObj.RegionList:
                        if RegionObj.RegionType != BINARY_FILE_TYPE_FV:
                            continue
                        for RegionData in RegionObj.RegionDataList:
                            if FvObj.UiFvName.upper() == RegionData.upper():
                                if not FvObj.BaseAddress:
                                    FvObj.BaseAddress = '0x%x' % (int(FdObj.BaseAddress, 0) + RegionObj.Offset)
                                if FvObj.FvRegionInFD:
                                    if FvObj.FvRegionInFD != RegionObj.Size:
                                        EdkLogger.error("GenFds", FORMAT_INVALID,
                                                        "The FV %s's region is specified in multiple FD with different value." % FvObj.UiFvName)
                                else:
                                    FvObj.FvRegionInFD = RegionObj.Size
                                    RegionObj.BlockInfoOfRegion(FdObj.BlockSizeList, FvObj)

        """Call GenFds"""
        GenFds.GenFd('', FdfParserObj, BuildWorkSpace, ArchList)

        """Generate GUID cross reference file"""
        GenFds.GenerateGuidXRefFile(BuildWorkSpace, ArchList, FdfParserObj)

        """Display FV space info."""
        GenFds.DisplayFvSpaceInfo(FdfParserObj)

    except Warning as X:
        EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
        ReturnCode = FORMAT_INVALID
    except FatalError as X:
        if FdsCommandDict.get("debug") is not None:
            import traceback
            EdkLogger.quiet(traceback.format_exc())
        ReturnCode = X.args[0]
    except:
        import traceback
        EdkLogger.error(
                    "\nPython",
                    CODE_ERROR,
                    "Tools code failure",
                    ExtraData="Please send email to %s for help, attaching following call stack trace!\n" % MSG_EDKII_MAIL_ADDR,
                    RaiseError=False
                    )
        EdkLogger.quiet(traceback.format_exc())
        ReturnCode = CODE_ERROR
    finally:
        ClearDuplicatedInf()
    return ReturnCode
def Main(Options=None):
    if Options is None:
        Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
    try:
        DataBase = GlobalData.gDB
        ContentFileClosed = True
        WorkspaceDir = GlobalData.gWORKSPACE

        #
        # Init PackFileToCreate
        #
        if not Options.PackFileToCreate:
            Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)

        #
        # Handle if the distribution package file already exists
        #
        CheckForExistingDp(Options.PackFileToCreate)

        #
        # Check package file existing and valid
        #
        CheckFileList('.DEC', Options.PackageFileList, ST.ERR_INVALID_PACKAGE_NAME, ST.ERR_INVALID_PACKAGE_PATH)
        #
        # Check module file existing and valid
        #
        CheckFileList('.INF', Options.ModuleFileList, ST.ERR_INVALID_MODULE_NAME, ST.ERR_INVALID_MODULE_PATH)

        #
        # Get list of files that installed with RePackage attribute available
        #
        RePkgDict = DataBase.GetRePkgDict()

        ContentFile = PackageFile(GlobalData.gCONTENT_FILE, "w")
        ContentFileClosed = False

        #
        # Add temp distribution header
        #
        if Options.PackageInformationDataFile:
            XmlFile = IniToXml(Options.PackageInformationDataFile)
            DistPkg = DistributionPackageXml().FromXml(XmlFile)
            remove(XmlFile)

            #
            # add distribution level tool/misc files
            # before pack, current dir should be workspace dir, else the full
            # path will be in the pack file
            #
            Cwd = getcwd()
            chdir(WorkspaceDir)
            ToolObject = DistPkg.Tools
            MiscObject = DistPkg.MiscellaneousFiles
            FileList = []
            if ToolObject:
                FileList += ToolObject.GetFileList()
            if MiscObject:
                FileList += MiscObject.GetFileList()
            for FileObject in FileList:
                #
                # If you have unicode file names, please convert them to byte
                # strings in your desired encoding before passing them to
                # write().
                #
                FromFile = os.path.normpath(FileObject.GetURI()).encode('utf_8')
                FileFullPath = mws.join(WorkspaceDir, FromFile)
                if FileFullPath in RePkgDict:
                    (DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
                    if not Repackage:
                        Logger.Error("\nMkPkg",
                                     UPT_REPKG_ERROR,
                                     ST.ERR_UPT_REPKG_ERROR,
                                     ExtraData=ST.MSG_REPKG_CONFLICT % (FileFullPath, DpGuid, DpVersion, DpName))
                    else:
                        DistPkg.Header.RePackage = True
                ContentFile.PackFile(FromFile)
            chdir(Cwd)

        #
        # Add init dp information
        #
        else:
            DistPkg = DistributionPackageClass()
            DistPkg.Header.Name = 'Distribution Package'
            DistPkg.Header.Guid = str(uuid4())
            DistPkg.Header.Version = '1.0'

        DistPkg.GetDistributionPackage(WorkspaceDir, Options.PackageFileList, Options.ModuleFileList)
        FileList, MetaDataFileList = DistPkg.GetDistributionFileList()
        for File in FileList + MetaDataFileList:
            FileFullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
            #
            # check whether file was included in a distribution that can not
            # be repackaged
            #
            if FileFullPath in RePkgDict:
                (DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
                if not Repackage:
                    Logger.Error("\nMkPkg",
                                 UPT_REPKG_ERROR,
                                 ST.ERR_UPT_REPKG_ERROR,
                                 ExtraData=ST.MSG_REPKG_CONFLICT % (FileFullPath, DpName, DpGuid, DpVersion))
                else:
                    DistPkg.Header.RePackage = True

        Cwd = getcwd()
        chdir(WorkspaceDir)
        ContentFile.PackFiles(FileList)
        chdir(Cwd)

        Logger.Verbose(ST.MSG_COMPRESS_DISTRIBUTION_PKG)

        ContentFile.Close()
        ContentFileClosed = True

        #
        # Add Md5Signature
        #
        DistPkg.Header.Signature = md5(open(str(ContentFile), 'rb').read()).hexdigest()
        #
        # Add current Date
        #
        DistPkg.Header.Date = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))

        #
        # Finish final dp file
        #
        DistPkgFile = PackageFile(Options.PackFileToCreate, "w")
        DistPkgFile.PackFile(str(ContentFile))
        DistPkgXml = DistributionPackageXml()
        DistPkgFile.PackData(DistPkgXml.ToXml(DistPkg), GlobalData.gDESC_FILE)
        DistPkgFile.Close()
        Logger.Quiet(ST.MSG_FINISH)
        ReturnCode = 0

    except FatalError as XExcept:
        ReturnCode = XExcept.args[0]
        if Logger.GetLevel() <= Logger.DEBUG_9:
            Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
    except KeyboardInterrupt:
        ReturnCode = ABORT_ERROR
        if Logger.GetLevel() <= Logger.DEBUG_9:
            Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
    except OSError:
        pass
    except:
        Logger.Error(
                    "\nMkPkg",
                    CODE_ERROR,
                    ST.ERR_UNKNOWN_FATAL_CREATING_ERR % Options.PackFileToCreate,
                    ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
                    RaiseError=False
                    )
        Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
        ReturnCode = CODE_ERROR
    finally:
        if os.path.exists(GlobalData.gCONTENT_FILE):
            if not ContentFileClosed:
                ContentFile.Close()
            os.remove(GlobalData.gCONTENT_FILE)
    return ReturnCode
def IncludeToolDefFile(self, FileName):
    FileContent = []
    if os.path.isfile(FileName):
        try:
            F = open(FileName, 'r')
            FileContent = F.readlines()
        except:
            EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
    else:
        EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

    for Index in range(len(FileContent)):
        Line = FileContent[Index].strip()
        if Line == "" or Line[0] == '#':
            continue

        if Line.startswith("!include"):
            IncFile = Line[8:].strip()
            Done, IncFile = self.ExpandMacros(IncFile)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=IncFile[4:-1], File=FileName, Line=Index + 1)
            IncFile = NormPath(IncFile)

            if not os.path.isabs(IncFile):
                #
                # try WORKSPACE
                #
                IncFileTmp = PathClass(IncFile, GlobalData.gWorkspace)
                ErrorCode = IncFileTmp.Validate()[0]
                if ErrorCode != 0:
                    #
                    # try PACKAGES_PATH
                    #
                    IncFileTmp = mws.join(GlobalData.gWorkspace, IncFile)
                    if not os.path.exists(IncFileTmp):
                        #
                        # try directory of current file
                        #
                        IncFileTmp = PathClass(IncFile, os.path.dirname(FileName))
                        ErrorCode = IncFileTmp.Validate()[0]
                        if ErrorCode != 0:
                            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=IncFile)

                if isinstance(IncFileTmp, PathClass):
                    IncFile = IncFileTmp.Path
                else:
                    IncFile = IncFileTmp

            self.IncludeToolDefFile(IncFile)
            continue

        NameValuePair = Line.split("=", 1)
        if len(NameValuePair) != 2:
            EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
            continue

        Name = NameValuePair[0].strip()
        Value = NameValuePair[1].strip()

        if Name == "IDENTIFIER":
            EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
            continue

        MacroDefinition = gMacroDefPattern.findall(Name)
        if MacroDefinition != []:
            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index + 1)

            MacroName = MacroDefinition[0].strip()
            self.MacroDictionary["DEF(%s)" % MacroName] = Value
            EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
            continue

        Done, Value = self.ExpandMacros(Value)
        if not Done:
            EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                            "Macro or Environment has not been defined",
                            ExtraData=Value[4:-1], File=FileName, Line=Index + 1)

        List = Name.split('_')
        if len(List) != 5:
            EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
            continue
        elif List[4] == TAB_STAR:
            EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
            continue
        else:
            self.ToolsDefTxtDictionary[Name] = Value
            if List[0] != TAB_STAR:
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
            if List[1] != TAB_STAR:
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
            if List[2] != TAB_STAR:
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
            if List[3] != TAB_STAR:
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
            if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
                if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                    EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
            if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
                if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                   or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                    EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
def PackFiles(self, Files):
    for File in Files:
        Cwd = os.getcwd()
        os.chdir(mws.getWs(mws.WORKSPACE, File))
        self.PackFile(File)
        os.chdir(Cwd)
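# Design note (sketch, not part of UPT): the save/chdir/restore pattern above
# can be wrapped in a context manager so the working directory is restored
# even if PackFile() raises.  The helper name is hypothetical:
import os
from contextlib import contextmanager

@contextmanager
def _pushd(Directory):
    Saved = os.getcwd()
    os.chdir(Directory)
    try:
        yield
    finally:
        os.chdir(Saved)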