Example #1
def TrimPreprocessedVfr(Source, Target):
    CreateDirectory(os.path.dirname(Target))

    try:
        with open(Source, "r") as File:
            Lines = File.readlines()
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    # read whole file

    FoundTypedef = False
    Brace = 0
    TypedefStart = 0
    TypedefEnd = 0
    for Index in range(len(Lines)):
        Line = Lines[Index]
        # don't trim the lines from "formset" definition to the end of file
        if Line.strip() == 'formset':
            break

        if FoundTypedef == False and (Line.find('#line') == 0
                                      or Line.find('# ') == 0):
            # blank out the line number directive if it's not inside a "typedef struct"
            Lines[Index] = "\n"
            continue

        if FoundTypedef == False and gTypedefPattern.search(Line) is None:
            # keep "#pragram pack" directive
            if gPragmaPattern.search(Line) is None:
                Lines[Index] = "\n"
            continue
        elif FoundTypedef == False:
            # found "typedef struct", keept its position and set a flag
            FoundTypedef = True
            TypedefStart = Index

        # match { and } to find the end of typedef definition
        if Line.find("{") >= 0:
            Brace += 1
        elif Line.find("}") >= 0:
            Brace -= 1

        # "typedef struct" must end with a ";"
        if Brace == 0 and Line.find(";") >= 0:
            FoundTypedef = False
            TypedefEnd = Index
            # keep all "typedef struct" except to GUID, EFI_PLABEL and PAL_CALL_RETURN
            if Line.strip("} ;\r\n") in [
                    TAB_GUID, "EFI_PLABEL", "PAL_CALL_RETURN"
            ]:
                for i in range(TypedefStart, TypedefEnd + 1):
                    Lines[i] = "\n"

    # save all lines trimmed
    try:
        with open(Target, 'w') as File:
            File.writelines(Lines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
Example #2
def TrimAslFile(Source, Target, IncludePathFile, AslDeps=False):
    CreateDirectory(os.path.dirname(Target))

    SourceDir = os.path.dirname(Source)
    if SourceDir == '':
        SourceDir = '.'

    #
    # Add source directory as the first search directory
    #
    IncludePathList = [SourceDir]

    #
    # If additional include path file is specified, append them all
    # to the search directory list.
    #
    if IncludePathFile:
        try:
            LineNum = 0
            with open(IncludePathFile, 'r') as File:
                FileLines = File.readlines()
            for Line in FileLines:
                LineNum += 1
                if Line.startswith("/I") or Line.startswith("-I"):
                    IncludePathList.append(Line[2:].strip())
                else:
                    EdkLogger.warn(
                        "Trim", "Invalid include line in include list file.",
                        IncludePathFile, LineNum)
        except:
            EdkLogger.error("Trim",
                            FILE_OPEN_FAILURE,
                            ExtraData=IncludePathFile)
    AslIncludes = []
    Lines = DoInclude(Source,
                      '',
                      IncludePathList,
                      IncludeFileList=AslIncludes,
                      filetype='ASL')
    AslIncludes = [item for item in AslIncludes if item != Source]
    SaveFileOnChange(
        os.path.join(os.path.dirname(Target), os.path.basename(Source)) +
        ".trim.deps", " \\\n".join([Source + ":"] + AslIncludes), False)

    #
    # Undef MIN and MAX to avoid collision in ASL source code
    #
    Lines.insert(0, "#undef MIN\n#undef MAX\n")

    # save all lines trimmed
    try:
        with open(Target, 'w') as File:
            File.writelines(Lines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
Example #3
    def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
        try:
            fr = open(self.FileName, 'r')
            fw = open(
                os.path.normpath(
                    os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), 'w')

            existKeys = []
            for Line in fr:
                if Line.startswith(CommentCharacter) or Line.strip() == '':
                    fw.write(Line)
                else:
                    LineList = Line.split(KeySplitCharacter, 1)
                    if len(LineList) >= 2:
                        Key = LineList[0].strip()
                        if Key.startswith(
                                CommentCharacter
                        ) == False and Key in self.TargetTxtDictionary:
                            if Key not in existKeys:
                                existKeys.append(Key)
                            else:
                                print(
                                    "Warning: Found duplicate key item in original configuration files!"
                                )

                            if Num == 0:
                                Line = "%-30s = \n" % Key
                            else:
                                ret = GetConfigureKeyValue(self, Key)
                                if ret is not None:
                                    Line = ret
                            fw.write(Line)
            for key in self.TargetTxtDictionary:
                if key not in existKeys:
                    print(
                        "Warning: %s does not exist in original configuration file"
                        % key)
                    Line = GetConfigureKeyValue(self, key)
                    if Line is None:
                        Line = "%-30s = " % key
                    fw.write(Line)

            fr.close()
            fw.close()
            os.remove(self.FileName)
            os.rename(
                os.path.normpath(
                    os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')),
                self.FileName)

        except:
            last_type, last_value, last_tb = sys.exc_info()
            traceback.print_exception(last_type, last_value, last_tb)
Example #4
 def __init__(self, ReportName = 'Report.html', FvObj = None, DispatchName=None):
     self.ReportName = ReportName
     self.Op = open(ReportName, 'w+')
     self.DispatchList = None
     if DispatchName:
         self.DispatchList = open(DispatchName, 'w+')
     self.FvObj = FvObj
     self.FfsIndex = 0
     self.PpiIndex = 0
     self.ProtocolIndex = 0
     if EotGlobalData.gMACRO['EFI_SOURCE'] == '':
         EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gMACRO['EDK_SOURCE']
def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
    """ Parse map file to get binary patch pcd information
    @param mapfilepath    Map file absolute path

    @return a list whose elements hold (PcdName, Offset, SectionName)
    """
    lines = []
    try:
        f = open(mapfilepath, 'r')
        lines = f.readlines()
        f.close()
    except:
        return None

    if len(lines) == 0: return None
    firstline = lines[0].strip()
    if re.match(r'^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$',
                firstline):
        return _parseForXcodeAndClang9(lines, efifilepath)
    if (firstline.startswith("Archive member included ")
            and firstline.endswith(" file (symbol)")):
        return _parseForGCC(lines, efifilepath)
    if firstline.startswith("# Path:"):
        return _parseForXcodeAndClang9(lines, efifilepath)
    return _parseGeneral(lines, efifilepath)
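
A short usage sketch for parsePcdInfoFromMapFile; the paths are hypothetical and the tuple layout follows the docstring above:

# Hypothetical paths; the function returns None when the map file is missing or empty.
PcdList = parsePcdInfoFromMapFile("Build/MyModule.map", "Build/MyModule.efi")
if PcdList:
    for PcdName, Offset, SectionName in PcdList:
        print("%-30s 0x%08X %s" % (PcdName, Offset, SectionName))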
Example #6
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
        #
        # Prepare the parameter of GenSection
        #
        if FfsInf is not None:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.StringData = FfsInf.__ExtendMacro__(self.StringData)
            self.FileName = FfsInf.__ExtendMacro__(self.FileName)

        OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(BINARY_FILE_TYPE_UI))

        if self.StringData is not None :
            NameString = self.StringData
        elif self.FileName is not None:
            if Dict is None:
                Dict = {}
            FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
            FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
            FileObj = open(FileNameStr, 'r')
            NameString = FileObj.read()
            FileObj.close()
        else:
            NameString = ''
        GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_USER_INTERFACE', Ui=NameString, IsMakefile=IsMakefile)

        OutputFileList = []
        OutputFileList.append(OutputFile)
        return OutputFileList, self.Alignment
Example #7
def ParseMapFile(Files):
    AllMaps = {}
    CurrentModule = ''
    CurrentMaps = {}
    for File in Files:
        Content = open(File, 'r').readlines()
        for Line in Content:
            Line = CleanString(Line)
            # skip empty line
            if Line == '':
                continue

            if Line.find('(') > -1 and Line.find(')') > -1:
                if CurrentModule != '' and CurrentMaps != {}:
                    AllMaps[CurrentModule] = CurrentMaps
                CurrentModule = Line[:Line.find('(')]
                CurrentMaps = {}
                continue
            else:
                Name = ''
                Address = ''
                List = Line.split()
                Address = List[0]
                if List[1] == 'F' or List[1] == 'FS':
                    Name = List[2]
                else:
                    Name = List[1]
                CurrentMaps[Name] = Address
                continue

    return AllMaps
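
ParseMapFile assumes a flat map layout: a line containing parentheses names a module, and the lines that follow hold an address, an optional F/FS flag, and a symbol name. An invented illustration of input that would satisfy the parsing logic above:

# Invented sample in the assumed layout: a "Module (Object)" line followed by
# "address [F|FS] symbol" lines; ParseMapFile records each symbol's address
# under the current module name.
SampleMapText = (
    "MyDriver (MyDriver.obj)\n"
    "0x00001000 F EntryPoint\n"
    "0x00002000 gST\n"
)
# with open("Sample.map", "w") as MapFile:
#     MapFile.write(SampleMapText)
# AllMaps = ParseMapFile(["Sample.map"])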
Example #8
 def ConvertTextFileToDict(self, FileName, CommentCharacter,
                           KeySplitCharacter):
     """Convert a text file to a dictionary of (name:value) pairs."""
     try:
         f = open(FileName, 'r')
         for Line in f:
             if Line.startswith(CommentCharacter) or Line.strip() == '':
                 continue
             LineList = Line.split(KeySplitCharacter, 1)
             if len(LineList) >= 2:
                 Key = LineList[0].strip()
                 if Key.startswith(
                         CommentCharacter
                 ) == False and Key in self.TargetTxtDictionary:
                     if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM or Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF \
                       or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
                       or Key == TAB_TAT_DEFINES_ACTIVE_MODULE:
                         self.TargetTxtDictionary[Key] = LineList[
                             1].replace('\\', '/').strip()
                     elif Key == TAB_TAT_DEFINES_TARGET or Key == TAB_TAT_DEFINES_TARGET_ARCH \
                       or Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG or Key == TAB_TAT_DEFINES_BUILD_RULE_CONF:
                         self.TargetTxtDictionary[Key] = LineList[1].split()
         f.close()
         return 0
     except:
         last_type, last_value, last_tb = sys.exc_info()
         traceback.print_exception(last_type, last_value, last_tb)
Example #9
    def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
        F = None
        try:
            F = open(FileName, 'r')
            self.ConfDirectoryPath = os.path.dirname(FileName)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
            if F is not None:
                F.close()

        for Line in F:
            Line = Line.strip()
            if Line.startswith(CommentCharacter) or Line == '':
                continue

            LineList = Line.split(KeySplitCharacter, 1)
            Key = LineList[0].strip()
            if len(LineList) == 2:
                Value = LineList[1].strip()
            else:
                Value = ""

            if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                       DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
                self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
                if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
                    if self.TargetTxtDictionary[Key].startswith("Conf/"):
                        Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                        if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
                            # If Conf/Conf does not exist, try just the Conf/ directory
                            Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                    else:
                        # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
                        Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    self.TargetTxtDictionary[Key] = Tools_Def
                if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
                    if self.TargetTxtDictionary[Key].startswith("Conf/"):
                        Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                        if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
                            # If Conf/Conf does not exist, try just the Conf/ directory
                            Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                    else:
                        # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
                        Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    self.TargetTxtDictionary[Key] = Build_Rule
            elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                         DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
                self.TargetTxtDictionary[Key] = Value.split()
            elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
                try:
                    V = int(Value, 0)
                except:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
                                    File=FileName)
                self.TargetTxtDictionary[Key] = Value
            #elif Key not in GlobalData.gGlobalDefines:
            #    GlobalData.gGlobalDefines[Key] = Value

        F.close()
        return 0
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
        #
        # Prepare the parameter of GenSection
        #
        if FfsInf:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
            self.StringData = FfsInf.__ExtendMacro__(self.StringData)
            self.FileName = FfsInf.__ExtendMacro__(self.FileName)

        OutputFile = os.path.join(OutputPath,
                                  ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
        OutputFile = os.path.normpath(OutputFile)

        # Get String Data
        StringData = ''
        if self.StringData:
            StringData = self.StringData
        elif self.FileName:
            if Dict is None:
                Dict = {}
            FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
            FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
            FileObj = open(FileNameStr, 'r')
            StringData = FileObj.read()
            StringData = '"' + StringData + '"'
            FileObj.close()
        GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                             Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
        OutputFileList = []
        OutputFileList.append(OutputFile)
        return OutputFileList, self.Alignment
Example #11
    def ParseConfig(self):
        Filepath = os.path.normpath(self.Filename)
        if not os.path.isfile(Filepath):
            ErrorMsg = "Can't find configuration file '%s'" % Filepath
            EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)

        LineNo = 0
        for Line in open(Filepath, 'r'):
            LineNo = LineNo + 1
            Line = CleanString(Line)
            if Line != '':
                List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
                if List[0] not in _ConfigFileToInternalTranslation:
                    ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
                    EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
                assert _ConfigFileToInternalTranslation[List[0]] in self.__dict__
                if List[0] == 'ModifierList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
                    continue
                if List[0] == 'SkipDirList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'SkipFileList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'BinaryExtList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'Copyright':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                if List[0] == 'TokenReleaceList':
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                self.__dict__[_ConfigFileToInternalTranslation[List[0]]] = List[1]
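
ParseConfig consumes a plain "Name = Value" configuration file, with list-valued options given as comma-separated strings. A hedged fragment it could accept (option names taken from the checks above, values invented):

# Invented ECC configuration fragment; the keys mirror those handled in
# ParseConfig, and their comma-separated values are split into lists before
# being stored.
SampleEccConfig = (
    "ModifierList = IN, OUT, OPTIONAL\n"
    "SkipDirList = Build, Conf\n"
    "BinaryExtList = .bin, .efi\n"
)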
Example #12
def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
    VfrNameList = []
    if os.path.isdir(DebugDir):
        for CurrentDir, Dirs, Files in os.walk(DebugDir):
            for FileName in Files:
                Name, Ext = os.path.splitext(FileName)
                if Ext == '.c' and Name != 'AutoGen':
                    VfrNameList.append (Name + 'Bin')

    VfrNameList.append (ModuleName + 'Strings')

    EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
    MapFileName = os.path.join(DebugDir, ModuleName + '.map')
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)

    if not VfrUniOffsetList:
        return

    try:
        fInputfile = open(OutputFile, "wb+")
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)

    # Use an instance of BytesIO to cache data
    fStringIO = BytesIO()

    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
            fStringIO.write(UniGuid)
            UniValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
            fStringIO.write(VfrGuid)
            VfrValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (VfrValue)

    #
    # write data into file.
    #
    try:
        fInputfile.write (fStringIO.getvalue())
    except:
        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file is locked or in use by another application." % OutputFile, None)

    fStringIO.close ()
    fInputfile.close ()
    def SetDir(OutputDir, FdfParser, WorkSpace, ArchList):
        GenFdsGlobalVariable.VerboseLogger(
            "GenFdsGlobalVariable.OutputDir:%s" % OutputDir)
        GenFdsGlobalVariable.FdfParser = FdfParser
        GenFdsGlobalVariable.WorkSpace = WorkSpace
        GenFdsGlobalVariable.FvDir = os.path.join(
            GenFdsGlobalVariable.OutputDirDict[ArchList[0]],
            DataType.TAB_FV_DIRECTORY)
        if not os.path.exists(GenFdsGlobalVariable.FvDir):
            os.makedirs(GenFdsGlobalVariable.FvDir)
        GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir,
                                                   'Ffs')
        if not os.path.exists(GenFdsGlobalVariable.FfsDir):
            os.makedirs(GenFdsGlobalVariable.FfsDir)

        #
        # Create FV Address inf file
        #
        GenFdsGlobalVariable.FvAddressFileName = os.path.join(
            GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
        FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
        #
        # Add [Options]
        #
        FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
        BsAddress = '0'
        for Arch in ArchList:
            if GenFdsGlobalVariable.WorkSpace.BuildObject[
                    GenFdsGlobalVariable.ActivePlatform, Arch,
                    GenFdsGlobalVariable.TargetName,
                    GenFdsGlobalVariable.ToolChainTag].BsBaseAddress:
                BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[
                    GenFdsGlobalVariable.ActivePlatform, Arch,
                    GenFdsGlobalVariable.TargetName,
                    GenFdsGlobalVariable.ToolChainTag].BsBaseAddress
                break

        FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
                                       BsAddress + \
                                       DataType.TAB_LINE_BREAK)

        RtAddress = '0'
        for Arch in reversed(ArchList):
            temp = GenFdsGlobalVariable.WorkSpace.BuildObject[
                GenFdsGlobalVariable.ActivePlatform, Arch,
                GenFdsGlobalVariable.TargetName,
                GenFdsGlobalVariable.ToolChainTag].RtBaseAddress
            if temp:
                RtAddress = temp
                break

        FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
                                       RtAddress + \
                                       DataType.TAB_LINE_BREAK)

        FvAddressFile.close()
def Main():
    EdkLogger.Initialize()
    Option, Input = GetOptions()

    # Set log level
    if Option.quiet:
        EdkLogger.SetLevel(EdkLogger.QUIET)
    elif Option.verbose:
        EdkLogger.SetLevel(EdkLogger.VERBOSE)
    elif Option.debug is not None:
        EdkLogger.SetLevel(Option.debug + 1)
    else:
        EdkLogger.SetLevel(EdkLogger.INFO)

    try:
        if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
            EdkLogger.error("GenDepex", OPTION_MISSING,
                            "Module type is not specified or supported")

        DxsFile = ''
        if len(Input) > 0 and Option.Expression == "":
            DxsFile = Input[0]
            DxsString = open(DxsFile,
                             'r').read().replace("\n", " ").replace("\r", " ")
            DxsString = gStartClosePattern.sub("\\1", DxsString)
        elif Option.Expression != "":
            if Option.Expression[0] == '"':
                DxsString = Option.Expression[1:-1]
            else:
                DxsString = Option.Expression
        else:
            EdkLogger.error("GenDepex", OPTION_MISSING,
                            "No expression string or file given")

        Dpx = DependencyExpression(DxsString, Option.ModuleType,
                                   Option.Optimize)
        if Option.OutputFile is not None:
            FileChangeFlag = Dpx.Generate(Option.OutputFile)
            if not FileChangeFlag and DxsFile:
                #
                # Touch the output file if its time stamp is older than the original
                # DXS file, to avoid re-invoking this tool for the dependency check in build rules.
                #
                if os.stat(DxsFile)[8] > os.stat(Option.OutputFile)[8]:
                    os.utime(Option.OutputFile, None)
        else:
            Dpx.Generate()
    except BaseException as X:
        EdkLogger.quiet("")
        if Option is not None and Option.debug is not None:
            EdkLogger.quiet(traceback.format_exc())
        else:
            EdkLogger.quiet(str(X))
        return 1

    return 0
Example #15
def DeCompress(Method, Input):
    # Write the input to a temp file
    open('_Temp.bin', 'wb').write(Input)
    cmd = ''
    if Method == 'Lzma':
        cmd = r'LzmaCompress -o _New.bin -d _Temp.bin'
    if Method == 'Efi':
        cmd = r'TianoCompress -d --uefi -o _New.bin _Temp.bin'
    if Method == 'Framework':
        cmd = r'TianoCompress -d -o _New.bin _Temp.bin'

    # Call tool to create the decompressed output file
    Process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    Process.communicate()[0]

    # Return the buffer of _New.bin
    if os.path.exists('_New.bin'):
        return open('_New.bin', 'rb').read()
    def __init__(self, FileName):
        self.FileLinesList = []
        self.FileLinesListFromFile = []
        try:
            fsock = open(FileName, "r")
            try:
                self.FileLinesListFromFile = fsock.readlines()
            finally:
                fsock.close()

        except IOError:
            raise Warning("Error when opening file %s" % FileName)
def generatePcdTable(list, pcdpath):
    try:
        f = open(pcdpath, 'w')
    except:
        pass

    f.write('PCD Name                       Offset    Section Name\r\n')

    for pcditem in list:
        f.write('%-30s 0x%-08X %-6s\r\n' %
                (pcditem[0], pcditem[1], pcditem[2]))
    f.close()
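
generatePcdTable simply renders the (PcdName, Offset, SectionName) tuples that parsePcdInfoFromMapFile (shown earlier) produces. A minimal sketch of chaining the two, with hypothetical paths:

# Hypothetical paths; skip writing the table when map parsing fails.
PcdInfo = parsePcdInfoFromMapFile("Build/MyModule.map", "Build/MyModule.efi")
if PcdInfo:
    generatePcdTable(PcdInfo, "Build/MyModule.BinaryPcdTable.txt")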
Example #18
    def Read(self, FilePath):
        try:
            fd = open(FilePath, "r")
        except:
            EdkLogger.error("VpdInfoFile", BuildToolError.FILE_OPEN_FAILURE,
                            "Fail to open file %s for written." % FilePath)
        Lines = fd.readlines()
        for Line in Lines:
            Line = Line.strip()
            if len(Line) == 0 or Line.startswith("#"):
                continue

            #
            # the line must follow output format defined in BPDG spec.
            #
            try:
                PcdName, SkuId, Offset, Size, Value = Line.split("#")[0].split(
                    "|")
                PcdName, SkuId, Offset, Size, Value = PcdName.strip(
                ), SkuId.strip(), Offset.strip(), Size.strip(), Value.strip()
                TokenSpaceName, PcdTokenName = PcdName.split(".")
            except:
                EdkLogger.error(
                    "BPDG", BuildToolError.PARSER_ERROR,
                    "Fail to parse VPD information file %s" % FilePath)

            Found = False

            if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
                self._VpdInfo[(TokenSpaceName, PcdTokenName)] = {}
            self._VpdInfo[(TokenSpaceName, PcdTokenName)][(SkuId,
                                                           Offset)] = Value
            for VpdObject in self._VpdArray:
                VpdObjectTokenCName = VpdObject.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (VpdObject.TokenCName, VpdObject.TokenSpaceGuidCName
                        ) in GlobalData.MixedPcd[PcdItem]:
                        VpdObjectTokenCName = PcdItem[0]
                for sku in VpdObject.SkuInfoList:
                    if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip(
                    ) and sku == SkuId:
                        if self._VpdArray[VpdObject][sku] == TAB_STAR:
                            if Offset == TAB_STAR:
                                EdkLogger.error(
                                    "BPDG", BuildToolError.FORMAT_INVALID,
                                    "The offset of %s has not been fixed up by third-party BPDG tool."
                                    % PcdName)
                            self._VpdArray[VpdObject][sku] = Offset
                        Found = True
            if not Found:
                EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR,
                                "Can not find PCD defined in VPD guid file.")
Example #19
def PreProcess(Filename, MergeMultipleLines=True, LineNo=-1):
    Lines = []
    Filename = os.path.normpath(Filename)
    if not os.path.isfile(Filename):
        EdkLogger.error("Eot", EdkLogger.FILE_NOT_FOUND, ExtraData=Filename)

    IsFindBlockComment = False
    IsFindBlockCode = False
    ReservedLine = ''
    ReservedLineLength = 0
    for Line in open(Filename, 'r'):
        Line = Line.strip()
        # Remove comment block
        if Line.find(TAB_COMMENT_EDK_START) > -1:
            ReservedLine = GetSplitList(Line, TAB_COMMENT_EDK_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(TAB_COMMENT_EDK_END) > -1:
            Line = ReservedLine + GetSplitList(Line, TAB_COMMENT_EDK_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            Lines.append('')
            continue

        # Remove comments at tail and remove spaces again
        Line = CleanString(Line)
        if Line == '':
            Lines.append('')
            continue

        if MergeMultipleLines:
            # Add multiple lines to one line
            if IsFindBlockCode and Line[-1] != TAB_SLASH:
                ReservedLine = (ReservedLine + TAB_SPACE_SPLIT + Line).strip()
                Lines.append(ReservedLine)
                # pad with blank lines so numbering stays aligned with the source file
                for Index in range(ReservedLineLength):
                    Lines.append('')
                ReservedLine = ''
                ReservedLineLength = 0
                IsFindBlockCode = False
                continue
            if Line[-1] == TAB_SLASH:
                ReservedLine = ReservedLine + TAB_SPACE_SPLIT + Line[
                    0:-1].strip()
                ReservedLineLength = ReservedLineLength + 1
                IsFindBlockCode = True
                continue

        Lines.append(Line)

    return Lines
Example #20
def RaiseParserError(Line, Section, File, Format='', LineNo=-1):
    if LineNo == -1:
        LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
    ErrorMsg = "Invalid statement '%s' is found in section '%s'" % (Line,
                                                                    Section)
    if Format != '':
        Format = "Correct format is " + Format
    EdkLogger.error("Parser",
                    PARSER_ERROR,
                    ErrorMsg,
                    File=File,
                    Line=LineNo,
                    ExtraData=Format,
                    RaiseError=EdkLogger.IsRaiseError)
Example #21
    def __init__(self,
                 File=None,
                 Content=None,
                 LineIndex=0,
                 SupportedFamily=[TAB_COMPILER_MSFT, "INTEL", "GCC"]):
        self.RuleFile = File
        # Read build rules from file if it's not none
        if File is not None:
            try:
                self.RuleContent = open(File, 'r').readlines()
            except:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
        elif Content is not None:
            self.RuleContent = Content
        else:
            EdkLogger.error("build",
                            PARAMETER_MISSING,
                            ExtraData="No rule file or string given")

        self.SupportedToolChainFamilyList = SupportedFamily
        self.RuleDatabase = tdict(
            True,
            4)  # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
        self.Ext2FileType = {}  # {ext : file-type}
        self.FileTypeList = set()

        self._LineIndex = LineIndex
        self._State = ""
        self._RuleInfo = tdict(
            True, 2
        )  # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
        self._FileType = ''
        self._BuildTypeList = set()
        self._ArchList = set()
        self._FamilyList = []
        self._TotalToolChainFamilySet = set()
        self._RuleObjectList = []  # FileBuildRule object list
        self._FileVersion = ""

        self.Parse()

        # some intrinsic rules
        self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, TAB_COMMON, TAB_COMMON,
                          TAB_COMMON] = self._BinaryFileRule
        self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
    def ToCSV(self, Filename='Report.csv'):
        try:
            File = open(Filename, 'w+')
            File.write(
                """No, Error Code, Error Message, File, LineNo, Other Error Message\n"""
            )
            RecordSet = self.Query()
            Index = 0
            for Record in RecordSet:
                Index = Index + 1
                ErrorID = Record[1]
                OtherMsg = Record[2]
                BelongsToTable = Record[3]
                BelongsToItem = Record[4]
                IsCorrected = Record[5]
                SqlCommand = ''
                if BelongsToTable == 'File':
                    SqlCommand = """select 1, FullPath from %s where ID = %s
                             """ % (BelongsToTable, BelongsToItem)
                else:
                    SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
                                    where A.ID = %s and B.ID = A.BelongsToFile
                                 """ % (BelongsToTable, BelongsToItem)
                NewRecord = self.Exec(SqlCommand)
                if NewRecord != []:
                    File.write("""%s,%s,"%s",%s,%s,"%s"\n""" %
                               (Index, ErrorID,
                                EccToolError.gEccErrorMessage[ErrorID],
                                NewRecord[0][1], NewRecord[0][0], OtherMsg))
                    EdkLogger.quiet(
                        "%s(%s): [%s]%s %s" %
                        (NewRecord[0][1], NewRecord[0][0], ErrorID,
                         EccToolError.gEccErrorMessage[ErrorID], OtherMsg))

            File.close()
        except IOError:
            NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv",
                                                    time.localtime())
            EdkLogger.warn(
                "ECC",
                "The report file %s is locked by other progress, use %s instead!"
                % (Filename, NewFilename))
            self.ToCSV(NewFilename)
Example #23
 def __init__(self, InputFileName, MapFileName, VpdFileName):
     self.InputFileName = InputFileName
     self.MapFileName = MapFileName
     self.VpdFileName = VpdFileName
     self.FileLinesList = []
     self.PcdFixedOffsetSizeList = []
     self.PcdUnknownOffsetList = []
     try:
         fInputfile = open(InputFileName, "r")
         try:
             self.FileLinesList = fInputfile.readlines()
         except:
             EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE,
                             "File read failed for %s" % InputFileName,
                             None)
         finally:
             fInputfile.close()
     except:
         EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE,
                         "File open failed for %s" % InputFileName, None)
Example #24
def CheckFileType(CheckFilename,
                  ExtName,
                  ContainerFilename,
                  SectionName,
                  Line,
                  LineNo=-1):
    if CheckFilename != '' and CheckFilename is not None:
        (Root, Ext) = os.path.splitext(CheckFilename)
        if Ext.upper() != ExtName.upper():
            ContainerFile = open(ContainerFilename, 'r').read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (
                SectionName, CheckFilename, ExtName)
            EdkLogger.error("Parser",
                            PARSER_ERROR,
                            ErrorMsg,
                            Line=LineNo,
                            File=ContainerFilename,
                            RaiseError=EdkLogger.IsRaiseError)

    return True
Example #25
def CheckFileExist(WorkspaceDir,
                   CheckFilename,
                   ContainerFilename,
                   SectionName,
                   Line,
                   LineNo=-1):
    CheckFile = ''
    if CheckFilename != '' and CheckFilename is not None:
        CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
        if not os.path.isfile(CheckFile):
            ContainerFile = open(ContainerFilename, 'r').read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Can't find file '%s' defined in section '%s'" % (
                CheckFile, SectionName)
            EdkLogger.error("Parser",
                            PARSER_ERROR,
                            ErrorMsg,
                            File=ContainerFilename,
                            Line=LineNo,
                            RaiseError=EdkLogger.IsRaiseError)

    return CheckFile
def SearchString(UniObjectClass, FileList, IsCompatibleMode):
    if FileList == []:
        return UniObjectClass

    for File in FileList:
        try:
            if os.path.isfile(File):
                Lines = open(File, 'r')
                for Line in Lines:
                    for StrName in STRING_TOKEN.findall(Line):
                        EdkLogger.debug(EdkLogger.DEBUG_5,
                                        "Found string identifier: " + StrName)
                        UniObjectClass.SetStringReferenced(StrName)
        except:
            EdkLogger.error("UnicodeStringGather",
                            AUTOGEN_ERROR,
                            "SearchString: Error while processing file",
                            File=File,
                            RaiseError=False)
            raise

    UniObjectClass.ReToken()

    return UniObjectClass
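
SearchString relies on a module-level STRING_TOKEN regular expression. A hedged sketch of a plausible definition (assumed, not taken from the original module) together with the kind of source line it would flag:

import re

# Assumed pattern (illustrative only): captures the identifier inside
# STRING_TOKEN(...), which is what SetStringReferenced receives above.
STRING_TOKEN = re.compile(r'STRING_TOKEN\s*\(\s*([A-Z0-9_]+)\s*\)')

SampleLine = 'Token = STRING_TOKEN (STR_FORM_TITLE);'
print(STRING_TOKEN.findall(SampleLine))   # ['STR_FORM_TITLE']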
Example #27
    def GenerateVpdFile(self, MapFileName, BinFileName):
        # Open a VPD file to process

        try:
            fVpdFile = open(BinFileName, "wb")
        except:
            # Open failed
            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE,
                            "File open failed for %s" % self.VpdFileName, None)

        try:
            fMapFile = open(MapFileName, "w")
        except:
            # Open failed
            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE,
                            "File open failed for %s" % self.MapFileName, None)

        # Use an instance of BytesIO to cache data
        fStringIO = BytesIO()

        # Write the header of map file.
        try:
            fMapFile.write(st.MAP_FILE_COMMENT_TEMPLATE + "\n")
        except:
            EdkLogger.error(
                "BPDG", BuildToolError.FILE_WRITE_FAILURE,
                "Write data to file %s failed, please check whether the file been locked or using by other applications."
                % self.MapFileName, None)

        for eachPcd in self.PcdFixedOffsetSizeList:
            # write map file
            try:
                fMapFile.write(
                    "%s | %s | %s | %s | %s  \n" %
                    (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset,
                     eachPcd.PcdSize, eachPcd.PcdUnpackValue))
            except:
                EdkLogger.error(
                    "BPDG", BuildToolError.FILE_WRITE_FAILURE,
                    "Write data to file %s failed, please check whether the file been locked or using by other applications."
                    % self.MapFileName, None)

            # Write Vpd binary file
            fStringIO.seek(eachPcd.PcdBinOffset)
            if isinstance(eachPcd.PcdValue, list):
                for i in range(len(eachPcd.PcdValue)):
                    Value = eachPcd.PcdValue[i:i + 1]
                    if isinstance(bytes(Value), str):
                        fStringIO.write(chr(Value[0]))
                    else:
                        fStringIO.write(bytes(Value))
            else:
                fStringIO.write(eachPcd.PcdValue)

        try:
            fVpdFile.write(fStringIO.getvalue())
        except:
            EdkLogger.error(
                "BPDG", BuildToolError.FILE_WRITE_FAILURE,
                "Write data to file %s failed, please check whether the file been locked or using by other applications."
                % self.VpdFileName, None)

        fStringIO.close()
        fVpdFile.close()
        fMapFile.close()
def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
    #
    # Length of Binary File
    #
    FileHandle = open(FileName, 'rb')
    FileHandle.seek(0, 2)
    FileLength = FileHandle.tell()
    FileHandle.close()
    #
    # Unify string to upper string
    #
    TypeName = TypeName.upper()
    #
    # Get PCD value data length
    #
    ValueLength = 0
    if TypeName == 'BOOLEAN':
        ValueLength = 1
    elif TypeName == TAB_UINT8:
        ValueLength = 1
    elif TypeName == TAB_UINT16:
        ValueLength = 2
    elif TypeName == TAB_UINT32:
        ValueLength = 4
    elif TypeName == TAB_UINT64:
        ValueLength = 8
    elif TypeName == TAB_VOID:
        if MaxSize == 0:
            return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
        ValueLength = int(MaxSize)
    else:
        return PARAMETER_INVALID, "PCD type %s is not valid." % (
            CommandOptions.PcdTypeName)
    #
    # Check PcdValue is in the input binary file.
    #
    if ValueOffset + ValueLength > FileLength:
        return PARAMETER_INVALID, "PcdOffset + PcdMaxSize(DataType) is larger than the input file size."
    #
    # Read binary file into array
    #
    FileHandle = open(FileName, 'rb')
    ByteArray = array.array('B')
    ByteArray.fromfile(FileHandle, FileLength)
    FileHandle.close()
    OrigByteList = ByteArray.tolist()
    ByteList = ByteArray.tolist()
    #
    # Clear the data in file
    #
    for Index in range(ValueLength):
        ByteList[ValueOffset + Index] = 0
    #
    # Patch value into offset
    #
    SavedStr = ValueString
    ValueString = ValueString.upper()
    ValueNumber = 0
    if TypeName == 'BOOLEAN':
        #
        # Get PCD value for BOOLEAN data type
        #
        try:
            if ValueString == 'TRUE':
                ValueNumber = 1
            elif ValueString == 'FALSE':
                ValueNumber = 0
            else:
                # otherwise parse a dec/hex string and normalize to 0/1
                ValueNumber = int(ValueString, 0)
                if ValueNumber != 0:
                    ValueNumber = 1
        except:
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (
                ValueString)
        #
        # Set PCD value into binary data
        #
        ByteList[ValueOffset] = ValueNumber
    elif TypeName in TAB_PCD_CLEAN_NUMERIC_TYPES:
        #
        # Get PCD value for UINT* data type
        #
        try:
            ValueNumber = int(ValueString, 0)
        except:
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (
                ValueString)
        #
        # Set PCD value into binary data
        #
        for Index in range(ValueLength):
            ByteList[ValueOffset + Index] = ValueNumber % 0x100
            ValueNumber = ValueNumber // 0x100
    elif TypeName == TAB_VOID:
        ValueString = SavedStr
        if ValueString.startswith('L"'):
            #
            # Patch Unicode String
            #
            Index = 0
            for ByteString in ValueString[2:-1]:
                #
                # Reserve zero as unicode tail
                #
                if Index + 2 >= ValueLength:
                    break
                #
                # Set string value one by one
                #
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 2
        elif ValueString.startswith("{") and ValueString.endswith("}"):
            #
            # Patch {0x1, 0x2, ...} byte by byte
            #
            ValueList = ValueString[1:len(ValueString) - 1].split(',')
            Index = 0
            try:
                for ByteString in ValueList:
                    ByteString = ByteString.strip()
                    if ByteString.upper().startswith('0X'):
                        ByteValue = int(ByteString, 16)
                    else:
                        ByteValue = int(ByteString)
                    ByteList[ValueOffset + Index] = ByteValue % 0x100
                    Index = Index + 1
                    if Index >= ValueLength:
                        break
            except:
                return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (
                    ValueString)
        else:
            #
            # Patch ascii string
            #
            Index = 0
            for ByteString in ValueString[1:-1]:
                #
                # Reserve zero as string tail
                #
                if Index + 1 >= ValueLength:
                    break
                #
                # Set string value one by one
                #
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 1
    #
    # Update new data into input file.
    #
    if ByteList != OrigByteList:
        ByteArray = array.array('B')
        ByteArray.fromlist(ByteList)
        FileHandle = open(FileName, 'wb')
        ByteArray.tofile(FileHandle)
        FileHandle.close()
    return 0, "Patch Value into File %s successfully." % (FileName)
Example #29
    def GenSection(self,
                   OutputPath,
                   ModuleName,
                   SecNum,
                   KeyStringList,
                   FfsInf=None,
                   Dict=None,
                   IsMakefile=False):
        #
        # Generate all section
        #
        self.KeyStringList = KeyStringList
        self.CurrentArchList = GenFdsGlobalVariable.ArchList
        if FfsInf is not None:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
            self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
            self.CurrentArchList = [FfsInf.CurrentArch]

        SectFile = tuple()
        SectAlign = []
        Index = 0
        MaxAlign = None
        if Dict is None:
            Dict = {}
        if self.FvAddr != []:
            FvAddrIsSet = True
        else:
            FvAddrIsSet = False

        if self.ProcessRequired in ("TRUE", "1"):
            if self.FvAddr != []:
                # do not use FvAddr when the image is processed.
                self.FvAddr = []
            if self.FvParentAddr is not None:
                # do not use FvParentAddr when the image is processed.
                self.FvParentAddr = None

        for Sect in self.SectionList:
            Index = Index + 1
            SecIndex = '%s.%d' % (SecNum, Index)
            # set base address for inside FvImage
            if isinstance(Sect, FvImageSection):
                if self.FvAddr != []:
                    Sect.FvAddr = self.FvAddr.pop(0)
                self.IncludeFvSection = True
            elif isinstance(Sect, GuidSection):
                Sect.FvAddr = self.FvAddr
                Sect.FvParentAddr = self.FvParentAddr
            ReturnSectList, align = Sect.GenSection(OutputPath,
                                                    ModuleName,
                                                    SecIndex,
                                                    KeyStringList,
                                                    FfsInf,
                                                    Dict,
                                                    IsMakefile=IsMakefile)
            if isinstance(Sect, GuidSection):
                if Sect.IncludeFvSection:
                    self.IncludeFvSection = Sect.IncludeFvSection

            if align is not None:
                if MaxAlign is None:
                    MaxAlign = align
                if GenFdsGlobalVariable.GetAlignment(
                        align) > GenFdsGlobalVariable.GetAlignment(MaxAlign):
                    MaxAlign = align
            if ReturnSectList != []:
                if align is None:
                    align = "1"
                for file in ReturnSectList:
                    SectFile += (file, )
                    SectAlign.append(align)

        if MaxAlign is not None:
            if self.Alignment is None:
                self.Alignment = MaxAlign
            else:
                if GenFdsGlobalVariable.GetAlignment(
                        MaxAlign) > GenFdsGlobalVariable.GetAlignment(
                            self.Alignment):
                    self.Alignment = MaxAlign

        OutputFile = OutputPath + \
                     os.sep + \
                     ModuleName + \
                     SUP_MODULE_SEC + \
                     SecNum + \
                     SectionSuffix['GUIDED']
        OutputFile = os.path.normpath(OutputFile)

        ExternalTool = None
        ExternalOption = None
        if self.NameGuid is not None:
            ExternalTool, ExternalOption = FindExtendTool(
                self.KeyStringList, self.CurrentArchList, self.NameGuid)

        #
        # If no GUID is given, generate the default
        # GENCRC32 section
        #
        if self.NameGuid is None:
            GenFdsGlobalVariable.VerboseLogger(
                "Use GenSection function Generate CRC32 Section")
            GenFdsGlobalVariable.GenerateSection(
                OutputFile,
                SectFile,
                Section.Section.SectionType[self.SectionType],
                InputAlign=SectAlign,
                IsMakefile=IsMakefile)
            OutputFileList = []
            OutputFileList.append(OutputFile)
            return OutputFileList, self.Alignment
        # or the GUID is not in the external tool list
        elif ExternalTool is None:
            EdkLogger.error("GenFds", GENFDS_ERROR,
                            "No tool found with GUID %s" % self.NameGuid)
        else:
            DummyFile = OutputFile + ".dummy"
            #
            # Call GenSection with DUMMY section type.
            #
            GenFdsGlobalVariable.GenerateSection(DummyFile,
                                                 SectFile,
                                                 InputAlign=SectAlign,
                                                 IsMakefile=IsMakefile)
            #
            # Use external tool process the Output
            #
            TempFile = OutputPath + \
                       os.sep + \
                       ModuleName + \
                       SUP_MODULE_SEC + \
                       SecNum + \
                       '.tmp'
            TempFile = os.path.normpath(TempFile)
            #
            # Remove temp file if its time stamp is older than dummy file
            # Just in case the external tool fails at this time but succeeded before
            # Error should be reported if the external tool does not generate a new output based on new input
            #
            if os.path.exists(TempFile) and os.path.exists(
                    DummyFile
            ) and os.path.getmtime(TempFile) < os.path.getmtime(DummyFile):
                os.remove(TempFile)

            FirstCall = False
            CmdOption = '-e'
            if ExternalOption is not None:
                CmdOption = CmdOption + ' ' + ExternalOption
            if not GenFdsGlobalVariable.EnableGenfdsMultiThread:
                if self.ProcessRequired not in (
                        "TRUE", "1"
                ) and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr is not None:
                    # FirstCall is only set for an encapsulated flash FV image without the process-required attribute.
                    FirstCall = True
                #
                # Call external tool
                #
                ReturnValue = [1]
                if FirstCall:
                    # first try the guided tool with the -z option plus CmdOption, for guided tools that do not require processing.
                    GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile],
                                                  ExternalTool,
                                                  '-z' + ' ' + CmdOption,
                                                  ReturnValue)

                #
                # When the tool was not called, or the first call failed, ReturnValue[0] is nonzero;
                # call the guided tool again with CmdOption only.
                #
                if ReturnValue[0] != 0:
                    FirstCall = False
                    ReturnValue[0] = 0
                    GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile],
                                                  ExternalTool, CmdOption)
                #
                # Some external tools do not follow the convention of returning nonzero on failure,
                # so the output file has to be checked as well.
                #

                if not os.path.exists(TempFile):
                    EdkLogger.error(
                        "GenFds", COMMAND_FAILURE,
                        'Fail to call %s, no output file was generated' %
                        ExternalTool)

                FileHandleIn = open(DummyFile, 'rb')
                FileHandleIn.seek(0, 2)
                InputFileSize = FileHandleIn.tell()

                FileHandleOut = open(TempFile, 'rb')
                FileHandleOut.seek(0, 2)
                TempFileSize = FileHandleOut.tell()

                Attribute = []
                HeaderLength = None
                if self.ExtraHeaderSize != -1:
                    HeaderLength = str(self.ExtraHeaderSize)

                if self.ProcessRequired == "NONE" and HeaderLength is None:
                    if TempFileSize > InputFileSize:
                        FileHandleIn.seek(0)
                        BufferIn = FileHandleIn.read()
                        FileHandleOut.seek(0)
                        BufferOut = FileHandleOut.read()
                        if BufferIn == BufferOut[TempFileSize -
                                                 InputFileSize:]:
                            HeaderLength = str(TempFileSize - InputFileSize)
                    # otherwise assume the guided section requires processing
                    if HeaderLength is None:
                        Attribute.append('PROCESSING_REQUIRED')

                FileHandleIn.close()
                FileHandleOut.close()

                if FirstCall and 'PROCESSING_REQUIRED' in Attribute:
                    # The -z output from the first call is the process-required data; call the guided tool again with the real option.
                    GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile],
                                                  ExternalTool, CmdOption)

                #
                # Call Gensection Add Section Header
                #
                if self.ProcessRequired in ("TRUE", "1"):
                    if 'PROCESSING_REQUIRED' not in Attribute:
                        Attribute.append('PROCESSING_REQUIRED')

                if self.AuthStatusValid in ("TRUE", "1"):
                    Attribute.append('AUTH_STATUS_VALID')
                GenFdsGlobalVariable.GenerateSection(
                    OutputFile, [TempFile],
                    Section.Section.SectionType['GUIDED'],
                    Guid=self.NameGuid,
                    GuidAttr=Attribute,
                    GuidHdrLen=HeaderLength)

            else:
                # add the input file so GenSec can derive PROCESSING_REQUIRED
                GenFdsGlobalVariable.GuidTool(TempFile, [DummyFile],
                                              ExternalTool,
                                              CmdOption,
                                              IsMakefile=IsMakefile)
                Attribute = []
                HeaderLength = None
                if self.ExtraHeaderSize != -1:
                    HeaderLength = str(self.ExtraHeaderSize)
                if self.AuthStatusValid in ("TRUE", "1"):
                    Attribute.append('AUTH_STATUS_VALID')
                if self.ProcessRequired == "NONE" and HeaderLength is None:
                    GenFdsGlobalVariable.GenerateSection(
                        OutputFile, [TempFile],
                        Section.Section.SectionType['GUIDED'],
                        Guid=self.NameGuid,
                        GuidAttr=Attribute,
                        GuidHdrLen=HeaderLength,
                        DummyFile=DummyFile,
                        IsMakefile=IsMakefile)
                else:
                    if self.ProcessRequired in ("TRUE", "1"):
                        if 'PROCESSING_REQUIRED' not in Attribute:
                            Attribute.append('PROCESSING_REQUIRED')
                    GenFdsGlobalVariable.GenerateSection(
                        OutputFile, [TempFile],
                        Section.Section.SectionType['GUIDED'],
                        Guid=self.NameGuid,
                        GuidAttr=Attribute,
                        GuidHdrLen=HeaderLength,
                        IsMakefile=IsMakefile)

            OutputFileList = []
            OutputFileList.append(OutputFile)
            if 'PROCESSING_REQUIRED' in Attribute:
                # reset the guided section alignment to None for process-required guided data
                self.Alignment = None
                self.IncludeFvSection = False
                self.ProcessRequired = "TRUE"
            if IsMakefile and self.Alignment is not None and self.Alignment.strip(
            ) == '0':
                self.Alignment = '1'
            return OutputFileList, self.Alignment
Example #30
def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
    CreateDirectory(os.path.dirname(Target))
    try:
        with open(Source, "r") as File:
            Lines = File.readlines()
    except IOError:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    except:
        EdkLogger.error("Trim",
                        AUTOGEN_ERROR,
                        "TrimPreprocessedFile: Error while processing file",
                        File=Source)

    PreprocessedFile = ""
    InjectedFile = ""
    LineIndexOfOriginalFile = None
    NewLines = []
    LineControlDirectiveFound = False
    for Index in range(len(Lines)):
        Line = Lines[Index]
        #
        # Find out the name of files injected by preprocessor from the lines
        # with Line Control directive
        #
        MatchList = gLineControlDirective.findall(Line)
        if MatchList != []:
            MatchList = MatchList[0]
            if len(MatchList) == 2:
                LineNumber = int(MatchList[0], 0)
                InjectedFile = MatchList[1]
                InjectedFile = os.path.normpath(InjectedFile)
                InjectedFile = os.path.normcase(InjectedFile)
                # The first injected file must be the preprocessed file itself
                if PreprocessedFile == "":
                    PreprocessedFile = InjectedFile
            LineControlDirectiveFound = True
            continue
        elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
            continue

        if LineIndexOfOriginalFile is None:
            #
            # Any non-empty lines must be from original preprocessed file.
            # And this must be the first one.
            #
            LineIndexOfOriginalFile = Index
            EdkLogger.verbose(
                "Found original file content starting from line %d" %
                (LineIndexOfOriginalFile + 1))

        if TrimLong:
            Line = gLongNumberPattern.sub(r"\1", Line)
        # convert HEX number format if indicated
        if ConvertHex:
            Line = gHexNumberPattern.sub(r"0\2h", Line)
        else:
            Line = gHexNumberPattern.sub(r"\1\2", Line)

        # convert Decimal number format
        Line = gDecNumberPattern.sub(r"\1", Line)

        if LineNumber is not None:
            EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
            # in case preprocessor removed some lines, like blank or comment lines
            if LineNumber <= len(NewLines):
                # possible?
                NewLines[LineNumber - 1] = Line
            else:
                if LineNumber > (len(NewLines) + 1):
                    for LineIndex in range(len(NewLines), LineNumber - 1):
                        NewLines.append(TAB_LINE_BREAK)
                NewLines.append(Line)
            LineNumber = None
            EdkLogger.verbose("Now we have lines: %d" % len(NewLines))
        else:
            NewLines.append(Line)

    # in case there's no line directive or linemarker found
    if (not LineControlDirectiveFound) and NewLines == []:
        MulPatternFlag = False
        SinglePatternFlag = False
        Brace = 0
        for Index in range(len(Lines)):
            Line = Lines[Index]
            if MulPatternFlag == False and gTypedef_MulPattern.search(
                    Line) is None:
                if SinglePatternFlag == False and gTypedef_SinglePattern.search(
                        Line) is None:
                    # remove "#pragram pack" directive
                    if gPragmaPattern.search(Line) is None:
                        NewLines.append(Line)
                    continue
                elif SinglePatternFlag == False:
                    SinglePatternFlag = True
                if Line.find(";") >= 0:
                    SinglePatternFlag = False
            elif MulPatternFlag == False:
                # found "typedef struct, typedef union, union, struct", keep its position and set a flag
                MulPatternFlag = True

            # match { and } to find the end of typedef definition
            if Line.find("{") >= 0:
                Brace += 1
            elif Line.find("}") >= 0:
                Brace -= 1

            # "typedef struct, typedef union, union, struct" must end with a ";"
            if Brace == 0 and Line.find(";") >= 0:
                MulPatternFlag = False

    # save to file
    try:
        with open(Target, 'w') as File:
            File.writelines(NewLines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)