Code example #1
    def _processFile(self):

        self.lAllObjects = []
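        # Tokenize each line, expanding tabs and stripping trailing
        # whitespace, then wrap every token in a parser item object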
        for sLine in self.filecontent:
            lTokens = tokens.create(sLine.replace('\t', '  ').rstrip())
            lObjects = []
            for sToken in lTokens:
                lObjects.append(parser.item(sToken))

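            # Classify the raw tokens: blanks, whitespace, comments,
            # preprocessor directives and tool pragmas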
            blank.classify(lObjects)
            whitespace.classify(lTokens, lObjects)
            comment.classify(lTokens, lObjects)
            preprocessor.classify(lTokens, lObjects)
            pragma.classify(lTokens, lObjects, self.lOpenPragmas,
                            self.lClosePragmas, self.dVars)

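            # Keep the classified objects and terminate the line with an
            # explicit carriage-return object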
            self.lAllObjects.extend(lObjects)
            self.lAllObjects.append(parser.carriage_return())

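        # Record the filename on the first object; an empty file has none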
        try:
            self.lAllObjects[0].set_filename(self.filename)
        except IndexError:
            pass

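        # Turn the flat object list into VHDL tokens and build the token map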
        design_file.tokenize(self.lAllObjects)
        post_token_assignments(self.lAllObjects)

        set_token_hierarchy_value(self.lAllObjects)
        self.oTokenMap = process_tokens(self.lAllObjects)
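The method reads the file line by line, tokenizes each line, wraps the tokens in parser items, and runs a chain of classifiers over them before building the token map. Below is a minimal, self-contained sketch of that tokenize-then-classify pattern, assuming VHDL-style '--' comments; every name in it (Item, tokenize, classify_blank, classify_whitespace, classify_comment, process_file) is a hypothetical illustration, not the project's actual API.

    import re

    class Item:
        # One token wrapped with a classification tag (hypothetical).
        def __init__(self, value, kind='unknown'):
            self.value = value
            self.kind = kind

    def tokenize(line):
        # Split on runs of whitespace but keep the whitespace as tokens.
        return [t for t in re.split(r'(\s+)', line) if t != '']

    def classify_blank(objects):
        # A line that produced no tokens at all is a blank line.
        if not objects:
            objects.append(Item('', 'blank'))

    def classify_whitespace(objects):
        for obj in objects:
            if obj.kind == 'unknown' and obj.value.isspace():
                obj.kind = 'whitespace'

    def classify_comment(objects):
        # Everything from a token starting with '--' to the end of
        # the line is a VHDL comment.
        in_comment = False
        for obj in objects:
            if obj.value.startswith('--'):
                in_comment = True
            if in_comment:
                obj.kind = 'comment'

    def process_file(lines):
        all_objects = []
        for line in lines:
            sLine = line.replace('\t', '  ').rstrip()
            objects = [Item(t) for t in tokenize(sLine)]
            classify_blank(objects)
            classify_whitespace(objects)
            classify_comment(objects)
            all_objects.extend(objects)
            # Terminate each line with an explicit end-of-line object.
            all_objects.append(Item('\n', 'carriage_return'))
        return all_objects

Each classifier pass only claims tokens that are still unclassified (or, for the comment pass, everything after the comment start), which mirrors how the original runs its blank, whitespace, comment, preprocessor and pragma classifiers in a fixed order over the same object list.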
Code example #2
    def update(self, lUpdates):

        if len(lUpdates) == 0:
            return
        bUpdateMap = True
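        # Walk the updates from last to first so the start/end indices of
        # earlier updates stay valid while slices of a different length
        # are spliced in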
        for oUpdate in lUpdates[::-1]:
            iStart = oUpdate.oTokens.iStartIndex
            lTokens = oUpdate.get_tokens()
            iEnd = oUpdate.oTokens.iEndIndex
            self.lAllObjects[iStart:iEnd] = lTokens
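        # Rebuild the token map so it reflects the modified token stream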
        if bUpdateMap:
            self.oTokenMap = process_tokens(self.lAllObjects)
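Replacing a slice with a token list of a different length shifts the index of everything after it, so the updates are applied from the last one back to the first; that way each update's start/end indices are still valid when they are used, and the token map is rebuilt once at the end. A minimal sketch of the idea, with a hypothetical Update class standing in for oUpdate:

    class Update:
        def __init__(self, start, end, tokens):
            self.start = start
            self.end = end
            self.tokens = tokens

    items = ['a', 'b', 'c', 'd', 'e']
    updates = [Update(1, 2, ['B1', 'B2']),  # replace 'b' with two tokens
               Update(3, 5, ['D'])]         # replace 'd', 'e' with one token

    # Reverse order: the splice at 3:5 happens before indices 1:2 can shift.
    for update in updates[::-1]:
        items[update.start:update.end] = update.tokens

    print(items)  # ['a', 'B1', 'B2', 'c', 'D']

Applied front to back instead, the first splice would grow the list by one, and the second update's 3:5 slice would then cut into the wrong tokens.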