Code Example
File: parser.py Project: ThomasBollmeier/TBParser
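
The Parser class below drives a Lexer over an input stream, matching tokens against the grammar's node graph and backtracking to sibling branches when a match fails; the accepted path is then converted into an abstract syntax tree. Its collaborators (Lexer, Path, Context, SuccessorError, AstNode, ParseError, FileInput, StringInput) are defined in the project's other modules and are omitted from this excerpt.
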
class Parser(object):

    def __init__(self, grammar):
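        # Keep the grammar and build a lexer for its token types.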

        self._grammar = grammar
        
        self._lexer = Lexer()
        for tt in self._grammar.getTokenTypes():
            self._lexer.addTokenType(tt)
            
        self._curFile = None
        
    def enableLineComments(self, lineCommentStart='//'):
        
        self._lexer.enableLineComments(lineCommentStart)
                

    def enableBlockComments(self,
                            blockCommentStart='/*', 
                            blockCommentEnd='*/'
                            ):

        self._lexer.enableBlockComments(blockCommentStart, blockCommentEnd)

    def parse(self, inStream):
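        # Walk the grammar graph over the token stream, backtracking to
        # sibling branches on mismatch; on success, build and return the AST.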
        
        self._lexer.setInputStream(inStream)
        self._tokenBuffer = []
        path = Path()
        path.push(self._grammar.getSocket(), None)
        error = False
        done = False

        while not done:
            
            token = self._getNextToken()
            
            if not token:

                found, path = self._findPathToEnd(path)
                
                if found:
                    done = True    
                else:
                    found, path = self._findNextSibling(path)
                    if not found:
                        error = True
                        done = True

                continue
 
            found, path = self._findNextMatchingNode(token, path)
            
            if found:
                self._tokenBuffer.pop()
            else:
                found, path = self._findNextSibling(path)
                if not found:
                    done = True
                    error = True
        
        if not error:
            return self._createAst(path)
        else:
            if self._tokenBuffer:
                token = self._tokenBuffer[0] 
                text = token.getText()
                line, column = token.getStartPosition()
                raise ParseError(self._curFile, line, column, text)
            else:
                raise Exception("Parsing error")

    def parseFile(self, filePath):
        
        self._curFile = filePath

        res = self.parse(FileInput(filePath))
        
        self._curFile = None

        return res
    
    def parseString(self, string):

        return self.parse(StringInput(string))

    def _createAst(self, path):
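        # Replay the accepted path: a rule-start node opens a new AST node
        # (saving the parent on a stack), a rule-end node closes it and
        # attaches it to its parent, and a token node becomes a leaf child.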

        stack = []
        current = None

        numElements = path.getLength()

        for i in range(numElements):

            element = path.getElement(i)
            node = element.getGrammarNode()
            token = element.getToken()

            if node.isRuleStart():

                if current:
                    stack.append(current)
                name = node.getName()
                id_ = node.getId()    
                text = token.getText() if token else ''
                current = AstNode(name, text, id_)

            elif node.isRuleEnd():

                # Transform if applicable, preserving the rule's ID:
                tmp = current
                current = node.transform(current)
                if current is not tmp:
                    current.setId(tmp.getId())
                
                parent = stack.pop() if stack else None
                if parent:
                    parent.addChild(current)
                    current = parent
                else:
                    break

            elif node.isTokenNode():

                id_ = node.getId()
                text = token.getText() if token else ''
                current.addChild(AstNode('token', text, id_))

            else:
                continue

        return current

    def _findNextSibling(self, path):
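        # Backtrack: pop path elements (returning their tokens to the token
        # buffer) until a node with an untried sibling is found; if none
        # exists, restore the original path and report failure.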
        
        removed = []
        
        while True:

            if path.getLength() < 2:
                # Restore the original path:
                while removed:
                    elem = removed.pop()
                    token = elem.getToken()
                    if token:
                        self._tokenBuffer.pop()
                    path.push(elem.getGrammarNode(), token)
                
                return False, path

            siblingFound, path = self._gotoNextSibling(path)
            
            if siblingFound:
                return True, path
            else:
                elem = path.pop()
                token = elem.getToken()
                removed.append(elem)
                if token:
                    self._tokenBuffer.append(token)
 
    def _gotoNextSibling(self, path):
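        # Try to replace the top path element with the next successor of
        # its predecessor node; on failure the path is left unchanged.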
            
        if path.getLength() < 2:
            return False, path

        elem = path.pop()
        start = elem.getGrammarNode()
        token = elem.getToken()

        prev = path.getElement(-1).getGrammarNode()
        context = Context(path, token)

        try:
            successors = prev.getSuccessors(context)
        except SuccessorError:
            path.push(start, token)
            return False, path
        
        try:
            idx = successors.index(start)
            if idx < len(successors) - 1:
                sibling = successors[idx+1]
                if token:
                    self._tokenBuffer.append(token)
                path.push(sibling, None)
                return True, path
            else:
                path.push(start, token)
                return False, path
        except ValueError:
            path.push(start, token)
            return False, path

    def _getNextToken(self):
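        # One-token lookahead: refill the buffer from the lexer when it is
        # empty, then peek at its last element.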
        
        if not self._tokenBuffer:
            token = self._lexer.getNextToken()
            if token:
                self._tokenBuffer.append(token)

        if self._tokenBuffer:
            return self._tokenBuffer[-1]
        else:
            return None

    def _findNextMatchingNode(self, token, path):
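        # Depth-first search for the next token node that accepts the given
        # token, extending the path through intermediate grammar nodes.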
        
        elem = path.getElement(-1)
        startNode = elem.getGrammarNode()
        startToken = elem.getToken()
        
        if startNode.isTokenNode() and startToken is None:
            
            if startNode.getTokenTypeId() in token.getTypeIds():
                path.pop()
                path.push(startNode, token)
                return True, path
            else:
                return False, path

        try:
            successors = startNode.getSuccessors(Context(path, token))
        except SuccessorError:
            return False, path

        for succ in successors:
            
            path.push(succ, None)

            found, path = self._findNextMatchingNode(token, path)
            if found:
                return found, path
            else:
                path.pop()
    
        return False, path
    
    def _findPathToEnd(self, path):
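        # Test whether the grammar's end is reachable from the current node
        # without consuming any further tokens (token nodes are skipped).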

        node = path.getElement(-1).getGrammarNode()
        try:
            successors = node.getSuccessors(Context(path))
        except SuccessorError:
            return False, path

        if not successors:
            return True, path  # Done!

        for succ in successors:
            
            if succ.isTokenNode():
                continue

            path.push(succ, None)

            found, path = self._findPathToEnd(path)
            if found:
                return found, path
            else:
                path.pop()
    
        return False, path
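
Usage sketch: the snippet below shows how this Parser might be driven, using only the methods defined above. MyGrammar is a hypothetical stand-in for a Grammar subclass; how grammars and token types are actually declared in TBParser is not shown in this excerpt.

from my_grammar import MyGrammar  # hypothetical module, not part of this excerpt

grammar = MyGrammar()  # assumed to provide getTokenTypes() and getSocket()
parser = Parser(grammar)
parser.enableLineComments('//')
parser.enableBlockComments('/*', '*/')

sourceText = "..."  # input conforming to the hypothetical grammar
ast = parser.parseString(sourceText)  # raises ParseError on invalid input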