def parse_qls(file):
    """Parse a QLS source file and return its parse tree.

    :param file: path to the QLS file to parse.
    :return: the ``qlsObject`` parse-tree root, or ``None`` when a
        ``TypeError`` is raised while setting up or running the parser
        (matches the original best-effort contract).
    """
    try:
        file_stream = FileStream(file)
        lexer = QLSLexer(file_stream)
        tokens = CommonTokenStream(lexer)
        parser = QLSParser(tokens)
        # The runtime keeps ``_listeners`` as a *list*; the original
        # ``parser._listeners = ErrorListener()`` stored a bare instance,
        # which breaks listener dispatch when an error is reported.
        # Use the public API to swap out the default console listener.
        parser.removeErrorListeners()
        parser.addErrorListener(ErrorListener())
        return parser.qlsObject()
    except TypeError:
        # Signal failure with None rather than propagating.
        return None
def split(self, path: str):
    """Tokenize an XPath ``path`` string into a list of path elements.

    :param path: the XPath expression, e.g. ``//rule/!TOKEN``.
    :return: list of element objects produced by ``self.getXPathElement``,
        each with its ``invert`` flag set.
    :raises Exception: on invalid characters, a dangling separator/bang,
        or an unknown path element.
    """
    input = InputStream(path)
    lexer = XPathLexer(input)

    # Abort lexing immediately on error.  The function is assigned as an
    # *instance* attribute, so it is called unbound as ``lexer.recover(e)``
    # — it must therefore take the exception as its only parameter.  (The
    # original ``def recover(self, e)`` received the exception as ``self``
    # and raised TypeError instead of ``e``, bypassing the handler below.)
    def recover(e):
        raise e
    lexer.recover = recover
    lexer.removeErrorListeners()
    lexer.addErrorListener(ErrorListener())  # XPathErrorListener does no more
    tokenStream = CommonTokenStream(lexer)
    try:
        tokenStream.fill()
    except LexerNoViableAltException as e:
        pos = lexer.column
        msg = "Invalid tokens or characters at index %d in path '%s'" % (pos, path)
        raise Exception(msg, e)

    tokens = iter(tokenStream.tokens)
    elements = list()
    for el in tokens:
        invert = False
        anywhere = False
        # Check for path separators, if none assume root
        if el.type in [XPathLexer.ROOT, XPathLexer.ANYWHERE]:
            anywhere = el.type == XPathLexer.ANYWHERE
            next_el = next(tokens, None)
            if not next_el:
                raise Exception('Missing element after %s' % el.getText())
            else:
                el = next_el
        # Check for bangs
        if el.type == XPathLexer.BANG:
            invert = True
            next_el = next(tokens, None)
            if not next_el:
                raise Exception('Missing element after %s' % el.getText())
            else:
                el = next_el
        # Add searched element
        if el.type in [XPathLexer.TOKEN_REF, XPathLexer.RULE_REF,
                       XPathLexer.WILDCARD, XPathLexer.STRING]:
            element = self.getXPathElement(el, anywhere)
            element.invert = invert
            elements.append(element)
        elif el.type == Token.EOF:
            break
        else:
            raise Exception("Unknown path element %s" % lexer.symbolicNames[el.type])
    return elements
def split(self, path: str):
    """Tokenize an XPath ``path`` string into a list of path elements.

    Index-based variant: walks the filled token list with an explicit
    cursor instead of an iterator.

    :param path: the XPath expression, e.g. ``//rule/!TOKEN``.
    :return: list of element objects produced by ``self.getXPathElement``.
    :raises Exception: on invalid characters or an unknown path element.
    """
    input = InputStream(path)
    lexer = XPathLexer(input)

    # Assigned as an *instance* attribute, so it is called unbound as
    # ``lexer.recover(e)`` and must take only the exception.  (The original
    # ``def recover(self, e)`` signature raised TypeError instead of ``e``.)
    def recover(e):
        raise e
    lexer.recover = recover
    lexer.removeErrorListeners()
    lexer.addErrorListener(ErrorListener())  # XPathErrorListener does no more
    tokenStream = CommonTokenStream(lexer)
    try:
        tokenStream.fill()
    except LexerNoViableAltException as e:
        # The Python runtime exposes the position as the ``column``
        # attribute; the original Java-style ``lexer.getColumn()`` does not
        # exist and raised AttributeError here instead of reporting nicely.
        pos = lexer.column
        msg = "Invalid tokens or characters at index " + str(pos) + " in path '" + path + "'"
        raise Exception(msg, e)

    # ``getTokens()`` requires start/stop arguments in the Python runtime;
    # the filled token list is available as the ``tokens`` attribute.
    tokens = tokenStream.tokens
    elements = list()
    n = len(tokens)
    i = 0
    while i < n:
        el = tokens[i]
        nxt = None  # renamed from ``next`` to avoid shadowing the builtin
        if el.type in [XPathLexer.ROOT, XPathLexer.ANYWHERE]:
            anywhere = el.type == XPathLexer.ANYWHERE
            i += 1
            nxt = tokens[i]
            invert = nxt.type == XPathLexer.BANG
            if invert:
                i += 1
                nxt = tokens[i]
            pathElement = self.getXPathElement(nxt, anywhere)
            pathElement.invert = invert
            elements.append(pathElement)
            i += 1
        elif el.type in [XPathLexer.TOKEN_REF, XPathLexer.RULE_REF,
                         XPathLexer.WILDCARD]:
            elements.append(self.getXPathElement(el, False))
            i += 1
        elif el.type == Token.EOF:
            break
        else:
            raise Exception("Unknown path element " + str(el))
    return elements
def run_ql_file(file):
    """Parse a QL file, build its AST, and run the static checker on it.

    :param file: path to the QL source file.
    :return: None; side effect is whatever ``StaticChecker.run`` does
        (e.g. reporting diagnostics).
    """
    file_stream = FileStream(file)
    lexer = QLLexer(file_stream)
    tokens = CommonTokenStream(lexer)
    parser = QLParser(tokens)
    # ``_listeners`` must be a list of listeners; the original
    # ``parser._listeners = ErrorListener()`` stored a bare instance and
    # would break error dispatch.  Swap listeners via the public API.
    parser.removeErrorListeners()
    parser.addErrorListener(ErrorListener())
    parse_tree = parser.form()
    visitor = ParseTreeVisitor()
    ast = parse_tree.accept(visitor)
    static_checker = StaticChecker()
    static_checker.run(ast)
def generate_ast(stream):
    """Build the AST for an SMT-LIBv2 input stream.

    Attaches the same error listener to both lexer and parser (after
    removing the default console listeners), parses from the ``start``
    rule, and returns the result of visiting the tree.

    :param stream: a character stream accepted by ``SMTLIBv2Lexer``.
    :return: the formula produced by ``ASTVisitor.visitStart``.
    """
    listener = ErrorListener()

    lexer = SMTLIBv2Lexer(stream)
    lexer.removeErrorListeners()
    lexer.addErrorListener(listener)

    parser = SMTLIBv2Parser(CommonTokenStream(lexer))
    parser.removeErrorListeners()
    parser.addErrorListener(listener)

    tree = parser.start()
    return ASTVisitor().visitStart(tree)
def generate_ast(stream, prep_seed=True):
    """Build the AST for an SMT-LIBv2 input stream, optionally seeding it.

    :param stream: a character stream accepted by ``SMTLIBv2Lexer``.
    :param prep_seed: when True, pass the parsed formula through
        ``prepare_seed`` before returning it.
    :return: the (optionally prepared) formula, or ``None`` when the input
        is empty or a parse error left no commands.
    """
    error_listener = ErrorListener()
    lexer = SMTLIBv2Lexer(stream)
    lexer.removeErrorListeners()
    lexer.addErrorListener(error_listener)
    stream = CommonTokenStream(lexer)
    parser = SMTLIBv2Parser(stream)
    parser.removeErrorListeners()
    # Re-attach the shared listener: the original removed the default
    # console listener but never added ``error_listener`` back, so parser
    # (as opposed to lexer) errors were silently discarded.  The sibling
    # generate_ast variant in this file attaches it to both.
    parser.addErrorListener(error_listener)
    tree = parser.start()
    vis = ASTVisitor()
    formula = vis.visitStart(tree)
    # Empty file or a preceding parse error leaves no commands.
    if len(formula.commands) == 0:
        return None
    return prepare_seed(formula) if prep_seed else formula
def parse(value):
    """Parse a FHIRPath expression string and return the root AST node.

    Walks the parsed ``expression`` tree with an ``ASTPathListener`` and
    returns the first entry of its ``parentStack``.

    :param value: the FHIRPath expression text.
    :return: the root element of the listener's parent stack.
    """
    listener = ErrorListener()
    path_listener = ASTPathListener()

    lexer = FHIRPathLexer(InputStream(value))
    # NOTE(review): ``recover`` comes from module scope (not defined in
    # this function) — presumably it aborts lexing on error; confirm.
    lexer.recover = recover
    lexer.removeErrorListeners()
    lexer.addErrorListener(listener)

    parser = FHIRPathParser(CommonTokenStream(lexer))
    parser.buildParseTrees = True
    parser.removeErrorListeners()
    parser.addErrorListener(listener)

    walker = ParseTreeWalker()
    walker.walk(path_listener, parser.expression())
    return path_listener.parentStack[0]
from antlr4.error.ErrorListener import ErrorListener, ConsoleErrorListener


class SyntaxError(Exception):
    """Raised by ``BasisErrorListener`` when the recognizer reports an error.

    NOTE(review): this shadows the builtin ``SyntaxError`` within this
    module; callers catching it get this custom class — confirm intended.
    """
    pass


class BasisErrorListener(ErrorListener):
    """Listener that converts syntax errors into exceptions.

    Ambiguity / full-context / context-sensitivity reports are silenced.
    """

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        raise SyntaxError(f"{msg} on line: {line}, col: {column}")

    def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex,
                        exact, ambigAlts, configs):
        pass

    def reportAttemptingFullContext(self, recognizer, dfa, startIndex,
                                    stopIndex, conflictingAlts, configs):
        pass

    def reportContextSensitivity(self, recognizer, dfa, startIndex,
                                 stopIndex, prediction, configs):
        pass


# Replace the default ConsoleErrorListener singleton with a no-op listener
# so nothing is printed to stderr by default.
# https://github.com/antlr/antlr4/blob/be881fa6b91d1980936f8dcab902a9dc26ecd310/runtime/Python3/src/antlr4/error/ErrorListener.py#L27
ConsoleErrorListener.INSTANCE = ErrorListener()