class DocstringCommentTest(unittest.TestCase):

    def test_docstring_success(self):
        self.run_docstring_test('valid')

    @pytest.mark.xfail(strict=True, raises=DocstringCommentException)
    def test_docstring_failure(self):
        self.run_docstring_test('invalid')

    def run_docstring_test(self, case: str):
        assert case in ['valid', 'invalid']
        input_file = FileStream(
            os.path.join(
                os.path.realpath(os.path.join(os.path.dirname(__file__), case)),
                'DocstringCommentTest.nestml'))
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)
        compilation_unit = parser.nestMLCompilationUnit()

        # now build the meta_model
        ast_builder_visitor = ASTBuilderVisitor(stream.tokens)
        ast = ast_builder_visitor.visit(compilation_unit)
        neuron_body_elements = ast.get_neuron_list()[0].get_body().get_body_elements()

        # now run the docstring checker visitor
        visitor = CommentCollectorVisitor(stream.tokens, strip_delim=False)
        compilation_unit.accept(visitor)

        # test whether ``"""`` is used correctly
        assert len(ast.get_neuron_list()) == 1, "Neuron failed to load correctly"

        class CommentCheckerVisitor(ASTVisitor):
            def visit(self, ast):
                for comment in ast.get_comments():
                    if "\"\"\"" in comment \
                            and not (isinstance(ast, ASTNeuron) or isinstance(ast, ASTNestMLCompilationUnit)):
                        raise DocstringCommentException()
                for comment in ast.get_post_comments():
                    if "\"\"\"" in comment:
                        raise DocstringCommentException()

        visitor = CommentCheckerVisitor()
        ast.accept(visitor)
def parse(rule: str, text: str):
    lexer = DemystifyLexer(InputStream(text))
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)
    stream = CommonTokenStream(lexer)
    try:
        # first pass: fast SLL prediction, bailing out on the first error
        parser = DemystifyParser(stream)
        parser._interp.predictionMode = PredictionMode.SLL
        parser._errHandler = BailErrorStrategy()
        parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
        return getattr(parser, rule)()
    except ParseCancellationException:
        # second pass: rewind the token stream and retry with full LL prediction
        stream.seek(0)
        parser = DemystifyParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
        return getattr(parser, rule)()
def compileTreePattern(self, pattern: str, patternRuleIndex: int):
    tokenList = self.tokenize(pattern)
    tokenSrc = ListTokenSource(tokenList)
    tokens = CommonTokenStream(tokenSrc)
    from antlr4.ParserInterpreter import ParserInterpreter
    parserInterp = ParserInterpreter(self.parser.grammarFileName, self.parser.tokenNames,
                                     self.parser.ruleNames, self.parser.getATNWithBypassAlts(), tokens)
    tree = None
    try:
        parserInterp.setErrorHandler(BailErrorStrategy())
        tree = parserInterp.parse(patternRuleIndex)
    except ParseCancellationException as e:
        raise e.cause
    except RecognitionException as e:
        raise e
    except Exception as e:
        raise CannotInvokeStartRule(e)

    # Make sure tree pattern compilation checks for a complete parse
    if tokens.LA(1) != Token.EOF:
        raise StartRuleDoesNotConsumeFullPattern()

    from antlr4.tree.ParseTreePattern import ParseTreePattern
    return ParseTreePattern(self, pattern, patternRuleIndex, tree)
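# Hedged usage sketch for the method above: application code would normally reach
# compileTreePattern() through Parser.compileParseTreePattern().  The parser class
# MyParser, the rule constant, the context variable and the pattern text below are
# hypothetical placeholders, not taken from the source.
def match_assignment(parser, statement_ctx):
    pattern = parser.compileParseTreePattern("<ID> = <expr>;", MyParser.RULE_statement)
    return pattern.matches(statement_ctx)  # True if the subtree has the shape "ID = expr ;"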
def parse_query(text, optimize=True):
    from .gen.ContactQLLexer import ContactQLLexer
    from .gen.ContactQLParser import ContactQLParser

    stream = InputStream(text)
    lexer = ContactQLLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = ContactQLParser(tokens)
    parser._errHandler = BailErrorStrategy()
    try:
        tree = parser.parse()
    except ParseCancellationException as ex:
        message = None
        if ex.args and isinstance(ex.args[0], NoViableAltException):
            token = ex.args[0].offendingToken
            if token is not None and token.type != ContactQLParser.EOF:
                message = "Search query contains an error at: %s" % token.text
        if message is None:
            message = "Search query contains an error"
        raise SearchException(message)

    visitor = ContactQLVisitor()
    query = ContactQuery(visitor.visit(tree))
    return query.optimized() if optimize else query
def parse(actions, spdOutput, two_stageParsing):
    tokens = CommonTokenStream(SPDGrammarLexer(InputStream(spdOutput)))
    parser = SPDGrammarParser(tokens)
    visitor = SPDGrammarVisitorImplementation(actions)

    if not two_stageParsing:
        visitor.visit(parser.json())
        return visitor._errors

    parser._interp.predictionMode = PredictionMode.SLL
    parser.removeErrorListeners()
    parser._errHandler = BailErrorStrategy()

    try:
        visitor.visit(parser.json())
    except ParseCancellationException:
        # SLL parse bailed out: rewind and retry with the full LL prediction mode
        tokens.seek(0)
        parser.reset()
        parser.addErrorListener(ConsoleErrorListener.INSTANCE)
        parser._errHandler = DefaultErrorStrategy()
        parser._interp.predictionMode = PredictionMode.LL
        visitor.visit(parser.json())

    return visitor._errors
def parse(self, expression, error_listener):
    lexer = DynamoDbGrammarLexer(antlr4.InputStream(expression))
    tokens = antlr4.CommonTokenStream(lexer)
    parser = DynamoDbGrammarParser(tokens)
    parser.buildParseTrees = True
    lexer.removeErrorListeners()
    lexer.addErrorListener(error_listener)
    parser.removeErrorListeners()
    # DO NOT call addErrorListener(error_listener) while BailErrorStrategy is installed:
    # ExpressionErrorListener converts syntax errors into validation exceptions, but a
    # syntax error reported under the SLL prediction mode may be a false positive, and
    # we don't want such an error to become a validation exception that reaches customers.
    # Instead, the expression is parsed again with the LL strategy if a syntax error occurs.
    # BailErrorStrategy re-throws RecognitionExceptions as ParseCancellationException so
    # that they are not swallowed by the parsing rule invoked in parse_stub.
    parser._interp.predictionMode = antlr4.PredictionMode.SLL
    parser._errHandler = BailErrorStrategy()
    try:
        # Stage 1: parse with PredictionMode.SLL.  If there are no issues, SLL was
        # enough; if there were problems, LL is used to try again.
        return self.parse_stub(parser)
    except ParseCancellationException:
        # Stage 2: the error may be real, or SLL may simply not have been strong
        # enough, so re-parse with the default (LL) prediction mode.
        tokens.reset()
        parser.reset()
        parser.addErrorListener(error_listener)
        parser._errHandler = DefaultErrorStrategy()
        parser._interp.predictionMode = antlr4.PredictionMode.LL
        return self.parse_stub(parser)
def pretty_tree(rule: str, text: str):
    lexer = DemystifyLexer(InputStream(text))
    stream = CommonTokenStream(lexer)
    try:
        # first pass: fast SLL prediction, bailing out on the first error
        parser = DemystifyParser(stream)
        parser._interp.predictionMode = PredictionMode.SLL
        parser._errHandler = BailErrorStrategy()
        tree = getattr(parser, rule)()
    except ParseCancellationException:
        # second pass: rewind the token stream and retry with the default strategy
        stream.seek(0)
        parser = DemystifyParser(stream)
        tree = getattr(parser, rule)()
    return to_pretty_tree(tree, recog=parser)
def test(self):
    model_files = []
    for dir in ['models', os.path.join('tests', 'nest_tests', 'resources'), os.path.join('tests', 'valid')]:
        model_files += glob.glob(os.path.realpath(os.path.join(os.path.dirname(__file__),
                                                               os.path.join('..', dir, '*.nestml'))))
    assert len(model_files) > 0

    for filename in model_files:
        print("Processing " + os.path.basename(filename))
        input_file = FileStream(filename)
        lexer = PyNestMLLexer(input_file)
        lexer._errHandler = BailErrorStrategy()
        lexer._errHandler.reset(lexer)

        # create a token stream
        stream = CommonTokenStream(lexer)
        stream.fill()

        # parse the file
        parser = PyNestMLParser(stream)
        parser._errHandler = BailErrorStrategy()
        parser._errHandler.reset(parser)
        compilation_unit = parser.nestMLCompilationUnit()
        assert compilation_unit is not None
def main():
    filename = input('Enter a text file: ')
    with open(filename, "r") as file:
        text = file.read()
    text = text.replace(',', ' ,')
    input_stream = InputStream(text)
    lexer = complexLexer(input_stream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = complexParser(stream)
    parser._errHandler = BailErrorStrategy()
    try:
        tree = parser.plans()
        print("Legal Input")
    except ParseCancellationException:
        print("Syntax error in input")
def evaluate_expression(self, expression, context, strategy=EvaluationStrategy.COMPLETE):
    """
    Evaluates a single expression, e.g. "contact.reports * 2"
    :param expression: the expression string
    :param context: the evaluation context
    :param strategy: the evaluation strategy
    :return: the evaluated expression value
    """
    from .gen.ExcellentLexer import ExcellentLexer
    from .gen.ExcellentParser import ExcellentParser

    stream = InputStream(expression)
    lexer = ExcellentLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = ExcellentParser(tokens)
    parser._errHandler = BailErrorStrategy()

    try:
        tree = parser.parse()

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Expression '%s' parsed as %s" % (expression, tree.toStringTree()))
    except ParseCancellationException as ex:
        message = None
        if ex.args and isinstance(ex.args[0], NoViableAltException):
            token = ex.args[0].offendingToken
            if token is not None and token.type != ExcellentParser.EOF:
                message = "Expression error at: %s" % token.text
        if message is None:
            message = "Expression is invalid"
        raise EvaluationError(message, ex)

    if strategy == EvaluationStrategy.RESOLVE_AVAILABLE:
        resolved = self._resolve_available(tokens, context)
        if resolved is not None:
            return resolved

    visitor = ExcellentVisitor(self._function_manager, context)
    return visitor.visit(tree)
def parse_query(text, optimize=True, as_anon=False):
    from .gen.ContactQLLexer import ContactQLLexer
    from .gen.ContactQLParser import ContactQLParser

    if as_anon is False:
        # if the search query looks like a phone number, clean it before parsing
        cleaned_phonenumber = is_it_a_phonenumber(text)
    else:
        cleaned_phonenumber = None

    if cleaned_phonenumber:
        stream = InputStream(cleaned_phonenumber)
    else:
        stream = InputStream(text)

    lexer = ContactQLLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = ContactQLParser(tokens)
    parser._errHandler = BailErrorStrategy()
    try:
        tree = parser.parse()
    except ParseCancellationException as ex:
        message = None
        if ex.args and isinstance(ex.args[0], NoViableAltException):
            token = ex.args[0].offendingToken
            if token is not None and token.type != ContactQLParser.EOF:
                message = "Search query contains an error at: %s" % token.text
        if message is None:
            message = "Search query contains an error"
        raise SearchException(message)

    visitor = ContactQLVisitor(as_anon)
    query = ContactQuery(visitor.visit(tree))
    return query.optimized() if optimize else query
def recognize_file(filename):
    terminalCnt = len(tokenize_file(filename))
    prog = read_file(filename)
    input_stream = InputStream(prog)
    lexer = m2_Lexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = m2_Parser(stream)
    errHandler = BailErrorStrategy()
    visitor = ParseTreeVisitor
    parser._errHandler = errHandler
    try:
        st_ctx = parser.start()
        visitor = PVisitor()
        visitor.visit(st_ctx)
        pprint(Trees.toStringTree(st_ctx, None, m2_Parser), indent=1, width=1)
        print(visitor.terminalCount)
        print(terminalCnt)
        if abs(visitor.terminalCount - terminalCnt) != 0:
            return False
    except Exception as e:
        print(e)
        return False
    return True
def parse(atom):
    tokens = CommonTokenStream(ASPGrammarLexer(InputStream(atom)))
    parser = ASPGrammarParser(tokens)
    visitor = ASPParser()

    parser._interp.predictionMode = PredictionMode.SLL
    parser.removeErrorListeners()
    parser._errHandler = BailErrorStrategy()

    try:
        visitor.visit(parser.output())
    except ParseCancellationException:
        # SLL parse bailed out: rewind and retry with the full LL prediction mode
        tokens.seek(0)
        parser.reset()
        parser.addErrorListener(ConsoleErrorListener.INSTANCE)
        parser._errHandler = DefaultErrorStrategy()
        parser._interp.predictionMode = PredictionMode.LL
        visitor.visit(parser.output())

    return visitor
def parse(answerSets, dlvhexOutput, two_stageParsing):
    tokens = CommonTokenStream(DLVHEXLexer(InputStream(dlvhexOutput)))
    parser = DLVHEXParser(tokens)
    visitor = DLVHEXParserVisitorImplementation(answerSets)

    if not two_stageParsing:
        visitor.visit(parser.output())
        return

    parser._interp.predictionMode = PredictionMode.SLL
    parser.removeErrorListeners()
    parser._errHandler = BailErrorStrategy()

    try:
        visitor.visit(parser.output())
    except ParseCancellationException:
        # SLL parse bailed out: rewind and retry with the full LL prediction mode
        tokens.seek(0)
        parser.reset()
        parser.addErrorListener(ConsoleErrorListener.INSTANCE)
        parser._errHandler = DefaultErrorStrategy()
        parser._interp.predictionMode = PredictionMode.LL
        visitor.visit(parser.output())
def legacy_parse_query(text, optimize=True, as_anon=False):  # pragma: no cover
    """
    Parses the given contact query and optionally optimizes it
    """
    from .gen.ContactQLLexer import ContactQLLexer
    from .gen.ContactQLParser import ContactQLParser

    is_phone, cleaned_phone = is_phonenumber(text)

    if not as_anon and is_phone:
        stream = InputStream(cleaned_phone)
    else:
        stream = InputStream(text)

    lexer = ContactQLLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = ContactQLParser(tokens)
    parser._errHandler = BailErrorStrategy()
    try:
        tree = parser.parse()
    except ParseCancellationException as ex:
        message = None
        if ex.args and isinstance(ex.args[0], NoViableAltException):
            token = ex.args[0].offendingToken
            if token is not None and token.type != ContactQLParser.EOF:
                message = "Search query contains an error at: %s" % token.text
        if message is None:
            message = "Search query contains an error"
        raise SearchException(message)

    visitor = ContactQLVisitor(as_anon)
    query = ContactQuery(visitor.visit(tree))
    return query.optimized() if optimize else query
def parse(input_: str, *, permissive=False) -> ast.Program:
    """
    Parse a complete OpenQASM 3 program from a string.

    :param input_: A string containing a complete OpenQASM 3 program.
    :param permissive: A Boolean controlling whether ANTLR should attempt to
        recover from incorrect input or not.  Defaults to ``False``; if set to
        ``True``, the reference AST produced may be invalid if ANTLR emits any
        warning messages during its parsing phase.
    :return: A complete :obj:`~ast.Program` node.
    """
    lexer = qasm3Lexer(InputStream(input_))
    stream = CommonTokenStream(lexer)
    parser = qasm3Parser(stream)
    if not permissive:
        # For some reason, the Python 3 runtime for ANTLR 4 is missing the
        # setter method `setErrorHandler`, so we have to set the attribute
        # directly.
        parser._errHandler = BailErrorStrategy()
    try:
        tree = parser.program()
    except (RecognitionException, ParseCancellationException) as exc:
        raise QASM3ParsingError() from exc
    return QASMNodeVisitor().visitProgram(tree)
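# Hedged usage sketch for parse() above: the OpenQASM program text is an illustrative
# example only; parse() and QASM3ParsingError come from the surrounding module as shown.
def demo_parse():
    program = """
        OPENQASM 3.0;
        qubit[2] q;
        bit[2] c;
    """
    node = parse(program)       # strict mode: BailErrorStrategy raises on the first error
    print(type(node).__name__)  # Program
    try:
        parse("OPENQASM 3.0; qubit q")  # missing ';' -> rejected in strict mode
    except QASM3ParsingError as exc:
        print("rejected:", exc)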