def tokenize(string: str) -> Tuple[ASTBuilderVisitor, PyNestMLParser]:
    """Build an AST-builder visitor and a parser for the given model text."""
    token_stream = CommonTokenStream(PyNestMLLexer(InputStream(string)))
    # Eagerly pull every token so the visitor sees the complete token list.
    token_stream.fill()
    return ASTBuilderVisitor(token_stream.tokens), PyNestMLParser(token_stream)
def test_float(self):
    """A tiny float literal must round-trip through the interpreter.

    Fix: the original passed the tolerance as ``assertEqual``'s third
    positional argument, which unittest treats as the failure *message*,
    so no tolerance was actually applied.  ``assertAlmostEqual`` with an
    explicit ``delta`` expresses the intent correctly.
    """
    program = """ a == 0.0000003; run away with a; """
    self.assertAlmostEqual(0.0000003, execute(InputStream(program)),
                           delta=0.0000000000000001)
def test_eventually_bounded(self):
    """'eventually[0,1] a' must rewrite to Until(True, a, 0, 1)."""
    token_stream = CommonTokenStream(SpecificationLexer(InputStream("eventually[0,1] a")))
    tree = SpecificationParser(token_stream).specification()
    specification = AST().visit(tree).accept(Rewriter())
    self.assertEqual(specification, Until(Boolean(True), Atomic("a"), 0, 1))
def ltlParse(str_: str) -> Formula:
    """Parse an LTL formula; return None when the input has syntax errors."""
    parser = LTLParser(CommonTokenStream(LTLLexer(InputStream(str_))))
    # Parse first so the parser's syntax-error counter is populated.
    formula = parser.ltl().f
    if parser.getNumberOfSyntaxErrors() == 0:
        return formula
    return None
def parse(statement):
    """Parse a Sysvis story and evaluate it with the constructor visitor."""
    lexer = SysvisLexer(InputStream(statement))
    parser = SysvisParser(CommonTokenStream(lexer))
    story_tree = parser.story()
    return SysvisConstructor().visit(story_tree)
def parse(self, str_: str) -> list:
    """Parse a Buchi automaton description and return its list of states."""
    lexer = BuchiLexer(InputStream(str_))
    parser = BuchiParser(CommonTokenStream(lexer))
    unit = parser.compilationUnit()
    return unit.states_list
def test_list_insert_position_can_be_float_without_decimal(self):
    """A float index with no fractional part behaves like an int index."""
    # NOTE(review): the inserted literal "AVEUL" does not match the expected
    # "VALUE" — presumably the language transforms strings; confirm intent.
    program = """ list == [3,4]; list.insert(1.000000000000000,"AVEUL"); run away with list; """
    outcome = execute(InputStream(program))
    self.assertEqual([3, "VALUE", 4], outcome)
def test_assign_subattributes(self):
    """Nested attribute chains can be assigned and read back."""
    program = """ a == 6; a.a == 3; a.a.B == 5; run away with [a.a,a.a.B];"""
    outcome = execute(InputStream(program))
    self.assertEqual([3, 5], outcome)
def test_inminus(self):
    """The in-place minus operator '=-=' subtracts into the variable."""
    program = """ a == 4; a=-=5; run away with a; """
    outcome = execute(InputStream(program), True)
    self.assertEqual(-1, outcome)
def parse(openqasm3_program: str) -> QASMNode:
    """Parse OpenQASM 3 source text into a QASMNode AST."""
    token_stream = CommonTokenStream(qasm3Lexer(InputStream(openqasm3_program)))
    parse_tree = qasm3Parser(token_stream).program()
    return QASMNodeVisitor().visitProgram(parse_tree)
def test_multiple_negation(self):
    """An even number of '!' prefixes leaves a boolean unchanged."""
    program = """ run away with !!!!True; """
    self.assertTrue(execute(InputStream(program), True))
def test_variables_with_numbers_and_underscore(self):
    """Identifiers may mix underscores, digits and mixed case."""
    program = """ __oh_bOy53 == 14; run away with __oh_bOy53; """
    outcome = execute(InputStream(program), True)
    self.assertEqual(14, outcome)
def test_list_attributes(self):
    """Arbitrary attributes can be attached to a list value."""
    program = """ list == [3,4]; list.attr == 99; run away with list.attr; """
    outcome = execute(InputStream(program))
    self.assertEqual(99, outcome)
def test_string_new_line(self):
    """String literals may carry embedded newline escapes."""
    # NOTE(review): both the program literals and the expected values look
    # scrambled; presumably the language transforms strings — confirm intent.
    program = r""" run away with ["Just one \n line","nOher linenotAn\ enil e"]; """
    value = execute(InputStream(program), True)
    self.assertEqual(["uJenil ne \st on", "One line \nAnother line"], value)
def test_indivision(self):
    """The in-place division operator '=/=' divides into the variable."""
    program = """ a == 4; a=/=5; run away with a; """
    outcome = execute(InputStream(program), True)
    self.assertEqual(0.8, outcome)
def test_inmult(self):
    """The in-place multiply operator '=*=' multiplies into the variable."""
    program = """ a == 4; a=*=5; run away with a; """
    outcome = execute(InputStream(program), True)
    self.assertEqual(20, outcome)
def test_insum(self):
    """The in-place plus operator '=+=' adds into the variable."""
    program = """ a == 4; a=+=5; run away with a; """
    outcome = execute(InputStream(program), True)
    self.assertEqual(9, outcome)
def run_validator(pattern, stix_version=DEFAULT_VERSION):
    """Validate a pattern against the STIX Pattern grammar.

    Error messages are returned in a list; an empty list means the
    pattern validated cleanly.
    """
    start = ''
    if isinstance(pattern, six.string_types):
        # Capture the first characters before wrapping the text in a stream.
        start = leading_characters(pattern, 2)
        pattern = InputStream(pattern)
    if not start:
        # Already a stream (or empty text): peek at the first line, then rewind.
        start = leading_characters(pattern.readline(), 2)
        pattern.seek(0)
    validator = run_validator21 if stix_version == '2.1' else run_validator20
    return validator(pattern, start)
def java_tokenize(line):
    """Lex a line of Java source and return the token texts as a list.

    Fixes: dropped the redundant ``list()`` wrapper around what should be
    a plain list comprehension, and removed commented-out dead code.
    """
    stream = InputStream(line)
    lexer = JavaLexer(stream)
    tokens = CommonTokenStream(lexer)
    tokens.fetch(100000)  # eagerly pull up to 100k tokens into the stream
    return [token.text for token in tokens.tokens]
def parse_quil(quil: str) -> inst.Program:
    """Parse a Quil program string and return a Program.

    To convert a pyQuil program to a QuantumFlow Program, first convert it
    to Quil text: `quantumflow_program = qf.parse_quil(str(pyquil_program))`
    """
    return _parse(InputStream(quil))
def java2CFG(line):
    """Parse a line of Java bytes into an ANTLR compilation-unit tree."""
    # NOTE(review): the lexer is built via lib.parser(...) — presumably the
    # lexer factory in `lib`; confirm it is not meant to be a JavaLexer class.
    char_stream = InputStream(line.decode('utf-8', 'ignore'))
    token_stream = CommonTokenStream(lib.parser(char_stream))
    return lib.JavaParser(token_stream).compilationUnit()
def test_method_not_callable(self, mocked_print):
    """Calling a plain attribute as a method reports NotCallableException."""
    program = """ a==3; a.b == 3; a.b();"""
    execute(InputStream(program), False)
    expected = "NotCallableException line 3: The variable a.b is not callable."
    mocked_print.assert_called_once_with(expected)
def test_quantic_boolean_to_float(self, mocked_print):
    """Converting a quantic boolean to float prints the evaluation error."""
    program = """ float(yTrue);"""
    execute(InputStream(program), False)
    expected = "EvaluateQuanticBooleanException line 2: Quantic booleans can't be evaluated or operated as regular booleans. Use evalX() or evalY() to evaluate them first."
    mocked_print.assert_called_once_with(expected)
def test_method_attributes(self):
    """Attributes can be attached below a built-in attribute like length."""
    program = """ string == "This is a string"; string.length.a == 42; run away with string.length.a; """
    outcome = execute(InputStream(program))
    self.assertEqual(42, outcome)
def make_parser(data: str) -> RelayParser:
    """Build a RelayParser over *data* with strict error listeners attached."""
    char_stream = InputStream(data)
    lexer = RelayLexer(char_stream)
    lexer.addErrorListener(StrictErrorListener(data))
    parser = RelayParser(CommonTokenStream(lexer))
    parser.addErrorListener(StrictErrorListener(data))
    return parser
def test_return_quantic_boolean(self):
    """A quantic boolean survives being returned from the program."""
    program = """ run away with xTrue; """
    outcome = execute(InputStream(program), False)
    self.assertIsNotNone(outcome)
    self.assertTrue(outcome.value)
def test_syntax_antlr(init):
    """Correct scripts yield an AST; incorrect ones yield None."""
    script, name = init['script'], init['correctness']
    ast = TreeAST(InputStream(script)).tree
    if name == 'correct':
        assert ast is not None
    else:
        assert ast is None
def test_object_equality(self):
    """Two distinct objects compare unequal under ':='."""
    program = """ a == object(); b == object(); run away with a := b; """
    outcome = execute(InputStream(program), True)
    self.assertFalse(outcome)
def __init__(self, string: str, error_listener: ErrorListener = None):
    """Create a stream over an in-memory string.

    Args:
        string (str): The JavaScript source code to stream.
        error_listener (ErrorListener): Optional custom error listener;
            the default listener is used when omitted or None.
    """
    super().__init__(error_listener)
    self._input_stream = InputStream(string)
def parse(query_string):
    """Parse a Cypher query and return the visitor's evaluation result."""
    lexer = CypherLexer(InputStream(query_string))
    parser = CypherParser(CommonTokenStream(lexer))
    cypher_tree = parser.oC_Cypher()
    return ParseTreeVisitor().visit(cypher_tree)
def execute(self, lexer:Lexer, input:InputStream, startIndex:int):
    """Run every lexer action held by this executor against *lexer*.

    Args:
        lexer: The lexer the actions operate on.
        input: The character stream; its position may be moved temporarily
            so position-dependent actions see the right index.
        startIndex: Stream index where the matched token started.
    """
    requiresSeek = False
    # Remember where the stream currently is so it can be restored.
    stopIndex = input.index
    try:
        for lexerAction in self.lexerActions:
            if isinstance(lexerAction, LexerIndexedCustomAction):
                offset = lexerAction.offset
                # Reposition to where this action was matched, then unwrap
                # the real action to execute.
                input.seek(startIndex + offset)
                lexerAction = lexerAction.action
                # Only restore later if we actually moved off stopIndex.
                requiresSeek = (startIndex + offset) != stopIndex
            elif lexerAction.isPositionDependent:
                # Position-dependent actions run at the stop position,
                # so no restore is needed afterwards.
                input.seek(stopIndex)
                requiresSeek = False
            lexerAction.execute(lexer)
    finally:
        # Restore the stream position even if an action raised.
        if requiresSeek:
            input.seek(stopIndex)