示例#1
0
def make_parse_wizard_context(
    script: Union[InputStream, Path, BinaryIO, TextIO, str],
    wrap_excs: bool = True,
    error_listeners: Iterable[ErrorListener] = (),
) -> wizardParser.ParseWizardContext:
    """
    Create a ParseWizardContext from the given script. Depending on the type of
    the script, the following procedure is used to parse the script:
    - If `script` is an `InputStream`, it is used as-is.
    - If `script` is a `Path`, it should point to a file containing a Wizard script.
    - If `script` is a `TextIO`, it is equivalent to the `str` version after reading
        the whole file.
    - If `script` is a `str`, an `InputStream` is constructed from it.

    Args:
        script: The script to create a context for.
        wrap_excs: If True, exceptions will be converted to Wizard exceptions (when
            possible).
        error_listeners: Extra ANTLR error listeners to register on the parser.

    Returns:
        A ParseWizardContext extracted from the given script.
    """

    # Build a character stream from whatever form the script came in:
    char_stream: InputStream
    if isinstance(script, InputStream):
        char_stream = script
    elif isinstance(script, str):
        char_stream = InputStream(script)
    else:
        if isinstance(script, Path):
            with open(script, "rb") as fp:
                data = fp.read()
        else:
            data = script.read()  # type: ignore
        if isinstance(data, bytes):
            # chardet may fail to guess (returns None) on short or ambiguous
            # inputs -- fall back to UTF-8 rather than crash in decode().
            encoding = chardet.detect(data)["encoding"] or "utf-8"
            text = data.decode(encoding)
        else:
            text = data
        char_stream = InputStream(text)

    # Create the lexer and disable console logs:
    lexer = wizardLexer(char_stream)
    lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)

    token_stream = CommonTokenStream(lexer)

    # Create the parser with a custom error strategy:
    parser = wizardParser(token_stream)
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parser._errHandler = WizardErrorStrategy()

    # Register caller-provided listeners (previously accepted but ignored).
    for listener in error_listeners:
        parser.addErrorListener(listener)

    # Run the interpret:
    if wrap_excs:
        return wrap_exceptions(parser.parseWizard)  # type: ignore
    return parser.parseWizard()  # type: ignore
示例#2
0
def build(user, sourceobj=None, sourcetext=None, test_parse=False):
    """Parse a role fragment for ``user`` from a source object or raw text.

    ``sourceobj`` takes precedence when both are given; a ValueError is
    raised when neither is provided. Unless ``test_parse`` is set, the
    resulting tree is walked with a SchemaBuilder as a side effect.
    """
    if sourceobj is None and sourcetext is None:
        raise ValueError("sourceobj or sourcetext must not be None")

    text = sourcetext if sourceobj is None else sourceobj.text
    tree = __setup_parser(InputStream(text)).rolefragment()

    if test_parse:
        return

    # Building the schema is the whole point; the builder records user/source.
    ParseTreeWalker.DEFAULT.walk(SchemaBuilder(user=user, source=sourceobj), tree)
示例#3
0
def parse_code(code, class_parser, class_lexer):
    """
    Parses a code and returns a tree.

    @param      code                code to parse (string or ANTLR stream)
    @param      class_parser        parser class
    @param      class_lexer         lexer class
    @return                         configured parser (call a rule to get the tree)

    .. exref::
        :title: Check the syntax of a script PIG

        ::

            code = '''
            A = LOAD 'filename.txt' USING PigStorage('\t');
            STORE A INTO 'samefile.txt' ;
            '''

            clparser, cllexer = get_parser_lexer("Pig")
            parser = parse_code(code, clparser, cllexer)
            tree = parser.compilation_unit()
            st = get_tree_string(tree, parser, None)
            print(st)
    """
    # Plain strings are wrapped into an ANTLR stream; anything else is
    # assumed to already be a stream.
    source = InputStream(code) if isinstance(code, str) else code
    tokens = CommonTokenStream(class_lexer(source))
    return class_parser(tokens)
    def test_boolean_to_integer(self):
        """int() should map True/False to 1/0 inside a list literal."""
        source = """
                         run away with [int(True),int(False)];

                        """
        result = execute(InputStream(source), True)
        self.assertEqual([1, 0], result)
    def test_range_start_and_end_are_zero(self):
        """range(0,0) evaluates to an empty list."""
        source = """
                    run away with range(0,0);
                """
        self.assertEqual([], execute(InputStream(source), False))
    def test_range_start_lower_than_end(self):
        """A descending range(1,0) also yields the empty list."""
        source = """
            run away with range(1,0);
        """
        self.assertEqual([], execute(InputStream(source), False))
    def test_string_to_float(self):
        """float() parses ".32" and the trailing-sign form "5-"."""
        source = """
                                run away with [float(".32"),float("5-")];

                               """
        result = execute(InputStream(source), True)
        self.assertEqual([3.2, -5.0], result)
示例#8
0
def run_parser(quil):
    # type: (str) -> List[AbstractInstruction]
    """
    Run the ANTLR parser.

    :param str quil: a single or multiline Quil program
    :return: list of instructions that were parsed
    """
    # Lex and tokenize the raw program text.
    token_stream = CommonTokenStream(QuilLexer(InputStream(quil)))

    # Parse, routing syntax errors through our own listener instead of
    # ANTLR's console output.
    parser = QuilParser(token_stream)
    parser.removeErrorListeners()
    parser.addErrorListener(CustomErrorListener())
    tree = parser.quil()

    # Walk the tree; the listener accumulates the parsed instructions.
    listener = PyQuilListener()
    ParseTreeWalker().walk(listener, tree)
    return listener.result
示例#9
0
    def test_antecedents_terms_have_correct_mf_values_with_more_then_one_term(self):
        """Each TERM in the FUZZIFY block should yield its expected mf array."""
        fcl_text = """
        FUNCTION_BLOCK my_system
            FUZZIFY antecedent1
                TERM mf1 := (0, 1) (0.5, 0);
                TERM mf2 := (1, 0.3) (2, 0) (3, 1);
                TERM mf3 := (2, 0.4) (4, 1) (5, 1);
            END_FUZZIFY
        END_FUNCTION_BLOCK
        """
        tree = FclParser(CommonTokenStream(FclLexer(InputStream(fcl_text)))).main()

        listener = ScikitFuzzyFclListener()
        ParseTreeWalker().walk(listener, tree)

        antecedent = listener.antecedents.get('antecedent1').get('value')
        expected = {
            'mf1': [1, 0, 0, 0, 0, 0, 0],
            'mf2': [0, 0, 0.3, 0, 1, 0, 0],
            'mf3': [0, 0, 0, 0.4, 0.7, 1, 1],
        }
        for label, values in expected.items():
            np.testing.assert_array_equal(np.asarray(values), antecedent[label].mf)
示例#10
0
def main():
    """Parse everything on stdin with the Exemple2 grammar, then report."""
    source = InputStream(sys.stdin.read())
    tokens = CommonTokenStream(Exemple2Lexer(source))
    Exemple2Parser(tokens).r()
    print("Finished")
示例#11
0
def parse_chord(label):
    """
    Parses a string chord label from a string form to ChorlLabel instance
    (containing a set of pitch classes, root, bass).

    Examples:
    ```
    from chord_labels import parse_chord

    chord = parse_chord("C:maj7")
    assert chord.tones == [0, 4, 7, 11]
    assert chord.tones_binary == [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1]

    assert parse_chord("F#").root == 6

    assert parse_chord("C#/5").bass == 8
    ```
    """
    token_stream = CommonTokenStream(ChordLabelLexer(InputStream(label)))
    parser = ChordLabelParser(token_stream)
    # Replace default listeners so malformed labels raise via ChordErrorListener.
    parser._listeners = [ChordErrorListener()]
    tree = parser.chord()

    reader = ChordLabelReader()
    ParseTreeWalker().walk(reader, tree)
    return reader.chord_label
示例#12
0
    def _runTest(self, text):
        """Parse ``text``, assert it is error-free, then run the link checker.

        On a syntax failure the source is echoed with line numbers so the
        failing construct can be located quickly.
        """
        input_stream = InputStream(text)
        parser = CUParser(input_stream, unittest.TestCase.id)
        cu = parser.parse()

        if len(cu.markers) > 0:
            print("Test Failed:")
            # splitlines() handles the final line correctly even when it has
            # no trailing newline; the old readline()/line[:-1] loop chopped
            # the last character of such a line.
            for i, line in enumerate(text.splitlines(), start=1):
                print("%3d: %s" % (i, line))

        self.assertEqual(len(cu.markers), 0, "Syntax Errors")

        # Now, run the linker...
        v = LinkVisitor([cu])
        v.link()

        # Run the link checker to ensure we didn't miss resolving any
        v = TestLinker.LinkCheckVisitor()
        cu.accept(v)
示例#13
0
File: Sw.py  Project: RedKnite5/Switch
def comp(source, file=False):
    """Compile Switch source into Python code.

    ``source`` is raw Switch code, or a path to a file containing it when
    ``file`` is True. Returns the generated Python code as a bytearray.
    """
    output = bytearray("", "utf-8")

    # Names the generated code uses for Switch built-ins.
    namespace = {
        "->": "print_no_nl",
        ":": "SwitchMap",
        "...": "SwitchList",
    }

    char_stream = FileStream(source) if file else InputStream(source)
    lexer = switchLexer(char_stream)
    parser = switchParser(CommonTokenStream(lexer))

    # Fail loudly on any lex/parse error instead of printing to the console.
    for target in (lexer, parser):
        target.removeErrorListeners()
        target.addErrorListener(ExceptionListener())

    tree = parser.switch_file()
    printer = SwitchPrintListener(output, namespace)
    MyWalker().walk(printer, tree)

    return output
示例#14
0
    def test_rule_if_clause_condition_then_clause_with_x(self):
        """THEN-clause IDs and the WITH weight should be exposed on the parse tree."""
        fcl_text = """
        FUNCTION_BLOCK f_block
            RULEBLOCK rule1
                RULE first_rule : IF something AND otherthing THEN final IS final2 WITH 123;
            END_RULEBLOCK
        END_FUNCTION_BLOCK
        """

        # Inline listener: `_self` is the listener instance, while the
        # closed-over `self` is the enclosing test case (for assertions).
        class FclListenerRules(FclListener):
            def enterThen_clause(_self, ctx):
                conclusion = ctx.conclusion()
                # Only one sub-conclusion in this rule.
                subconclusion = conclusion.sub_conclusion()[0]
                final = subconclusion.ID()[0].getText()
                final2 = subconclusion.ID()[1].getText()
                self.assertEqual(final, 'final')
                self.assertEqual(final2, 'final2')

            def enterWith_x(_self, ctx):
                # The WITH weight is lexed as a REAL token.
                real = ctx.REAL().getText()

                self.assertEqual(real, '123')

        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        # Walking the tree fires the listener callbacks above.
        listener = FclListenerRules()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
示例#15
0
    def test_rule_if_clause_condition_if_clause_with_and(self):
        """An AND condition exposes operand, operator, operand as its children."""
        fcl_text = """
        FUNCTION_BLOCK f_block
            RULEBLOCK rule1
                RULE first_rule : IF something AND otherthing THEN conclusion IS final;
            END_RULEBLOCK
        END_FUNCTION_BLOCK
        """

        outer = self  # the test case, visible inside the listener below

        class FclListenerRules(FclListener):
            def enterIf_clause(_self, ctx):
                condition = ctx.condition()
                parts = [condition.getChild(i).getText() for i in range(3)]
                outer.assertEqual(parts, ['something', 'AND', 'otherthing'])

        tree = FclParser(CommonTokenStream(FclLexer(InputStream(fcl_text)))).main()

        # Walking the tree fires enterIf_clause above.
        ParseTreeWalker().walk(FclListenerRules(), tree)
示例#16
0
def autocomplete(current_input, query):
    """
    Use :class:`.JSONPathAutoCompleteListener`
    to parse the query and give auto-completion suggestions.

    :param current_input: the current query input
    :type current_input: str
    :param query: the query callback which can be used to get data.
    :type query: callback
    """
    lexer = JSONPathLexer(InputStream(current_input))
    lexer.removeErrorListeners()

    parser = JSONPathParser(CommonTokenStream(lexer))
    parser.removeErrorListeners()

    # One listener serves double duty: syntax errors and incomplete fields.
    listener = JSONPathAutoCompleteListener(query)
    parser.addErrorListener(listener)
    parser.addParseListener(listener)

    # Drive the parse; results accumulate on the listener.
    parser.jsonpath()

    return listener.is_partial_complete, listener.prefix, listener.options
示例#17
0
    def parse(self, s: str) -> Wish:
        """Parse ``s`` with the wish grammar and visit the resulting tree."""
        tokens = CommonTokenStream(wish_grammarLexer(InputStream(s)))
        parser = wish_grammarParser(tokens)
        parser._listeners = [WIshParseListener()]
        tree = parser.start()
        return self._visitor.visit(tree)
示例#18
0
    def test_antecedents_terms_have_correct_mf_values_using_singleton_and_piecewise(self):
        """Singleton and piecewise TERM definitions both yield correct mf arrays."""
        fcl_text = """
        FUNCTION_BLOCK my_system
            FUZZIFY antecedent1
                TERM mf1 := 4.0;
                TERM mf2 := (0, 0.2) (2, 0) (3, 1);
                TERM mf3 := 1.0;
            END_FUZZIFY
        END_FUNCTION_BLOCK
        """
        tree = FclParser(CommonTokenStream(FclLexer(InputStream(fcl_text)))).main()

        listener = ScikitFuzzyFclListener()
        ParseTreeWalker().walk(listener, tree)

        antecedent = listener.antecedents.get('antecedent1').get('value')
        # Values are fx[0]..fx[4] over the inferred universe.
        expected = {
            'mf1': [0, 0, 0, 0, 1],      # singleton at 4.0
            'mf2': [0.2, 0.1, 0, 1, 0],  # piecewise definition
            'mf3': [0, 1, 0, 0, 0],      # singleton at 1.0
        }
        for label, values in expected.items():
            np.testing.assert_array_equal(np.asarray(values), antecedent[label].mf)
示例#19
0
    def test_boolean_to_float(self):
        """float() should map True/False to 1.0/0.0 inside a list literal."""
        source = """
                         run away with [float(True),float(False)];

                        """
        result = execute(InputStream(source), True)
        self.assertEqual([1.0, 0.0], result)
示例#20
0
    def test_consequent_define_universe_override_range_defined_in_var_if_defined_in_consequent(self):
        """A RANGE in DEFUZZIFY should override the range declared on the VAR."""
        fcl_text = """
        FUNCTION_BLOCK my_system
            VAR_output
                consequent1 : REAL (1 .. 9);
            END_VAR
            DEFUZZIFY consequent1
                RANGE := (0 .. 30);
            END_DEFUZZIFY
        END_FUNCTION_BLOCK
        """
        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        # The original code built a listener, walked the tree, then threw the
        # result away and did the exact same thing again; one walk suffices.
        listener = ScikitFuzzyFclListener()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)

        consequents = listener.consequents
        expected_universe = np.asarray([0., 30.])
        self.assertIn('consequent1', consequents)
        self.assertEqual('consequent1', consequents.get('consequent1').get('value').label)
        np.testing.assert_array_equal(expected_universe, consequents.get('consequent1').get('value').universe)
示例#21
0
    def test_range(self):
        """range(0,9) yields the integers 0 through 8."""
        source = """
            run away with range(0,9);
        """
        self.assertEqual(list(range(9)), execute(InputStream(source), False))
示例#22
0
def is_source_valid(source):
    """Return True when ``source`` parses as a C++14 translation unit cleanly."""
    tokens = CommonTokenStream(CPP14Lexer(InputStream(source)))
    parser = CPP14Parser(tokens)
    parser.translationunit()
    # ANTLR counts recovered syntax errors here; zero means a clean parse.
    return parser._syntaxErrors == 0
示例#23
0
    def test_range_negative_numbers(self):
        """range(-5,3) spans negative through positive values, end exclusive."""
        source = """
                  run away with range(-5,3);
              """
        self.assertEqual(list(range(-5, 3)), execute(InputStream(source), False))
示例#24
0
def do_parse(infilename: str,
             jsonfilename: Optional[str],
             rdffilename: Optional[str],
             rdffmt: str,
             context: Optional[str] = None) -> bool:
    """
    Parse the jsg in infilename and save the results in outfilename
    :param infilename: name of the file containing the ShExC
    :param jsonfilename: target ShExJ equivalent
    :param rdffilename: target ShExR equivalent
    :param rdffmt: target RDF format
    :param context: @context to use for rdf generation. If None use what is in the file
    :return: true if success
    """

    inp = InputStream(load_shex_file(infilename))

    shexj = parse(inp)
    if shexj is None:
        return False

    shexj['@context'] = context if context else "http://www.w3.org/ns/shex.jsonld"
    if jsonfilename:
        with open(jsonfilename, 'w') as outfile:
            outfile.write(as_json(shexj))
    if rdffilename:
        g = Graph().parse(data=as_json(shexj, indent=None),
                          format="json-ld")
        # Use a context manager so the output handle is closed deterministically
        # (the old code passed a bare open() and leaked the file object).
        with open(rdffilename, "wb") as rdf_out:
            g.serialize(rdf_out, format=rdffmt)
    return True
示例#25
0
    def test_number_to_integer(self):
        """int() truncates floats toward zero and passes ints through."""
        source = """
                       run away with [int(3.001), int(4.0), int(5), int(6.9)];

                      """
        result = execute(InputStream(source), True)
        self.assertEqual([3, 4, 5, 6], result)
示例#26
0
def parse(input_: Union[str, InputStream],
          default_base: Optional[str] = None) -> Optional[Schema]:
    """
    Parse the text in infile and return the resulting schema
    :param input_: text or input stream to parse
    :param default_base: base URI for relative URI's in schema
    :return: ShExJ Schema object.  None if error.
    """

    error_listener = ParseErrorListener()

    # Step 1: Tokenize the input stream
    stream = input_ if isinstance(input_, InputStream) else InputStream(input_)
    lexer = ShExDocLexer(stream)
    lexer.addErrorListener(error_listener)
    tokens = CommonTokenStream(lexer)
    tokens.fill()
    if error_listener.n_errors:  # Lexer prints errors directly
        return None

    # Step 2: Generate the parse tree
    antlr_parser = ShExDocParser(tokens)
    antlr_parser.addErrorListener(error_listener)
    parse_tree = antlr_parser.shExDoc()
    if error_listener.n_errors:
        print('\n'.join(error_listener.errors), file=sys.stderr)
        return None

    # Step 3: Transform the parse tree into the schema. Note: ShexDocParser
    # (lowercase 'x') is our transformer, distinct from ANTLR's ShExDocParser.
    transformer = ShexDocParser(default_base=default_base)
    transformer.visit(parse_tree)

    return transformer.context.schema
示例#27
0
    def test_string_to_integer(self):
        """int() parses ".32" -> 3 and the trailing-sign form "5-" -> -5."""
        source = """
                                run away with [int(".32"),int("5-")];

                               """
        result = execute(InputStream(source), True)
        self.assertEqual([3, -5], result)
示例#28
0
def make_parser(data):
    # type: (str) -> RelayParser
    """Build a RelayParser over the given source text."""
    char_stream = InputStream(data)
    tokens = CommonTokenStream(RelayLexer(char_stream))
    return RelayParser(tokens)
示例#29
0
 def parse(self, expr: str) -> Value:
     """Parse ``expr`` and evaluate its first expression against self.state."""
     tokens = CommonTokenStream(wizardLexer(InputStream(expr)))
     parser = wizardParser(tokens)
     # Abort on the first syntax error rather than trying to recover.
     parser._errHandler = BailErrorStrategy()
     first_expr = parser.parseWizard().body().expr(0)
     return self.visitExpr(first_expr, self.state)
示例#30
0
    def _do_load(self, text: str, path: str, offset: int, in_memory: bool, imports: List['Model']) -> 'Model':
        """Parse ``text`` as a grammar and load it into a fresh model.

        Returns a model flagged as erroneous when the parse reports syntax
        errors; otherwise runs the meta, lexer-rule and parser-rule loaders.
        """
        def _attach_logging(target):
            # Route ANTLR diagnostics through our logging listener only.
            target.removeErrorListeners()
            target.addErrorListener(LoggingErrorListener(path, offset))

        lexer = Lexer(InputStream(text))
        _attach_logging(lexer)

        parser = Parser(CommonTokenStream(lexer))
        _attach_logging(parser)

        tree = parser.grammarSpec()

        if parser.getNumberOfSyntaxErrors():
            # Parse failed: hand back an error-flagged model.
            return ModelImpl(path, offset, in_memory, True)

        model = ModelImpl(path, offset, in_memory, False)
        for imported in imports or []:
            model.add_import(imported)

        # Populate the model in three passes over the same tree.
        MetaLoader(model, self).visit(tree)
        LexerRuleLoader(model).visit(tree)
        ParserRuleLoader(model).visit(tree)

        return model