Code Example #1
    def getPythonCode(self):
        preprocessor = PML_Preprocessor(self.rootDir)
        preprocessor.process(self.pmlFilePath)

        inputStream = InputStream(preprocessor.getStream())
        lexer = pmlLexer(inputStream)
        stream = CommonTokenStream(lexer)
        parser = pmlParser(stream)

        parser.removeErrorListeners()
        exceptionListener = ParserExceptionListener()
        parser.addErrorListener(exceptionListener)

        try:
            tree = parser.styles()
        except ParserException as e:
            line, col, msg = e.errParams()
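            # Map the error position in the preprocessed stream back to the original source file and line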
            localFile, localLine = preprocessor.trackDownLineNr(line)
            raise Exception(
                "Error in file {file} on line {line}, col {col}: {msg}".format(
                    file=localFile, line=localLine, col=col, msg=msg))

        translator = PythonListener()
        walker = ParseTreeWalker()
        walker.walk(translator, tree)

        return translator.getCode()
Code Example #2
    def __init__(self, parse_tree):
        self.__ast_root = None
        self.__walker = ParseTreeWalker()
        self.__parse_tree = parse_tree
        self.__node_stack = list()

        AbstractSyntaxTree.node_count = 0
Code Example #3
 def startInsert(self):
     self.insert_points = []
     self.insert_tokens = []
     walker = ParseTreeWalker()
     walker.walk(self, self.parser.compilationUnit())
     print(self.insert_points)
     self._insert()
Code Example #4
def parse_chord(label):
    """
    Parses a chord label string into a ChordLabel instance
    (containing a set of pitch classes, root, and bass).

    Examples:
    ```
    from chord_labels import parse_chord

    chord = parse_chord("C:maj7")
    assert chord.tones == [0, 4, 7, 11]
    assert chord.tones_binary == [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1]

    assert parse_chord("F#").root == 6

    assert parse_chord("C#/5").bass == 8
    ```
    """
    lexer = ChordLabelLexer(InputStream(label))
    stream = CommonTokenStream(lexer)
    parser = ChordLabelParser(stream)
    parser._listeners = [ChordErrorListener()]
    chordContext = parser.chord()
    walker = ParseTreeWalker()
    listener = ChordLabelReader()
    walker.walk(listener, chordContext)
    return listener.chord_label
Code Example #5
def print_prestosql_full_graph(tree, rule_names):

    walker = ParseTreeWalker()
    listener = PrestoSQLGraphListener(rule_names)
    walker.walk(listener, tree)

    return listener.graph
Code Example #7
def main():

    program = "a = 37 \n"
    program += "b = a+3 \n"

    program += "j = set[5 10 1] \n"
    program += "c = set[10 15 1] \n"
    program += "c \n"
    program += "d = c.belongs(19)"
    program += "e = c.sum"
    program += "f = c.prom"
    program += "g = c.long"
    program += "h = c.comp"
    #program +=  "i = j union c"
    #program +=  "m = c inter j"
    program += "k = c diff j"

    input = InputStream(program)
    lexer = ConjuntosLexer(input)
    stream = CommonTokenStream(lexer)
    parser = ConjuntosParser(stream)

    tree = parser.program()

    listener = RealListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    print(listener.variables)
Code Example #8
File: PyQuilListener.py Project: tsatir/pyquil
def run_parser(quil):
    # type: (str) -> List[AbstractInstruction]
    """
    Run the ANTLR parser.

    :param str quil: a single or multiline Quil program
    :return: list of instructions that were parsed
    """
    # Step 1: Run the Lexer
    input_stream = InputStream(quil)
    lexer = QuilLexer(input_stream)
    stream = CommonTokenStream(lexer)

    # Step 2: Run the Parser
    parser = QuilParser(stream)
    parser.removeErrorListeners()
    parser.addErrorListener(CustomErrorListener())
    tree = parser.quil()

    # Step 3: Run the Listener
    pyquil_listener = PyQuilListener()
    walker = ParseTreeWalker()
    walker.walk(pyquil_listener, tree)

    return pyquil_listener.result
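
A minimal usage sketch for run_parser (the two-line Quil program below is an assumption for illustration, not taken from the pyquil sources):

    instructions = run_parser("H 0\nCNOT 0 1\n")
    for instruction in instructions:
        print(instruction)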
Code Example #9
File: PML2PythonTranslator.py Project: vvoovv/pml
def main(argv):
    # don't know if this works for all OS
    input_stream = FileStream(argv[1])
    lexer = pmlLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = pmlParser(stream)

    parser.removeErrorListeners()
    exceptionListener = ParserExceptionListener()
    parser.addErrorListener(exceptionListener)

    # error management
    hadSyntaxErrors = False
    try:
        tree = parser.styles()
    except Exception as e:
        errorText = str(e)
        hadSyntaxErrors = True

    if not hadSyntaxErrors:
        translator = PythonListener()
        walker = ParseTreeWalker()
        walker.walk(translator, tree)
        sys.stdout.write(translator.getCode())
    else:
        sys.stdout.write(errorText)
Code Example #10
    def test_antecedents_terms_have_correct_mf_values_using_singleton_and_piecewise(self):
        fcl_text = """
        FUNCTION_BLOCK my_system
            FUZZIFY antecedent1
                TERM mf1 := 4.0;
                TERM mf2 := (0, 0.2) (2, 0) (3, 1);
                TERM mf3 := 1.0;
            END_FUZZIFY
        END_FUNCTION_BLOCK
        """
        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        listener = ScikitFuzzyFclListener()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
        antecedent = listener.antecedents.get('antecedent1').get('value')
        term = antecedent['mf1']
        expected_mf_value = np.asarray([0, 0, 0, 0, 1])  # fx[0], fx[1], fx[2], fx[3], f[4]
        np.testing.assert_array_equal(expected_mf_value, term.mf)

        term = antecedent['mf2']
        expected_mf_value = np.asarray([0.2, 0.1, 0, 1, 0])  # fx[0], fx[1], fx[2], fx[3], f[4]
        np.testing.assert_array_equal(expected_mf_value, term.mf)

        term = antecedent['mf3']
        expected_mf_value = np.asarray([0, 1, 0, 0, 0])  # fx[0], fx[1], fx[2], fx[3], f[4]
        np.testing.assert_array_equal(expected_mf_value, term.mf)
Code Example #11
    def test_rule_if_clause_condition_if_clause_with_and(self):
        fcl_text = """
        FUNCTION_BLOCK f_block
            RULEBLOCK rule1
                RULE first_rule : IF something AND otherthing THEN conclusion IS final;
            END_RULEBLOCK
        END_FUNCTION_BLOCK
        """

        class FclListenerRules(FclListener):
            def enterIf_clause(_self, ctx):
                condition = ctx.condition()
                something = condition.getChild(0).getText()
                operator = condition.getChild(1).getText()
                otherthing = condition.getChild(2).getText()
                self.assertEqual(something, 'something')
                self.assertEqual(operator, 'AND')
                self.assertEqual(otherthing, 'otherthing')

        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        listener = FclListenerRules()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
Code Example #12
    def execute(self, input_source):
        parser = JavaParser(CommonTokenStream(JavaLexer(FileStream(input_source, encoding="utf-8"))))
        walker = ParseTreeWalker()
        walker.walk(self.listener, parser.compilationUnit())
        # print(self.listener.called_methods)
        # print(self.listener.methods)
        # print(self.listener.calsses)
        print(self.listener.calledMethodToMethod)
        for key in self.listener.calledMethodToMethod:
            print(key)
        
        for value in self.listener.calledMethodToMethod.values():
            print(value)

        save_row = {}

        with open("a.csv",'w') as f:
            fieldnames = ['called method', 'method']
            writer = csv.DictWriter(f, fieldnames=fieldnames, delimiter=",",quotechar='"')
            writer.writeheader()

            for calledMethod in self.listener.calledMethodToMethod.keys():
                writer.writerow({'called method': calledMethod, 'method': self.listener.calledMethodToMethod[calledMethod] })
                print(calledMethod)
                print(self.listener.calledMethodToMethod[calledMethod])
Code Example #13
    def test_rule_if_clause_condition_then_clause_with_x(self):
        fcl_text = """
        FUNCTION_BLOCK f_block
            RULEBLOCK rule1
                RULE first_rule : IF something AND otherthing THEN final IS final2 WITH 123;
            END_RULEBLOCK
        END_FUNCTION_BLOCK
        """

        class FclListenerRules(FclListener):
            def enterThen_clause(_self, ctx):
                conclusion = ctx.conclusion()
                subconclusion = conclusion.sub_conclusion()[0]
                final = subconclusion.ID()[0].getText()
                final2 = subconclusion.ID()[1].getText()
                self.assertEqual(final, 'final')
                self.assertEqual(final2, 'final2')

            def enterWith_x(_self, ctx):
                real = ctx.REAL().getText()

                self.assertEqual(real, '123')

        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        listener = FclListenerRules()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
Code Example #14
File: dsl.py Project: balrok/netaddition
def get_na_rules_from_file(file: str) -> List[Addition]:
    cl = MyListener()
    tree = getTree(file)
    walker = ParseTreeWalker()
    walker.walk(cl, tree)
    resolve_pn(cl.rules)
    return cl.rules
Code Example #15
    def test_var_input_and_output(self):
        fcl_text = """
        FUNCTION_BLOCK f_block
            VAR_INPUT
                input_id1 : REAL;
            END_VAR
            VAR_OUTPUT
                output_id1 : REAL;
            END_VAR
            VAR_INPUT
                input_id2 : REAL;
            END_VAR
        END_FUNCTION_BLOCK
        """
        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()
        listener = FclListenerTester()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)

        self.assertEqual(['output_id1', 'REAL'], listener.outputs[0])
        self.assertEqual(['input_id1', 'REAL'], listener.inputs[0])
        self.assertEqual(['input_id2', 'REAL'], listener.inputs[1])
Code Example #16
File: primitive.py Project: ryu-bu/pyLFR-1
    def get_default_netlist(self, cn_id: str,
                            name_gen: NameGenerator) -> MINTDevice:
        if self.type is not PrimitiveType.NETLIST:
            raise Exception(
                "Cannot execute this method for this kind of primitive")

        default_mint_file = parameters.LIB_DIR.joinpath(
            self._default_netlist).resolve()

        if not path.exists(default_mint_file):
            raise Exception("Default netlist file does not exist")

        finput = FileStream(default_mint_file)
        lexer = mintLexer(finput)
        stream = CommonTokenStream(lexer)
        parser = mintParser(stream)
        tree = parser.netlist()

        walker = ParseTreeWalker()
        listener = MINTCompiler()
        walker.walk(listener, tree)

        device = listener.current_device

        name_gen.rename_netlist(cn_id, device)
        # Return the default netlist
        return device
Code Example #17
 def execute(self, input_source):
     parser = Java8Parser(
         CommonTokenStream(
             Java8Lexer(FileStream(input_source, encoding="utf-8"))))
     walker = ParseTreeWalker()
     walker.walk(self.listener, parser.compilationUnit())
     return self.listener.ast_info
Code Example #18
 def parse_java_statement(self, statement):
     parser = ECleverParser(text=statement)
     tree = parser.java_statement()
     builder = EPromptoBuilder(parser)
     walker = ParseTreeWalker()
     walker.walk(builder, tree)
     return builder.getNodeValue(tree)
Code Example #19
    def test_antecedents_terms_have_correct_mf_values_with_more_then_one_term(self):
        fcl_text = """
        FUNCTION_BLOCK my_system
            FUZZIFY antecedent1
                TERM mf1 := (0, 1) (0.5, 0);
                TERM mf2 := (1, 0.3) (2, 0) (3, 1);
                TERM mf3 := (2, 0.4) (4, 1) (5, 1);
            END_FUZZIFY
        END_FUNCTION_BLOCK
        """
        lexer = FclLexer(InputStream(fcl_text))
        stream = CommonTokenStream(lexer)
        parser = FclParser(stream)
        tree = parser.main()

        listener = ScikitFuzzyFclListener()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
        antecedent = listener.antecedents.get('antecedent1').get('value')
        term = antecedent['mf1']
        expected_mf_value = np.asarray([1, 0, 0, 0, 0, 0, 0])
        np.testing.assert_array_equal(expected_mf_value, term.mf)

        term2 = antecedent['mf2']
        expected_mf_value = np.asarray([0, 0, 0.3, 0, 1, 0, 0])
        np.testing.assert_array_equal(expected_mf_value, term2.mf)

        term3 = antecedent['mf3']
        expected_mf_value = np.asarray([0, 0, 0, 0.4, 0.7, 1, 1])
        np.testing.assert_array_equal(expected_mf_value, term3.mf)
Code Example #20
def print_tokens(filename, use_cpp):
    fs = FileStream(filename)
    sa_modelica.USE_CPP_IMPLEMENTATION = use_cpp
    tree = sa_modelica.parse(fs, 'stored_definition')
    printer = HelloPrintListener()
    walker = ParseTreeWalker()
    walker.walk(printer, tree)
Code Example #21
File: Sample4.py Project: m-nakagawa/moyodemo
def main(argv):
    logfmt = ('[%(levelname)s]\t%(name)s:%(threadName)-10s' +
              '(%(asctime)s.%(msecs)d) ' +
              '%(filename)s:%(lineno)d:%(message)s')
    datefmt = '%H:%M:%S'
    logging.basicConfig(level=logging.INFO, format=logfmt, datefmt=datefmt)

    #input = FileStream(argv[1])
    gname = argv[1]
    inputstream = FileStream(gname, encoding='utf-8')
    lexer = SparqlLexer(inputstream)
    stream = CommonTokenStream(lexer)
    parser = SparqlParser(stream)
    #tree = parser.StartRule()
    tree = parser.query()
    fmind = Fmind(gname)
    fnode = fmind.make_right(u"root")
    toFmind(fnode, tree)
    fmind.unfold_all()
    fmind.dump_to_file("l2.mm")

    #tree = parser.prologue()

    morpher = MorpherContext2()
    listener = MySparqlParserListener(morpher)
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    logging.info("Output:%s", sys.argv[1])
    print "# ", sys.argv[1]
    print morpher.get_result()
Code Example #22
def main():
    program = "a = set[0 5] \n"
    program += "b = set[2 8 2] \n"

    program += "c = a.belongs(2) \n"
    program += "d = a.elementSum() \n"
    program += "e = a.elementAvg() \n"
    program += "f = a.length() \n"
    program += "g = a.intersection(b) \n"
    program += "h = a.union(b) \n"
    program += "i = a.difference(b) \n"
    program += "j = a.complement(b) \n"

    input = InputStream(program)
    lexer = TpConjuntosLexer(input)
    stream = CommonTokenStream(lexer)
    parser = TpConjuntosParser(stream)

    tree = parser.program()

    listener = RealListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)

    print(listener.variables)
Code Example #23
 def execute(self, input_source):
     parser = JavaParser(CommonTokenStream(JavaLexer(FileStream(input_source, encoding="utf-8"))))
     walker = ParseTreeWalker()
     walker.walk(self.listener, parser.compilationUnit())
     # self.logger.debug('Display all data extracted by AST. \n' + pformat(self.listener.ast_info, width=160))
     # print(self.listener.call_methods)
     # print(self.listener.ast_info['methods'])
     return self.listener.ast_info
Code Example #24
 def execute(self, input_source):
     parser = MySqlParser(
         CommonTokenStream(MySqlLexer(InputStream(input_source))))
     walker = ParseTreeWalker()
     walker.walk(self.listener, parser.root())
     self.logger.debug('Display all data extracted by AST. \n' +
                       pformat(self.listener.ast_info, width=160))
     return self.listener.ast_info
Code Example #25
 def parseExpression(self, exp):
     parser = ECleverParser(text=exp)
     parser._input.tokenSource.addLF = False
     tree = parser.expression()
     builder = EPromptoBuilder(parser)
     walker = ParseTreeWalker()
     walker.walk(builder, tree)
     return builder.getNodeValue(tree)
Code Example #26
File: solve.py Project: romangraef/aoc2018
def parse_tree():
    lexer = GrammarLexer(InputStream(txt))
    stream = CommonTokenStream(lexer)
    parser = GrammarParser(stream)
    tree = parser.root()
    listener = GuardTimeListener()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
Code Example #27
File: core.py Project: apleshakov/thespiae
def get_single_path_entry(data: str, raw: bool) -> str:
    walker = ParseTreeWalker()
    ls = _EntryListener()
    try:
        walker.walk(ls, _parser(data).single_entry())
    except ParseCancellationException:
        raise PathEntryError
    return ls.raw_entries[0] if raw else ls.processed_entries[0]
Code Example #28
File: Reorder.py Project: uiuc-arc/Storm
 def getList(self, node, getDef=False):
     useAnalyzer = UseAnalyzer()
     walker = ParseTreeWalker()
     walker.walk(useAnalyzer, node)
     if getDef:
         return useAnalyzer.defined
     else:
         return useAnalyzer.use - useAnalyzer.loop - useAnalyzer.defined
Code Example #29
File: translate.py Project: orat/ParseMATLAB
def translate(tree=None, string=None):
    if tree is None:
        tree = parse(string)

    # Actually do the walking
    evaluator = TranslateListener()
    walker = ParseTreeWalker()
    walker.walk(evaluator, tree)
Code Example #30
def parse_sol(srcpath, relsrcpath):
    src = FileStream(srcpath, encoding='utf8')
    lexer = SolidityLexer(src)
    stream = CommonTokenStream(lexer)
    parser = SolidityParser(stream)
    tree = parser.sourceUnit()
    recorder = DefinitionsRecorder(relsrcpath)
    walker = ParseTreeWalker()
    walker.walk(recorder, tree)
Code Example #31
    def execute(self, input_source):
        parser = JavaParser(
            CommonTokenStream(
                JavaLexer(FileStream(input_source, encoding="utf-8"))))
        walker = ParseTreeWalker()
        walker.walk(self.listener, parser.compilationUnit())

        for method in self.listener.methods:
            print(method)
Code Example #32
    def test_format_single_line(self):
        input_file_stream = FileStream(self._tnsnames_file)

        lexer = tnsnamesLexer(input_file_stream)
        stream = CommonTokenStream(lexer)
        parser = tnsnamesParser(stream)
        tree = parser.tnsnames()

        listener = TnsnameLineFormatter()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
        assert len(listener.get_lines) == 6
Code Example #33
File: aliases.py Project: haticepublic/tnsmaster
def main(argv):
    input_file_stream = FileStream(argv[1])
    lexer = tnsnamesLexer(input_file_stream)
    stream = CommonTokenStream(lexer)
    parser = tnsnamesParser(stream)
    tree = parser.tnsnames()

    listener = AliasFinder()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    for alias in listener.get_aliases:
        print(alias)
Code Example #34
File: test_tnsFormatter.py Project: difu/tnsmaster
    def test_format_orastyle(self):
        input_file_stream = FileStream(self._tnsnames_file)

        lexer = tnsnamesLexer(input_file_stream)
        stream = CommonTokenStream(lexer)
        parser = TnsNamesParserWithException(stream)
        tree = parser.tnsnames()

        listener = TnsnameOraStyleFormatter()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
        assert len(listener.get_lines) == 51
Code Example #35
def main():
    in_str = "a=1\na\nclear\na\na=3\na\nb=2+a\nb+a\n"

    chars = InputStream(in_str)
    lexer = ExprLexer(chars)
    tokens = CommonTokenStream(lexer)
    parser = ExprParser(tokens)
    tree = parser.prog()

    # Actually do the walking
    evaluator = EvalListener()
    walker = ParseTreeWalker()
    walker.walk(evaluator, tree)
Code Example #36
File: __init__.py Project: ssaamm/worklog.md
def get_day_stats(fname):
    input_file = FileStream(fname)
    lexer = WorklogLexer(input_file)
    stream = CommonTokenStream(lexer)
    parser = WorklogParser(stream)
    tree = parser.wl()

    stats_walker = DayStatsWalker()
    walker = ParseTreeWalker()
    walker.walk(stats_walker, tree)
    stats_walker._save_current_stats()

    return stats_walker.stats
Code Example #37
File: main.py Project: neodyme60/raypy
def load_from_file(filename):
    from loader.pbrt_loader import PbrtLoader
    from loader.pbrt.pbrtLexer import pbrtLexer
    from loader.pbrt.pbrtParser import pbrtParser

    input = FileStream(filename)
    lexer = pbrtLexer(input)
    tokens = CommonTokenStream(lexer)
    loader = pbrtParser(tokens)
    tree = loader.body()
    printer = PbrtLoader()
    walker = ParseTreeWalker()
    walker.walk(printer, tree)
Code Example #38
    def test_get_aliases(self):
        input_file_stream = FileStream(self._tnsnames_file)

        lexer = tnsnamesLexer(input_file_stream)
        stream = CommonTokenStream(lexer)
        parser = tnsnamesParser(stream)
        tree = parser.tnsnames()

        listener = AliasFinder()
        walker = ParseTreeWalker()
        walker.walk(listener, tree)
        expected_aliases = ['LSNR_FRED', 'LSNR_WILMA', 'lsnr_barney', 'alias_1', 'alias_2.world',
                            'alias3.dunbar-it.co.uk', 'someother_alias', 'someother_alias2']
        self.assertListEqual(listener.get_aliases, expected_aliases)
Code Example #39
File: evaluate.py Project: andrewhead/StackSkim
    def count_literals(self, pattern):

        self.atom_count = 0
        self.literal_count = 0

        walker = ParseTreeWalker()
        input_ = InputStream(pattern)
        lexer = PCRELexer(input_)
        stream = CommonTokenStream(lexer)
        parser = PCREParser(stream)
        tree = parser.parse()

        walker.walk(self, tree)
        return self.literal_count, self.atom_count
Code Example #40
File: csharp_parser.py Project: sdpython/pyensae
    def parse(self, code, source=None):
        """
        Returns all the code elements found inside a string.

        @param      code        string
        @param      source      source
        @return                 list of @see cl CSharpElement
        """
        self._source = source
        parser = parse_code(code, self._parser, self._lexer)
        tree = parser.parse()
        walker = ParseTreeWalker()
        listen = CSharpParserListenerSignatures(parser, source)
        walker.walk(listen, tree)
        return listen._elements
Code Example #41
File: antlr_grammar_use.py Project: sdpython/pyensae
def get_tree_graph(tree, parser, format=TreeGraphListener):
    """
    Returns a graph with :epkg:`networkx`.

    @param      tree        from @see fn parse_code
    @param      parser      the parser used to build the tree, output of @see fn parse_code
    @param      format      None or a class `ParseTreeListener <https://github.com/antlr/antlr4-python3/blob/master/src/antlr4/tree/Tree.py>`_
    @return                 string
    """
    if format is None:
        raise TypeError("format cannot be None")
    walker = ParseTreeWalker()
    listen = format(parser)
    walker.walk(listen, tree)
    return listen
Code Example #42
File: antlr_grammar_use.py Project: sdpython/pyensae
def get_tree_string(tree, parser, format=TreeStringListener):
    """
    Returns a string which shows the parsed tree.

    @param      tree        from @see fn parse_code
    @param      parser      the parser used to build the tree, output of @see fn parse_code
    @param      format      None or a class `ParseTreeListener <https://github.com/antlr/antlr4-python3/blob/master/src/antlr4/tree/Tree.py>`_
    @return                 string
    """
    if format is None:
        return tree.toStringTree()
    else:
        walker = ParseTreeWalker()
        listen = format(parser)
        walker.walk(listen, tree)
        return str(listen)
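
A hedged usage sketch for get_tree_string, reusing pyensae's parse_code helper and the R grammar classes that appear in the rconverter.py example later in this listing (the input string is an assumption):

    parser = parse_code("x <- 1 + 2", RParser, RLexer)
    tree = parser.parse()
    print(get_tree_string(tree, parser))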
Code Example #43
File: ltl_parser.py Project: andrewhead/LTLTrans
    def to_dict(self, formula):

        self.propositions = []
        self.obj = {}
        self.results = {}

        input = InputStream(formula)
        lexer = GenLtlLexer(input)
        stream = CommonTokenStream(lexer)
        parser = GenLtlParser(stream)
        tree = parser.exp()
        walker = ParseTreeWalker()
        walker.walk(self, tree)

        return {
            'formula': self.obj,
            'propositions': dict(
                [(self.get_prop_index(p), p) for p in self.propositions]
            )
        }
Code Example #44
File: formatter.py Project: haticepublic/tnsmaster
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("tnsnamesFile", type=str, help="the filename of the tnsnames file to be formatted")
    parser.add_argument("-f", "--format", type=str, help="format to be applied",
                        default=Format.oracle.name)
    args = parser.parse_args()
    if args.format == Format.oracle.name:
        _listener = TnsnameOraStyleFormatter()
    else:
        _listener = TnsnameLineFormatter()

    input_file_stream = FileStream(args.tnsnamesFile)
    lexer = tnsnamesLexer(input_file_stream)
    stream = CommonTokenStream(lexer)
    parser = tnsnamesParser(stream)
    tree = parser.tnsnames()

    walker = ParseTreeWalker()
    walker.walk(_listener, tree)

    for line in _listener.get_lines:
        print(line)
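
A hedged command-line sketch for this script (the tnsnames path is an assumption); with no --format option the default Oracle-style formatter is applied:

    python formatter.py /path/to/tnsnames.ora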
Code Example #45
File: command_line.py Project: tsudmi/json-database
def main(db, f, c):
    """Command line tool for JSON database manipulation."""

    if f is not None:
        if not isfile(f):
            echo(style('Specified file path does not exist.', fg='red'), err=True)
            return
        else:
            stream = FileStream(f)
    elif c is not None:
        stream = InputStream(c)
    else:
        c = prompt('Command')
        stream = InputStream(c)

    lexer = DatabaseLexer(stream)
    tokens = CommonTokenStream(lexer)
    parser = DatabaseParser(tokens)
    tree = parser.commands()

    command_parser = CommandParser(db)
    walker = ParseTreeWalker()
    walker.walk(command_parser, tree)
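    # Persist any changes the walked commands made through the database API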
    command_parser.api.write_data()
Code Example #46
File: preds.py Project: egrachev/preds_lang
        if isinstance(ctx.INT(), list):
            i = ctx.INT()[0].getText()

        op = ctx.OP().getText()

        self.current_list.append(
            v+op+i
        )

    def enterPredicate_list(self, ctx: predsParser.Predicate_listContext):
        self.current_list = []

    def exitPredicate_list(self, ctx: predsParser.Predicate_listContext):
        self.predicates.append(self.current_list)
        self.current_list = []


code = open('preds.txt').read()
stream = InputStream(code)
lexer = predsLexer(stream)
tokens = CommonTokenStream(lexer)
parser = predsParser(tokens)
tree = parser.if_list()

walker = ParseTreeWalker()
listener = Listener()
walker.walk(listener, tree)


Code Example #47
File: rconverter.py Project: sdpython/pyensae
def r2python(code: str, pep8=False, fLOG=None) -> str:
    """
    Converts an R script into Python.

    @param      code        R string
    @param      pep8        modify the output to be compliant with pep8
    @param      fLOG        logging function
    @return                 Python string

    .. _code-r2python:

    The function uses a customized R grammar implemented with Antlr4.
    Formulas become strings; they should be handled with
    `patsy <http://patsy.readthedocs.io/en/latest/>`_.

    .. exref::
        :title: Convert R into Python

        .. runpython::
            :showcode:

            rscript = '''
                nb=function(y=1930){
                debut=1816
                MatDFemale=matrix(D$Female,nrow=111)
                colnames(MatDFemale)=(debut+0):198
                cly=(y-debut+1):111
                deces=diag(MatDFemale[:,cly[cly%in%1:199]])
                return(c(B$Female[B$Year==y],deces))}
                '''

            from pyensae.languages import r2python
            print(r2python(rscript, pep8=True))

    Some patterns are not migrated well, such as the conversion of the
    expression ``a:b`` into ``range(a, b)``. The grammar could be improved to
    detect the beginning of the expression, but for now, if the function fails
    to do the conversion, ``a:b`` must be written as ``(a):b``. The same trick
    is sometimes needed for other patterns, such as the operator ``%in%``,
    which is converted into an intersection of two sets.

    Known bugs:

    * ``} else {`` needs to be replaced by ``} EOL else {``
    * a comment added at the end of a line must be followed by an empty line
    * ``m[,1]`` must be replaced by ``M[:,1]``
    * formula ``~.`` is not translated
    * ``%<%`` cannot be followed by an empty line
    """
    if fLOG:
        fLOG("[r2python] parse ", len(code), "bytes")
    parser = parse_code(code, RParser, RLexer)
    if fLOG:
        fLOG("[r2python] parse continue")
    parsed = parser.parse()
    if fLOG:
        fLOG("[r2python] listen")
    listen = TreeStringListener(parsed, fLOG=fLOG)
    walker = ParseTreeWalker()
    if fLOG:
        fLOG("[r2python] walk")
    walker.walk(listen, parsed)
    if fLOG:
        fLOG("[r2python] get code")
    code = listen.get_python()
    if pep8:
        if fLOG:
            fLOG("[r2python] pep8")
        code = remove_extra_spaces_and_pep8(code, aggressive=True)
    return code
Code Example #48
File: util.py Project: andrewhead/tutorons-server
def walk_tree(tree, tree_listener):
    walker = ParseTreeWalker()
    walker.walk(tree_listener, tree)
    return walker
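
A hedged usage sketch for walk_tree, borrowing the PCRE grammar classes from the evaluate.py example earlier in this listing; PrintingPcreListener is a hypothetical listener named only for illustration:

    stream = CommonTokenStream(PCRELexer(InputStream(r"a+b*")))
    tree = PCREParser(stream).parse()
    walk_tree(tree, PrintingPcreListener())  # hypothetical listener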
Code Example #49
File: formatter.py Project: erget/tnsmaster
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("tnsnamesFile", type=str, help="the filename of the tnsnames file to be formatted")
    parser.add_argument("-f", "--format", choices=[Format.oneLine.name, Format.oracle.name],
                        help="format to be applied",
                        default=Format.oracle.name)
    parser.add_argument("--lowerkeys", action='store_true',
                        help="lowercase keys (case handling for keys must be activated!)",
                        default=False)
    parser.add_argument("--lowervalues", action='store_true',
                        help="lowercase keys (case handling for values must be activated!)",
                        default=False)
    parser.add_argument("--handlekeycase", action='store_true', help="activate case handling for keys",
                        default=False)
    parser.add_argument("--handlevaluecase", action='store_true', help="activate case handling for values",
                        default=False)

    args = parser.parse_args()

    listener_ora_style = TnsnameOraStyleFormatter()
    listener_ora_style.set_uppercase_keywords(not args.lowerkeys)
    listener_ora_style.set_uppercase_value(not args.lowervalues)
    listener_ora_style.set_ignore_keyword_case(not args.handlekeycase)
    listener_ora_style.set_ignore_value_case(not args.handlevaluecase)

    try:
        input_file_stream = FileStream(args.tnsnamesFile)
    except FileNotFoundError:
        print(args.tnsnamesFile + " not found!")
        exit(1)

    lexer = tnsnamesLexer(input_file_stream)
    ora_stream = CommonTokenStream(lexer)
    tns_parser = TnsNamesParserWithException(ora_stream)
    try:
        tree = tns_parser.tnsnames()
    except Exception as ex:
        print("Error while parsing: " + ex.__str__())
        exit(1)

    walker = ParseTreeWalker()
    walker.walk(listener_ora_style, tree)

    buf = StringIO()

    if args.format == Format.oracle.name:
        for line in listener_ora_style.get_lines:
            print(line)
        exit(0)

    for line in listener_ora_style.get_lines:
        buf.write(line)

    listener_line_style = TnsnameLineFormatter()
    input_text_stream = InputStream(buf.getvalue())
    line_lexer = tnsnamesLexer(input_text_stream)
    line_stream = CommonTokenStream(line_lexer)
    line_parser = TnsNamesParserWithException(line_stream)
    try:
        tree = line_parser.tnsnames()
    except Exception as ex:
        print("Error while parsing: " + ex.__str__())
        exit(1)

    walker.walk(listener_line_style, tree)

    for line2 in listener_line_style.get_lines:
        print(line2)