Пример #1
0
def main():
    """Parse a sample command line against GRAMMAR and print the AST
    both in raw form and as indented JSON.

    Fix: `json_ast` was computed but never used (and `pprint` was
    imported but unused) — the JSON rendering is now printed, matching
    the sibling examples in this file.
    """
    import json
    from grako import parse
    from grako.util import asjson

    ast = parse(GRAMMAR, 'file.py a.py testing.py test2 -hello True False test3 test4 --args 1 2')
    print(ast)
    json_ast = asjson(ast)
    print(json.dumps(json_ast, indent=2))
Пример #2
0
def main():
    """Parse a small arithmetic expression and dump the resulting AST
    twice: once with pprint, once as indented JSON."""
    import pprint
    import json
    from grako import parse
    from grako.util import asjson

    model = parse(GRAMMAR, '3 + 5 * ( 10 - 20 )')

    # Human-readable nested dump first.
    print('PPRINT')
    pprint.pprint(model, indent=2, width=20)
    print()

    # Then the same tree rendered through the JSON serializer.
    print('JSON')
    print(json.dumps(asjson(model), indent=2))
    print()
Пример #3
0
def main():
    """Demo entry point: parse an arithmetic expression with GRAMMAR and
    show the AST as a pprint dump followed by a JSON dump."""
    import pprint
    import json
    from grako import parse
    from grako.util import asjson

    parsed = parse(GRAMMAR, '3 + 5 * ( 10 - 20 )')

    print('PPRINT')
    pprint.pprint(parsed, indent=2, width=20)
    print()

    as_dict = asjson(parsed)
    print('JSON')
    print(json.dumps(as_dict, indent=2))
    print()
Пример #4
0
 def __json__(self):
     """Serialize the public items of this mapping.

     Keys with a leading underscore are treated as private and skipped;
     remaining keys and values are converted recursively via asjson().
     Insertion order is preserved.
     """
     result = {}
     for key, value in self.items():
         if key.startswith('_'):
             continue
         result[asjson(key)] = asjson(value)
     return result
Пример #5
0
 def __json__(self):
     """Serialize this object's public attributes, recording the concrete
     class name under the '__class__' key first."""
     result = collections.OrderedDict()
     result['__class__'] = self.__class__.__name__
     result.update(self._pubdict())
     return asjson(result)
Пример #6
0
 def asjson(self):
     """Return the JSON-ready representation of this object.

     Delegates to the module-level asjson() helper.
     """
     converted = asjson(self)
     return converted
Пример #7
0
 def __json__(self):
     """Extend the parent serialization with the qualified names of this
     symbol's bases under the 'bases' key."""
     result = super(BasedSymbol, self).__json__()
     base_names = [base.qualname() for base in self.bases]
     result['bases'] = asjson(base_names)
     return result
Пример #8
0
 def __json__(self):
     """Map each entry name to the JSON form of its symbols, keeping
     the entries' original order."""
     pairs = [(name, asjson(symbols)) for name, symbols in self.entries.items()]
     return odict(pairs)
Пример #9
0
 def __json__(self):
     """Build an ordered mapping of entry name -> serialized symbols."""
     result = odict()
     for name, symbols in self.entries.items():
         result[name] = asjson(symbols)
     return result
Пример #10
0
 def __json__(self):
     """Serialize the public attributes of this object, tagged with the
     concrete class name under '__class__'."""
     data = collections.OrderedDict(__class__=self.__class__.__name__)
     data.update(self._pubdict())
     return asjson(data)
Пример #11
0
# Build the AST grammar from the JSON token/node definitions, parse a
# spoken-style regex request against it, and assemble the final regex by
# evaluating each matched token's generator expression.
dir_path = os.path.dirname(os.path.realpath(__file__))

jsons = build_ast.get_json_files()

tokens, nodes, funcs = build_ast.group_json_by_keys(jsons)

ast = build_ast.build_ast(tokens, nodes)

#text = get_speech.get_speech()

request = 'match a single character of a b c end of match'

resp = parse(ast, request)

json_ast = asjson(resp)

print(json.dumps(json_ast, indent=2))

# NOTE(security): eval() on expressions loaded from the JSON files is
# dangerous if those files can be tampered with — the restricted
# __builtins__ dict narrows, but does not eliminate, the attack surface.
# A dispatch table of real functions would be safer.
parts = []
for token in json_ast:
    key = list(token)[0]  # each token dict carries a single rule key
    expr = funcs[key]
    parts.append(eval(expr,
                      {'__builtins__': {
                          'tokens': token[key][0],
                          'str': str
                      }}))
# Join once instead of repeated string concatenation.
regex = '/' + ''.join(parts) + '/'
print(regex)
Пример #12
0
 def __json__(self):
     """JSON-ify the public items of this mapping (keys that do not
     start with an underscore)."""
     public = ((k, v) for k, v in self.items() if not k.startswith('_'))
     return {asjson(k): asjson(v) for k, v in public}
Пример #13
0
 def __json__(self):
     """Augment the inherited serialization with the qualnames of the
     base symbols."""
     data = super(BasedSymbol, self).__json__()
     data['bases'] = asjson([symbol.qualname() for symbol in self.bases])
     return data
Пример #14
0
 def __json__(self):
     """Serialize the node type name, the inherited entries, and this
     symbol's references, in that fixed order."""
     result = odict()
     result['node'] = type(self.node).__name__
     result['entries'] = super(Symbol, self).__json__()
     result['references'] = asjson(self._references)
     return result
Пример #15
0
 def asjson(self):
     """Return the JSON-ready form of this object via the module-level
     asjson() helper."""
     return asjson(self)
Пример #16
0
 def __json__(self):
     """Serialize node type, inherited entries, and references as an
     ordered mapping."""
     pairs = [
         ('node', type(self.node).__name__),
         ('entries', super(Symbol, self).__json__()),
         ('references', asjson(self._references)),
     ]
     return odict(pairs)
Пример #17
0
        return ast

    def string(self, ast):
        """Semantic action for the 'string' rule: pass the AST through unchanged."""
        return ast

    def NEWLINE(self, ast):
        """Semantic action for the 'NEWLINE' rule: pass the AST through unchanged."""
        return ast

    def CLASS(self, ast):
        """Semantic action for the 'CLASS' rule: pass the AST through unchanged."""
        return ast


def main(filename, startrule, **kwargs):
    """Read *filename* and parse its contents with the generated parser,
    starting at *startrule*; extra keyword args go to parse()."""
    with open(filename) as source:
        contents = source.read()
    return thinkingprocessesParser().parse(
        contents, startrule, filename=filename, **kwargs)


if __name__ == '__main__':
    import json
    from grako.util import asjson

    # Run the generic grako CLI driver, then show the raw AST followed
    # by its JSON rendering.
    ast = generic_main(main, thinkingprocessesParser, name='thinkingprocesses')
    print('AST:')
    print(ast)
    print()
    print('JSON:')
    json_ast = asjson(ast)
    print(json.dumps(json_ast, indent=2))
    print()
Пример #18
0
    def test_bootstrap(self):
        """Bootstrap round-trip test for the grako grammar.

        Parses grammar/grako.ebnf with the bootstrap parser, regenerates
        the grammar text from its own model several times, and asserts
        each regeneration reaches a fixed point (identical output). Later
        phases also generate parser code, pickle/unpickle the model, walk
        it, and (optionally) draw it. Intermediate artifacts are written
        to ./tmp. The phases are strictly order-dependent: each consumes
        files or objects produced by the previous one.
        """
        print()

        # Start from a clean ./tmp scratch directory.
        if os.path.isfile('./tmp/00.ast'):
            shutil.rmtree('./tmp')
        if not os.path.isdir('./tmp'):
            os.mkdir('./tmp')
        print('-' * 20, 'phase 00 - parse using the bootstrap grammar')
        with open('grammar/grako.ebnf') as f:
            text = str(f.read())
        g = EBNFParser('EBNFBootstrap')
        grammar0 = g.parse(text)
        ast0 = json.dumps(asjson(grammar0), indent=2)
        with open('./tmp/00.ast', 'w') as f:
            f.write(ast0)

        # Phase 01 regenerates the grammar text from the parsed model.
        print('-' * 20, 'phase 01 - parse with parser generator')
        with open('grammar/grako.ebnf') as f:
            text = str(f.read())
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text, trace=False)

        generated_grammar1 = str(g.ast['start'])
        with open('./tmp/01.ebnf', 'w') as f:
            f.write(generated_grammar1)

        # Phase 02: re-parse the regenerated grammar; output must be
        # identical to phase 01's (fixed point).
        print('-' * 20,
              'phase 02 - parse previous output with the parser generator')
        with open('./tmp/01.ebnf', 'r') as f:
            text = str(f.read())
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text, trace=False)
        generated_grammar2 = str(g.ast['start'])
        with open('./tmp/02.ebnf', 'w') as f:
            f.write(generated_grammar2)
        self.assertEqual(generated_grammar2, generated_grammar1)

        print('-' * 20, 'phase 03 - repeat')
        with open('./tmp/02.ebnf') as f:
            text = f.read()
        g = EBNFParser('EBNFBootstrap')
        ast3 = g.parse(text)
        with open('./tmp/03.ast', 'w') as f:
            f.write(json.dumps(asjson(ast3), indent=2))

        print('-' * 20, 'phase 04 - repeat')
        with open('./tmp/02.ebnf') as f:
            text = f.read()
        g = GrammarGenerator('EBNFBootstrap')
        g.parse(text)
        parser = g.ast['start']
        #    pprint(parser.first_sets, indent=2, depth=3)
        generated_grammar4 = str(parser)
        with open('./tmp/04.ebnf', 'w') as f:
            f.write(generated_grammar4)
        self.assertEqual(generated_grammar4, generated_grammar2)

        # Phase 05 uses the grammar *model* itself as a parser.
        print('-' * 20, 'phase 05 - parse using the grammar model')
        with open('./tmp/04.ebnf') as f:
            text = f.read()
        ast5 = parser.parse(text)
        with open('./tmp/05.ast', 'w') as f:
            f.write(json.dumps(asjson(ast5), indent=2))

        print('-' * 20, 'phase 06 - generate parser code')
        gencode6 = codegen(parser)
        with open('./tmp/g06.py', 'w') as f:
            f.write(gencode6)

        # Phase 07 only checks the generated module byte-compiles;
        # doraise=True turns compile errors into exceptions.
        print('-' * 20, 'phase 07 - import generated code')
        py_compile.compile('./tmp/g06.py', doraise=True)
        # g06 = __import__('g06')
        # GenParser = g06.EBNFBootstrapParser

        # print('-' * 20, 'phase 08 - compile using generated code')
        # parser = GenParser(trace=False)
        # result = parser.parse(
        #     text,
        #     'start',
        #     comments_re=COMMENTS_RE,
        #     eol_comments_re=EOL_COMMENTS_RE
        # )
        # self.assertEqual(result, parser.ast['start'])
        # ast8 = parser.ast['start']
        # json8 = json.dumps(asjson(ast8), indent=2)
        # open('./tmp/08.ast', 'w').write(json8)
        # self.assertEqual(ast5, ast8)

        print('-' * 20, 'phase 09 - Generate parser with semantics')
        with open('grammar/grako.ebnf') as f:
            text = f.read()
        parser = GrammarGenerator('EBNFBootstrap')
        g9 = parser.parse(text)
        generated_grammar9 = str(g9)
        with open('./tmp/09.ebnf', 'w') as f:
            f.write(generated_grammar9)
        self.assertEqual(generated_grammar9, generated_grammar1)

        print('-' * 20, 'phase 10 - Parse with a model using a semantics')
        g10 = g9.parse(text,
                       start_rule='start',
                       semantics=EBNFGrammarSemantics('EBNFBootstrap'))
        generated_grammar10 = str(g10)
        with open('./tmp/10.ebnf', 'w') as f:
            f.write(generated_grammar10)
        gencode10 = codegen(g10)
        with open('./tmp/g10.py', 'w') as f:
            f.write(gencode10)

        # Phase 11 checks the model survives a pickle round-trip and is
        # still usable as a parser afterwards.
        print('-' * 20, 'phase 11 - Pickle the model and try again.')
        with open('./tmp/11.grako', 'wb') as f:
            pickle.dump(g10, f, protocol=2)
        with open('./tmp/11.grako', 'rb') as f:
            g11 = pickle.load(f)
        r11 = g11.parse(text,
                        start_rule='start',
                        semantics=EBNFGrammarSemantics('EBNFBootstrap'))
        with open('./tmp/11.ebnf', 'w') as f:
            f.write(str(g11))
        gencode11 = codegen(r11)
        with open('./tmp/g11.py', 'w') as f:
            f.write(gencode11)

        print('-' * 20, 'phase 12 - Walker')

        # Collects the class name of every node visited depth-first.
        class PrintNameWalker(DepthFirstWalker):
            def __init__(self):
                self.walked = []

            def walk_default(self, o, children):
                self.walked.append(o.__class__.__name__)

        v = PrintNameWalker()
        v.walk(g11)
        with open('./tmp/12.txt', 'w') as f:
            f.write('\n'.join(v.walked))

        # Phase 13 is best-effort: skipped gracefully when PyGraphViz is
        # not installed.
        print('-' * 20, 'phase 13 - Graphics')
        try:
            from grako.diagrams import draw
        except ImportError:
            print('PyGraphViz not found!')
        else:
            draw('./tmp/13.png', g11)