예제 #1
0
파일: test_nfa.py 프로젝트: EvelynHf/basil
 def test_simple_compose (self):
     """Compose two pgen NFA grammars and sanity-check the result."""
     base_dir = os.path.split(__file__)[0]
     base_nfas = PyPgen.PyPgen().handleStart(
         PgenParser.parseFile(os.path.join(base_dir, 'test.pgen')))
     ext_nfas = PyPgen.PyPgen().handleStart(
         PgenParser.parseFile(os.path.join(base_dir, 'ext.pgen')))
     composed = nfa.compose_nfas(base_nfas, ext_nfas)
     # The composed grammar must be non-empty, keep the same count of
     # NFAs as the base grammar, yet differ from the base (extension
     # actually merged in).
     self.assertTrue(composed)
     self.assertEqual(len(composed[0]), len(base_nfas[0]))
     self.assertNotEqual(composed[0], base_nfas[0])
예제 #2
0
def main (fileName = None):
    """main
    Read a pgen input grammar from the given file name.  If no file name is
    given, read the grammar from stdin.  Print a XML document for the
    resulting grammar model translation.
    """
    # PEP 8: compare against None with identity, not equality.
    if fileName is None:
        parseTree = PgenParser.parseString(sys.stdin.read())
    else:
        parseTree = PgenParser.parseFile(fileName)
    grammarFactoryClass = BasilGrammarModel.getModelFactory()
    grammarFactory = grammarFactoryClass()
    internalizer = PgenInternalizer(grammarFactory)
    model = internalizer(parseTree)
    # Single-argument print() prints identically under Python 2 and 3.
    print(grammarFactory.externalizeXML(model))
예제 #3
0
파일: PyPgen.py 프로젝트: EvelynHf/basil
def main ():
    """Driver: build a parser from a pgen grammar, parse input, show the tree."""
    import sys, PgenParser, pprint
    # ____________________________________________________________
    # Generate a test parser from the grammar named on the command line,
    # falling back to the bundled test grammar.
    grammar_path = sys.argv[1] if len(sys.argv) > 1 else "tests/test.pgen"
    grammar_st = PgenParser.parseFile(grammar_path)
    parser = buildParser(grammar_st)
    pprint.pprint(parser.toTuple())
    # ____________________________________________________________
    # Parse some input: second command-line argument, or standard input.
    if len(sys.argv) > 2:
        input_name = sys.argv[2]
        input_obj = open(input_name)
    else:
        input_name = "<stdin>"
        input_obj = sys.stdin
    token_stream = StdTokenizer.StdTokenizer.tokenize(input_obj)
    # NOTE(review): 257 looks like a hard-coded start nonterminal number
    # (pgen numbers nonterminals from 256) -- confirm against the grammar.
    parser.setStart(257)
    tree = parser.parseTokens(token_stream)
    input_obj.close()
    # ____________________________________________________________
    # Show the result
    from basil.visuals.TreeBox import showTree
    showTree(tree).mainloop()
예제 #4
0
파일: nfa.py 프로젝트: EvelynHf/basil
def main (*args):
    """Main routine for the basil.parsing.nfa module.

    Composes one or more grammars (represented as a collections of
    pgen non-deterministic finite state automata, or NFA's) into a single
    NFA.  By default, main() pretty-prints the resulting NFA to stdout.

    Flags:
    -n <symbol>    Output a Graphviz DOT file for the NFA that recognizes the
                   given symbol (can specify more than one).
    -o <filename>  Output the pretty-printed NFA collection to the given file.
    -q             Quiet. Suppress output of the resulting NFA.  Overrides -o.

    Non-flag arguments should be the file names of a pgen input files
    that will be composed into a single NFA collection.
    """
    import getopt
    import pprint
    from basil.parsing import PgenParser, PyPgen
    nfas_to_output = []
    output_file = None
    quiet = False
    opts, args = getopt.getopt(args, "n:o:q")
    for (opt_flag, opt_arg) in opts:
        if opt_flag == "-q":
            quiet = True
        elif opt_flag == "-o":
            output_file = opt_arg
        elif opt_flag == "-n":
            nfas_to_output.append(opt_arg)
    if len(args) < 1:
        # Single-argument print() prints identically under Python 2 and 3.
        print("Usage: nfa.py [flags] <pgenfile1> [<pgenfile2> ...]")
        return
    # Fold every input grammar into out_nfas, remembering each grammar's
    # NFA collection together with a per-grammar DOT node-name prefix.
    nfas_tups = []
    out_nfas = [[], []]
    for index, arg in enumerate(args):
        grammar_st = PgenParser.parseFile(arg)
        crnt_nfas = PyPgen.PyPgen().handleStart(grammar_st)
        nfas_tups.append((crnt_nfas, "g%d_" % index))
        out_nfas = compose_nfas(out_nfas, crnt_nfas)
    nfas_tups.append((out_nfas, "res"))
    if not quiet:
        out_nfas_str = pprint.pformat(out_nfas)
        if output_file:
            # BUG FIX: the output file handle was never closed.
            out_fileobj = open(output_file, "w")
            try:
                out_fileobj.write(out_nfas_str)
            finally:
                out_fileobj.close()
        else:
            print(out_nfas_str)
    # Emit one DOT file per requested symbol, containing that symbol's NFA
    # as found in each input grammar and in the composed result.
    for nfa_name in nfas_to_output:
        # BUG FIX: the original comprehensions rebound the name `nfas`
        # (loop target shadowing both the iterated list and the result
        # being built); use distinct names for each role instead.
        found = [(find_nfa(nfa_coll, nfa_name), nfa_coll, prefix)
                 for nfa_coll, prefix in nfas_tups]
        nfas_code = ["" if one_nfa is None else
                     nfa_to_dot(one_nfa, nfa_coll[1], prefix)
                     for (one_nfa, nfa_coll, prefix) in found]
        # BUG FIX: close the DOT file even if the write raises.
        dotfile = open(nfa_name + ".dot", "w")
        try:
            dotfile.write("digraph G {\n%s}\n" % ("\n".join(nfas_code)))
        finally:
            dotfile.close()
예제 #5
0
파일: mypgen.py 프로젝트: EvelynHf/basil
def pgen_to_grammar_obj (source):
    """Lower pgen grammar source text to an accelerated DFA grammar."""
    generator = PyPgen.PyPgen()
    # Parse the source, build the NFA form, then convert it to DFAs.
    nfa_form = generator.handleStart(PgenParser.parseString(source))
    dfa_form = generator.generateDfaGrammar(nfa_form)
    # Resolve label names and compute FIRST sets in place.
    generator.translateLabels(dfa_form)
    generator.generateFirstSets(dfa_form)
    return DFAParser.addAccelerators(dfa_form)
예제 #6
0
def pgen_to_grammar_obj (source):
    '''XXX Stolen from PyCon 2010 sprint sandbox.  Move into Basil proper.

    Build an accelerated DFA grammar object from pgen grammar source text.
    '''
    pgen_obj = PyPgen.PyPgen()
    parse_st = PgenParser.parseString(source)
    # NFA -> DFA conversion, then in-place label and FIRST-set fixups.
    grammar = pgen_obj.generateDfaGrammar(pgen_obj.handleStart(parse_st))
    pgen_obj.translateLabels(grammar)
    pgen_obj.generateFirstSets(grammar)
    grammar = DFAParser.addAccelerators(grammar)
    return grammar
예제 #7
0
 def internalizePGEN (self, stream):
     """BasilGrammarModelFactory.internalizePgen

     Internalize a pgen grammar given either as a string of grammar
     source text or as a readable file-like object; return the
     internalized grammar model.
     """
     from basil.parsing import PgenParser
     from basil.models.grammar.InternalizePgen import PgenInternalizer
     # BUG FIX: use isinstance() rather than comparing type() for
     # equality (PEP 8), so str subclasses are also treated as source
     # text instead of being mistaken for file-like objects.
     if isinstance(stream, str):
         text = stream
     else:
         text = stream.read()
     parse = PgenParser.parseString(text)
     internalizer = PgenInternalizer(self)
     return internalizer(parse)
예제 #8
0
 def setUp (self):
     """Build a Python 2.6 parser and trampoline handlers for the tests."""
     # Parse the MyFront grammar, create a set of automata for it (like
     # pgen does), and then convert the automata to generators for the
     # treepoline.
     grammar_file = basil.lang.python.__path__[0] + "/python26/Grammar"
     grammar_st = PgenParser.parseFile(grammar_file)
     parser_obj = PyPgen.buildParser(grammar_st)
     parser_obj.setStart(parser_obj.stringToSymbolMap()['file_input'])
     self.parser1 = parser_obj
     accel_gram = DFAParser.addAccelerators(parser_obj.toTuple())
     handlers = trampoline.pgen_grammar_to_handlers(accel_gram, {})
     # Override the start special nonterminal to just do what it is
     # supposed to:
     def parse_start (instream, outtree):
         yield 'file_input'
     handlers['start'] = parse_start
     self.handlers = handlers
예제 #9
0
def main (*args):
    """main() - Unit test routine for the PushdownAutomaton module.

    Accepts an optional "-o <file>" flag naming where the Graphviz
    output is written (default: stdout), and at most one positional
    argument naming a pgen grammar file (default: ./tests/test.pgen).
    """
    from basil.lang.python import DFAParser
    from basil.parsing import PgenParser, PyPgen
    import sys, getopt
    # ____________________________________________________________
    opts, args = getopt.getopt(args, "o:")
    outFile = sys.stdout
    for optFlag, optArg in opts:
        if optFlag == "-o":
            outFile = open(optArg, "w")
    argc = len(args)
    if argc > 1:
        # Single-argument print() prints identically under Python 2 and 3.
        print("Usage:\n\tPushdownAutomaton.py [opts] <file.pgen>\n")
        sys.exit(-1)
    elif argc == 1:
        srcFile = args[0]
    else:
        srcFile = "./tests/test.pgen"
    grammarST = PgenParser.parseFile(srcFile)
    parser = PyPgen.buildParser(grammarST)
    grammarTup = parser.grammarObj
    # ____________________________________________________________
    # Round trip test: tuple -> PushdownGrammar -> tuple must agree.
    myGrammarObj = PushdownGrammar(grammarTup)
    myGrammarObj.compareToTuple(grammarTup)
    grammarTup2 = myGrammarObj.toTuple()
    myGrammarObj.compareToTuple(grammarTup2)
    # Now with accelerators...
    grammarTup3 = DFAParser.addAccelerators(grammarTup)
    myGrammarObj2 = PushdownGrammar(grammarTup3)
    myGrammarObj2.compareToTuple(grammarTup3)
    grammarTup4 = myGrammarObj2.toTuple()
    myGrammarObj2.compareToTuple(grammarTup4)
    # ____________________________________________________________
    # Output tests.
    outFile.write("%s\n" % myGrammarObj2.toString("graphviz"))
    if outFile != sys.stdout:
        outFile.close()
예제 #10
0
파일: myparser.py 프로젝트: EvelynHf/basil
from basil.lang.mython.MyFrontExceptions import MyFrontSyntaxError

# ______________________________________________________________________
# Module data

# Grammar productions grafted onto the stock Python grammar; they add a
# `quote`-based compound statement (presumably Mython's quotation syntax,
# given the MyFront imports above -- confirm).  Runtime string: do not edit.
MY_GRAMMAR_EXT = """
compound_stmt: quotedef
quotedef: 'quote' ['[' expr ']'] [NAME] qsuite
qsuite: ':' (QUOTED NEWLINE | NEWLINE QUOTED)
"""

# Start symbol handed to the DFA generator below.
MY_START_SYMBOL = 'file_input'

# Built at import time: parse the bundled Python 2.6 pgen grammar and the
# extension above, compose the two NFA sets, then lower the result to an
# accelerated DFA grammar tuple.
pgen = PyPgen.PyPgen()
py_grammar_path = os.path.split(basil.lang.python.__file__)[0]
py_nfa_grammar = pgen.handleStart(PgenParser.parseFile(
    os.path.join(py_grammar_path, 'python26/Grammar')))
ext_nfa_grammar = pgen.handleStart(PgenParser.parseString(MY_GRAMMAR_EXT))
my_nfa_grammar = nfa.compose_nfas(py_nfa_grammar, ext_nfa_grammar)
my_grammar0 = pgen.generateDfaGrammar(my_nfa_grammar, MY_START_SYMBOL)
# Map the extension's QUOTED token label onto the lexer's token code.
pgen.translateLabels(my_grammar0, {'QUOTED' : mylexer.QUOTED})
pgen.generateFirstSets(my_grammar0)
# NOTE(review): relies on Python 2 map() returning a list; the mutable
# grammar is frozen row-by-row, then as a whole, before acceleration.
my_grammar0[0] = map(tuple, my_grammar0[0])
my_grammar0 = tuple(my_grammar0)
my_grammar = DFAParser.addAccelerators(my_grammar0)
# Drop the intermediate, un-accelerated form; only my_grammar is exported.
del my_grammar0

# Debug flag for the pretty-printing block below.
__DEBUG__ = False

if __DEBUG__:
    import pprint