Example #1
File: mypgen.py  Project: EvelynHf/basil
def pgen_to_grammar_obj (source):
    pgen = PyPgen.PyPgen()
    nfa_grammar = pgen.handleStart(PgenParser.parseString(source))
    dfa_grammar = pgen.generateDfaGrammar(nfa_grammar)
    pgen.translateLabels(dfa_grammar)
    pgen.generateFirstSets(dfa_grammar)
    dfa_grammar = DFAParser.addAccelerators(dfa_grammar)
    return dfa_grammar
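
A minimal usage sketch (not part of the project source): the imports mirror those in Example #5, and the one-rule grammar string is a hypothetical input in pgen's Grammar notation.

from basil.lang.python import DFAParser
from basil.parsing import PgenParser, PyPgen

# Hypothetical toy grammar text; any grammar in pgen's Grammar/Grammar
# notation would do here.
TOY_GRAMMAR = "start: NAME NEWLINE ENDMARKER\n"

# pgen_to_grammar_obj() returns the accelerated DFA grammar tuple, which can
# then be handed to a parser driver such as trampoline.pgen_grammar_to_handlers()
# (see Examples #4 and #6).
toy_grammar_obj = pgen_to_grammar_obj(TOY_GRAMMAR)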
Example #2
def pgen_compose (pgen_st1, pgen_st2, start_symbol, additional_tokens = None):
    # Note: pgen (a PyPgen.PyPgen instance) and the nfa module are module-level
    # names in the source file; Example #6 shows how they are set up and used.
    nfa_grammar1 = pgen.handleStart(pgen_st1)
    nfa_grammar2 = pgen.handleStart(pgen_st2)
    nfa_composed = nfa.compose_nfas(nfa_grammar1, nfa_grammar2)
    grammar3 = pgen.generateDfaGrammar(nfa_composed, start_symbol)
    pgen.translateLabels(grammar3, additional_tokens)
    pgen.generateFirstSets(grammar3)
    grammar3[0] = map(tuple, grammar3[0])
    return DFAParser.addAccelerators(tuple(grammar3))
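
A hedged usage sketch for pgen_compose(), following the call pattern of Example #6; the file path, EXT_GRAMMAR_TEXT string, and QUOTED token value are illustrative placeholders, not part of the project source.

# Illustrative only: parse a base grammar file and an extension grammar string
# into pgen concrete syntax trees, then compose them around 'file_input'.
base_st = PgenParser.parseFile('python26/Grammar')     # placeholder path
ext_st = PgenParser.parseString(EXT_GRAMMAR_TEXT)      # hypothetical extension grammar text
composed_grammar = pgen_compose(base_st, ext_st, 'file_input',
                                {'QUOTED' : QUOTED})   # extra token labels, cf. Example #6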
Example #3
def pgen_to_grammar_obj (source):
    '''XXX Stolen from PyCon 2010 sprint sandbox.  Move into Basil proper.'''
    pgen = PyPgen.PyPgen()
    nfa_grammar = pgen.handleStart(PgenParser.parseString(source))
    dfa_grammar = pgen.generateDfaGrammar(nfa_grammar)
    pgen.translateLabels(dfa_grammar)
    pgen.generateFirstSets(dfa_grammar)
    dfa_grammar = DFAParser.addAccelerators(dfa_grammar)
    return dfa_grammar
Example #4
def setUp (self):
    # Parse the MyFront grammar, create a set of automata for it (like
    # pgen does), and then convert the automata to generators for the
    # treepoline.
    grammar_st = PgenParser.parseFile(basil.lang.python.__path__[0] +
                                      "/python26/Grammar")
    grammar_obj = PyPgen.buildParser(grammar_st)
    grammar_obj.setStart(grammar_obj.stringToSymbolMap()['file_input'])
    self.parser1 = grammar_obj
    gram_tup0 = grammar_obj.toTuple()
    gram_tup1 = DFAParser.addAccelerators(gram_tup0)
    handlers = trampoline.pgen_grammar_to_handlers(gram_tup1, {})
    # Override the start special nonterminal to just do what it is
    # supposed to:
    def parse_start (instream, outtree):
        yield 'file_input'
    handlers['start'] = parse_start
    self.handlers = handlers
Example #5
def main (*args):
    """main() - Unit test routine for the PushdownAutomaton module."""
    from basil.lang.python import DFAParser
    from basil.parsing import PgenParser, PyPgen
    import sys, getopt
    # ____________________________________________________________
    opts, args = getopt.getopt(args, "o:")
    outFile = sys.stdout
    for optFlag, optArg in opts:
        if optFlag == "-o":
            outFile = open(optArg, "w")
    argc = len(args)
    if argc > 1:
        print "Usage:\n\tPushdownAutomaton.py [opts] <file.pgen>\n"
        sys.exit(-1)
    elif argc == 1:
        srcFile = args[0]
    else:
        srcFile = "./tests/test.pgen"
    grammarST = PgenParser.parseFile(srcFile)
    parser = PyPgen.buildParser(grammarST)
    grammarTup = parser.grammarObj
    # ____________________________________________________________
    # Round trip test
    myGrammarObj = PushdownGrammar(grammarTup)
    myGrammarObj.compareToTuple(grammarTup)
    grammarTup2 = myGrammarObj.toTuple()
    myGrammarObj.compareToTuple(grammarTup2)
    # Now with accelerators...
    grammarTup3 = DFAParser.addAccelerators(grammarTup)
    myGrammarObj2 = PushdownGrammar(grammarTup3)
    myGrammarObj2.compareToTuple(grammarTup3)
    grammarTup4 = myGrammarObj2.toTuple()
    myGrammarObj2.compareToTuple(grammarTup4)
    # ____________________________________________________________
    # Output tests.
    outFile.write("%s\n" % myGrammarObj2.toString("graphviz"))
    if outFile != sys.stdout:
        outFile.close()
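
The snippet does not show how main() is invoked; a conventional entry point (assumed, not part of the excerpt) passes the command-line arguments through positionally:

if __name__ == "__main__":
    import sys
    main(*sys.argv[1:])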
Example #6
"""

MY_START_SYMBOL = 'file_input'

pgen = PyPgen.PyPgen()
py_grammar_path = os.path.split(basil.lang.python.__file__)[0]
py_nfa_grammar = pgen.handleStart(PgenParser.parseFile(
    os.path.join(py_grammar_path, 'python26/Grammar')))
ext_nfa_grammar = pgen.handleStart(PgenParser.parseString(MY_GRAMMAR_EXT))
my_nfa_grammar = nfa.compose_nfas(py_nfa_grammar, ext_nfa_grammar)
my_grammar0 = pgen.generateDfaGrammar(my_nfa_grammar, MY_START_SYMBOL)
pgen.translateLabels(my_grammar0, {'QUOTED' : mylexer.QUOTED})
pgen.generateFirstSets(my_grammar0)
my_grammar0[0] = map(tuple, my_grammar0[0])
my_grammar0 = tuple(my_grammar0)
my_grammar = DFAParser.addAccelerators(my_grammar0)
del my_grammar0

__DEBUG__ = False

if __DEBUG__:
    import pprint

# ______________________________________________________________________
# Class and function definitions

class MyComposedParser (object):
    def __init__ (self):
        global my_grammar
        self.handlers = trampoline.pgen_grammar_to_handlers(my_grammar, {})
        self.handlers['start'] = self.parse_start
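    # Hedged sketch: the excerpt ends before parse_start is defined.  A minimal
    # version, mirroring the generator handler shown in Example #4, might look
    # like this (the real method in the project may differ):
    def parse_start (self, instream, outtree):
        yield MY_START_SYMBOL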