Example #1
def pgen_to_grammar_obj (source):
    pgen = PyPgen.PyPgen()
    # Parse the pgen grammar source and build an NFA for each nonterminal.
    nfa_grammar = pgen.handleStart(PgenParser.parseString(source))
    # Convert the NFA grammar into a DFA-based grammar tuple.
    dfa_grammar = pgen.generateDfaGrammar(nfa_grammar)
    # Resolve symbolic labels to token/symbol numbers and compute FIRST sets.
    pgen.translateLabels(dfa_grammar)
    pgen.generateFirstSets(dfa_grammar)
    # Add accelerator tables so DFAParser can parse with the grammar.
    dfa_grammar = DFAParser.addAccelerators(dfa_grammar)
    return dfa_grammar
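
A rough usage sketch (not part of the original snippet): the grammar tuple returned above can drive DFAParser.parsetok the same way Examples #2 and #8 do. The import paths follow Example #6, the StdTokenizer location is an assumption, and the grammar file, source file, and start symbol 257 are placeholders taken from Example #2.

from basil.parsing import PgenParser, PyPgen
from basil.lang.python import DFAParser
from basil.lang.python import StdTokenizer  # assumed location of StdTokenizer

grammar = pgen_to_grammar_obj(open("test.pgen").read())   # placeholder grammar file
src = open("example.py")                                  # placeholder source to parse
tokenizer = StdTokenizer.StdTokenizer("example.py", src.readline)
parse_tree = DFAParser.parsetok(tokenizer, grammar, 257)  # 257 = start symbol used in Example #2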
Example #2
def main():
    graphical = False
    if "-g" in sys.argv:
        from basil.visuals.TreeBox import showTree

        graphical = True
    # ____________________________________________________________
    grammarST0 = PgenParser.parseFile("test.pgen")
    grammarST1Obj = pgen.metaParser.parseFile("test.pgen")
    grammarST1 = grammarST1Obj.toTuple()
    print "Preliminaries:", grammarST0 == grammarST1
    if graphical:
        tk = showTree(grammarST0)
        showTree(grammarST1, tk)
    pgenObj = PyPgen.PyPgen()
    parser0 = pgenObj(grammarST0)
    parser1 = pgenObj(grammarST1)
    parser2Obj = pgen.buildParser(grammarST1Obj)
    parser2 = parser2Obj.toTuple()
    print "Test 1:", parser0 == parser1
    print "Test 2:", parser0 == parser2
    # __________________________________________________
    # The funny part of this sequence is: if the previous tests worked, isn't
    # it reasonable to assume the next test is given?  (i.e. the inputs are
    # structurally identical!)
    testPythonFile = "../PyPgen.py"
    fileObj = open(testPythonFile)
    tokenizer0 = StdTokenizer.StdTokenizer(testPythonFile, fileObj.readline)
    parse0 = DFAParser.parsetok(tokenizer0, parser0, 257)
    # __________________________________________________
    fileObj.seek(0)
    tokenizer1 = StdTokenizer.StdTokenizer(testPythonFile, fileObj.readline)
    parse1 = DFAParser.parsetok(tokenizer1, parser1, 257)
    # __________________________________________________
    fileObj.seek(0)
    tokenizer2 = StdTokenizer.StdTokenizer(testPythonFile, fileObj.readline)
    parse2 = DFAParser.parsetok(tokenizer2, parser2, 257)
    # __________________________________________________
    print "Test 3:", parse0 == parse1
    print "Test 4:", parse0 == parse2
    if graphical:
        showTree(parse0, tk)
        showTree(parse1, tk)
        showTree(parse2, tk)
        tk.mainloop()
Example #3
def pgen_compose (pgen_st1, pgen_st2, start_symbol, additional_tokens = None):
    # Build NFA grammars from the two pgen parse trees, then merge them.
    nfa_grammar1 = pgen.handleStart(pgen_st1)
    nfa_grammar2 = pgen.handleStart(pgen_st2)
    nfa_composed = nfa.compose_nfas(nfa_grammar1, nfa_grammar2)
    # Convert the merged NFAs to DFAs, using start_symbol as the start rule.
    grammar3 = pgen.generateDfaGrammar(nfa_composed, start_symbol)
    pgen.translateLabels(grammar3, additional_tokens)
    pgen.generateFirstSets(grammar3)
    # Convert the inner DFA lists to tuples before building accelerators.
    grammar3[0] = map(tuple, grammar3[0])
    return DFAParser.addAccelerators(tuple(grammar3))
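
A hedged sketch of how pgen_compose might be called, modeled on the composition flow in Example #7. The base Grammar path mirrors that example; the extension grammar text, the QUOTED token name, and its numeric code are purely hypothetical placeholders.

import os
import basil.lang.python
from basil.parsing import PgenParser

# Base grammar: the bundled Python 2.6 Grammar file, as in Example #7.
py_grammar_path = os.path.split(basil.lang.python.__file__)[0]
base_st = PgenParser.parseFile(os.path.join(py_grammar_path, 'python26/Grammar'))

# Hypothetical one-rule extension grammar; contents are illustrative only.
MY_GRAMMAR_EXT = "my_stmt: QUOTED NEWLINE\n"
ext_st = PgenParser.parseString(MY_GRAMMAR_EXT)

# 'QUOTED' and 1000 stand in for an extra token name and number supplied by a custom lexer.
composed_grammar = pgen_compose(base_st, ext_st, 'file_input',
                                additional_tokens={'QUOTED': 1000})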
Example #4
def pgen_to_grammar_obj (source):
    '''XXX Stolen from PyCon 2010 sprint sandbox.  Move into Basil proper.'''
    pgen = PyPgen.PyPgen()
    nfa_grammar = pgen.handleStart(PgenParser.parseString(source))
    dfa_grammar = pgen.generateDfaGrammar(nfa_grammar)
    pgen.translateLabels(dfa_grammar)
    pgen.generateFirstSets(dfa_grammar)
    dfa_grammar = DFAParser.addAccelerators(dfa_grammar)
    return dfa_grammar
Example #5
def setUp (self):
    # Parse the MyFront grammar, create a set of automata for it (like
    # pgen does), and then convert the automata to generators for the
    # treepoline.
    grammar_st = PgenParser.parseFile(basil.lang.python.__path__[0] +
                                      "/python26/Grammar")
    grammar_obj = PyPgen.buildParser(grammar_st)
    grammar_obj.setStart(grammar_obj.stringToSymbolMap()['file_input'])
    self.parser1 = grammar_obj
    gram_tup0 = grammar_obj.toTuple()
    gram_tup1 = DFAParser.addAccelerators(gram_tup0)
    handlers = trampoline.pgen_grammar_to_handlers(gram_tup1, {})
    # Override the start special nonterminal to just do what it is
    # supposed to:
    def parse_start (instream, outtree):
        yield 'file_input'
    handlers['start'] = parse_start
    self.handlers = handlers
Example #6
def main (*args):
    """main() - Unit test routine for the PushdownAutomaton module."""
    from basil.lang.python import DFAParser
    from basil.parsing import PgenParser, PyPgen
    import sys, getopt
    # ____________________________________________________________
    opts, args = getopt.getopt(args, "o:")
    outFile = sys.stdout
    for optFlag, optArg in opts:
        if optFlag == "-o":
            outFile = open(optArg, "w")
    argc = len(args)
    if argc > 1:
        print "Usage:\n\tPushdownAutomaton.py [opts] <file.pgen>\n"
        sys.exit(-1)
    elif argc == 1:
        srcFile = args[0]
    else:
        srcFile = "./tests/test.pgen"
    grammarST = PgenParser.parseFile(srcFile)
    parser = PyPgen.buildParser(grammarST)
    grammarTup = parser.grammarObj
    # ____________________________________________________________
    # Round trip test
    myGrammarObj = PushdownGrammar(grammarTup)
    myGrammarObj.compareToTuple(grammarTup)
    grammarTup2 = myGrammarObj.toTuple()
    myGrammarObj.compareToTuple(grammarTup2)
    # Now with accelerators...
    grammarTup3 = DFAParser.addAccelerators(grammarTup)
    myGrammarObj2 = PushdownGrammar(grammarTup3)
    myGrammarObj2.compareToTuple(grammarTup3)
    grammarTup4 = myGrammarObj2.toTuple()
    myGrammarObj2.compareToTuple(grammarTup4)
    # ____________________________________________________________
    # Output tests.
    outFile.write("%s\n" % myGrammarObj2.toString("graphviz"))
    if outFile != sys.stdout:
        outFile.close()
Example #7
"""

MY_START_SYMBOL = 'file_input'

pgen = PyPgen.PyPgen()
py_grammar_path = os.path.split(basil.lang.python.__file__)[0]
py_nfa_grammar = pgen.handleStart(PgenParser.parseFile(
    os.path.join(py_grammar_path, 'python26/Grammar')))
ext_nfa_grammar = pgen.handleStart(PgenParser.parseString(MY_GRAMMAR_EXT))
my_nfa_grammar = nfa.compose_nfas(py_nfa_grammar, ext_nfa_grammar)
my_grammar0 = pgen.generateDfaGrammar(my_nfa_grammar, MY_START_SYMBOL)
pgen.translateLabels(my_grammar0, {'QUOTED' : mylexer.QUOTED})
pgen.generateFirstSets(my_grammar0)
my_grammar0[0] = map(tuple, my_grammar0[0])
my_grammar0 = tuple(my_grammar0)
my_grammar = DFAParser.addAccelerators(my_grammar0)
del my_grammar0

__DEBUG__ = False

if __DEBUG__:
    import pprint

# ______________________________________________________________________
# Class and function definitions

class MyComposedParser (object):
    def __init__ (self):
        global my_grammar
        self.handlers = trampoline.pgen_grammar_to_handlers(my_grammar, {})
        self.handlers['start'] = self.parse_start
Example #8
def parseTokens (self, tokenizer):
    """PyPgenParser.parseTokens
    Method that takes a tokenizer and the current DFA and returns a parse
    tree.
    """
    return DFAParser.parsetok(tokenizer, self.grammarObj, self.start)
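
A small driver sketch for parseTokens, stitched together from the buildParser and StdTokenizer usage in Examples #2, #5, and #6; the StdTokenizer import path is an assumption and the file names are placeholders.

from basil.parsing import PgenParser, PyPgen
from basil.lang.python import StdTokenizer  # assumed location of StdTokenizer

grammar_st = PgenParser.parseFile("test.pgen")                      # placeholder .pgen grammar file
parser = PyPgen.buildParser(grammar_st)                             # parser object providing parseTokens()
parser.setStart(parser.stringToSymbolMap()['file_input'])           # as in Example #5
src = open("example.py")                                            # placeholder source file to parse
tokenizer = StdTokenizer.StdTokenizer("example.py", src.readline)
parse_tree = parser.parseTokens(tokenizer)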