Example #1
        W,
    ]

    # Complex sentence forms: grouping, negation, and the binary connectives.
    Complex = [
        ('(', Sentence, ')'),       # parenthesized sentence
        ('[', Sentence, ']'),       # bracketed sentence
        (NEG, Sentence),            # negation
        (Sentence, CON, Sentence),  # conjunction
        (Sentence, DIS, Sentence),  # disjunction
        (Sentence, IMP, Sentence),  # implication
        (Sentence, IFF, Sentence),  # biconditional
    ]


# pprint(E)

# g = Grammar(*E)
# pprint(g)
# Inspect the lexical patterns collected for E, then build the LALR parser.
pprint(E.lex2pats)
p = LALR(E)

# pprint([*p.lexer.tokenize('True & False', True)])
# pprint(p.parse('P & Q | R & !S'))

# Persist the generated parser to a module file, then load it back.
s = p.dump('meta_dumps.py')
p1 = LALR.load('meta_dumps.py', globals())

# print(s)

pprint(p1.parse('P & Q | R & !S'))
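
# The dump/load pair above persists the generated parser so a later run can
# reuse it without rebuilding the LALR tables. A minimal round-trip check
# (an added assumption, not part of the original example):
tree_a = p.parse('P & Q | R & !S')
tree_b = p1.parse('P & Q | R & !S')
assert str(tree_a) == str(tree_b)  # the reloaded parser should give the same tree
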
Example #2
import preamble
from metaparse import LALR

pCalc = LALR()

lex = pCalc.lexer
rule = pCalc.rule

context = {}  # symbol table updated by the `assign` rule below

# lex(<terminal-symbol> = <pattern>)
lex(IGNORED=r'\s+')
lex(NUM=r'[0-9]+')
lex(EQ=r'=')
lex(ID=r'[_a-zA-Z]\w*')

# lex(... , p = <precedence>)
lex(POW=r'\*\*', p=3)
lex(POW=r'\^')  # No need to give the precedence twice for POW.
lex(MUL=r'\*', p=2)
lex(ADD=r'\+', p=1)


# @rule
# def <lhs> ( <rhs> ):
#     <semantics>
@rule
def assign(ID, EQ, expr):
    context[ID] = expr
    return expr


@rule
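# The listing is cut off at this decorator. A plausible continuation (an
# assumption, mirroring metaparse's documented calculator demo) is the rule
# that turns a NUM token into its integer value:
def expr(NUM):
    return int(NUM)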
Example #3
import preamble
from metaparse import LALR

calc = LALR()

table = {}  # symbol table updated by the `assign` rule below

with calc as (lex, rule):

    lex(IGNORED=r'\s+')

    @lex(NUM=r'[0-9]+')
    def NUM(val):
        # Convert the matched lexeme to an int before it reaches the rules.
        return int(val)

    lex(EQ=r'=')
    lex(ID=r'[_a-zA-Z]\w*')

    lex(POW=r'\*\*', p=3)
    lex(MUL=r'\*', p=2)
    lex(ADD=r'\+', p=1)
    lex(SUB=r'\-', p=1)

    @rule
    def assign(ID, EQ, expr):
        table[ID] = expr
        return expr

    @rule
    def expr(ID):
        return table[ID]

    @rule
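    # The listing is cut off at this decorator. A plausible continuation (an
    # assumption, not the original text) is the rule that turns a NUM token
    # into a value; NUM is already an int thanks to the @lex handler above.
    def expr(NUM):
        return NUM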
Example #4
import preamble
import metaparse as mp
from metaparse import LALR, END_TOKEN

p = LALR()

p.lexer.more(IGNORED=r' ', PLUS=r'\+', TIMES=r'\*', LEFT=r'\(', RIGHT=r'\)')


@p.lexer(NUMBER=r'\d+')
def _(val):
    # Convert the matched NUMBER lexeme to an int.
    return int(val)


@p.rule
def expr(expr, PLUS, term):
    return expr + term


@p.rule
def expr(term):
    return term


@p.rule
def term(term, TIMES, factor):
    return term * factor


@p.rule
def term(factor):
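    # The listing stops mid-definition; the conventional body of this unit
    # rule simply forwards the value (an assumption, not the original text):
    return factor


# A plausible completion (also an assumption, following the usual expression-
# grammar pattern): a factor is either a NUMBER token or a parenthesized expr.
@p.rule
def factor(NUMBER):
    return NUMBER


@p.rule
def factor(LEFT, expr, RIGHT):
    return expr


# With these rules in place, p.interpret('3 + 2 * 7') would be expected to
# evaluate to 17.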