def parse(seq):
	"""Returns the AST of the given token sequence."""
	global depth
	unarg = lambda f: lambda x: f(*x)

	tokval = lambda x: x.value # returns the value of a token
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	paren = lambda s: a(Token('Parentheses', s)) >> tokval # return the value if token is the given parenthesis
	paren_ = lambda s: skip(paren(s)) # checks if token is the given parenthesis and ignores it

	def application(z, list):
		return reduce(lambda s, x: Application(s, x), list, z)

	depth = 0
	variable = lambda x: Variable(str(x)+":"+str(depth))
	def abstraction(x):
		global depth
		abst = Abstraction(str(x[0])+":"+str(depth), x[1])
		depth += 1
		return abst
	
	variable = toktype('Name') >> variable
	term = variable | with_forward_decls(lambda: paren_('(') + exp + paren_(')')) | \
		with_forward_decls(lambda: skip(toktype('Lambda')) + toktype('Name') + \
			skip(toktype('Dot')) + exp >> abstraction)

	exp = term + many(term) >> unarg(application)

	return exp.parse(seq)
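The AST constructors (Variable, Abstraction, Application) and the Token class are assumed to come from the surrounding module. A minimal sketch of how this parser might be driven, using funcparserlib's Token and hand-built tokens for the term \x. x y (the class bodies are assumptions; only the names and token types come from the grammar above):

from funcparserlib.lexer import Token  # the same Token(type, value) used in the grammar

class Variable(object):
    def __init__(self, name): self.name = name

class Abstraction(object):
    def __init__(self, param, body): self.param, self.body = param, body

class Application(object):
    def __init__(self, fun, arg): self.fun, self.arg = fun, arg

# tokens a lexer would emit for "\x. x y"
tokens = [Token('Lambda', '\\'), Token('Name', 'x'), Token('Dot', '.'),
          Token('Name', 'x'), Token('Name', 'y')]
ast = parse(tokens)  # an Abstraction whose body is an Application of the two Variables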
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, (f, x): f(s, x), list, z)
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t

	makeop = lambda s, f: op(s) >> const(f)
	or_op = makeop('|', Or)
	
	char = with_forward_decls(lambda:
		toktype('Char') >> Char | op_('(') + exp + op_(')'))
	star = char + op_('*') >> Star | char

	lst2_exp = star + many(star) >> unarg(lst)
	lst_exp = lst2_exp >> Lst

	exp = lst_exp + many(or_op + lst_exp) >> unarg(eval_expr)

	return exp.parse(seq)
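Char, Star, Or, and Lst are AST constructors defined elsewhere in the original module. A hedged usage sketch for the pattern ab|c*, with hand-built tokens:

from funcparserlib.lexer import Token

tokens = [Token('Char', 'a'), Token('Char', 'b'),
          Token('Op', '|'),
          Token('Char', 'c'), Token('Op', '*')]
ast = parse(tokens)  # roughly Or(Lst([Char('a'), Char('b')]), Lst([Star(Char('c'))]))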
def parse(seq):
    """
    Parses the list of tokens and generates an AST.
    """
    def eval_expr(z, list):
        return reduce(lambda s, (f, x): f(s, x), list, z)
    unarg = lambda f: lambda x: f(*x)
    tokval = lambda x: x.value # returns the value of a token
    toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
    const = lambda x: lambda _: x # like ^^^ in Scala

    op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
    op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it

    lst = lambda x: [x[0],] + x[1]
    tup = lambda x: (x[0], x[1])

    makeop = lambda s, f: op(s) >> const(f)

    add = makeop('+', Add)
    sub = makeop('-', Sub)
    mul = makeop('*', Mul)
    div = makeop('/', Div)

    lt = makeop('<', Lt)
    gt = makeop('>', Gt)
    eq = makeop('=', Eq)

    operation = add | sub | mul | div | lt | gt | eq

    decl = with_forward_decls(lambda:toktype('Var') + op_('=') + (exp | fun) >> tup)
    decls = decl + many(skip(toktype('Semicolon')) + decl) >> lst
    variable = toktype('Var') >> Variable
    variables = variable + many(skip(toktype('Comma')) + variable) >> lst
    fun = with_forward_decls(lambda: skip(toktype('Fun')) + variables + skip(toktype('Arrow')) + exp + skip(toktype('End'))) >> unarg(Fun)
    parameters = with_forward_decls(lambda: exp + many(skip(toktype('Comma')) + exp) >> lst)
    call = skip(toktype('Call')) + (fun | variable) + skip(toktype('Lp')) + parameters + skip(toktype('Rp')) >> unarg(Call)
    ex = with_forward_decls(lambda:variable | toktype('Number') >> (lambda x: Const(int(x))) |\
        toktype('True') >> (lambda x: Const(True)) | toktype('False') >> (lambda x: Const(False)) |\
        skip(toktype('Let')) + decls + skip(toktype('In')) + exp + skip(toktype('End')) >> unarg(Let) |\
        skip(toktype('If')) + exp + skip(toktype('Then')) + exp + maybe(skip(toktype('Else')) + exp) + skip(toktype('Fi')) >> unarg(If) |\
        fun | call)
    exp = ex + many(operation + ex) >> unarg(eval_expr)
    prog = skip(toktype('Prog')) + exp >> Prog

    return prog.parse(seq)
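The node constructors (Prog, Let, If, Fun, Call, Const, Variable, Add, ...) are assumed to be defined alongside this parser. A small usage sketch for the program prog 1 + 2, with tokens built by hand rather than by the original lexer:

from funcparserlib.lexer import Token

tokens = [Token('Prog', 'prog'),
          Token('Number', '1'), Token('Op', '+'), Token('Number', '2')]
ast = parse(tokens)  # roughly Prog(Add(Const(1), Const(2)))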
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, (f, x): f(s, x), list, z)
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t
	call = lambda x: Call(x[0], x[1])

	makeop = lambda s, f: op(s) >> const(f)

	add = makeop('+', Plus)
	sub = makeop('-', Minus)
	mul = makeop('*', Times)
	div = makeop('/', Div)

	def make_const(i):
		return Const(int(i)) # unused helper: builds a Const node from a numeric token

	number = toktype('Number') >> Const

	mul_op = mul | div
	add_op = add | sub

	factor = with_forward_decls(lambda:
		number | op_('(') + exp + op_(')') | call)
	term = factor + many(mul_op + factor) >> unarg(eval_expr)
	exp = term + many(add_op + term) >> unarg(eval_expr)
	exp_lst = with_forward_decls(lambda:
		exp + many(op_(',') + exp) >> unarg(lst))
	call = toktype('Name') + op_('(') + exp_lst + op_(')') >> call

	return exp.parse(seq)
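A token stream for this calculator grammar can be produced with funcparserlib's own lexer; the spec below is an illustrative assumption (funcparserlib 0.3.x API), not part of the original snippet:

from funcparserlib.lexer import make_tokenizer

specs = [
    ('Space',  (r'[ \t\r\n]+',)),
    ('Number', (r'[0-9]+',)),
    ('Name',   (r'[A-Za-z_][A-Za-z_0-9]*',)),
    ('Op',     (r'[+\-*/(),]',)),
]
tokenize = lambda s: [t for t in make_tokenizer(specs)(s) if t.type != 'Space']

ast = parse(tokenize('2 * (3 + 4)'))  # roughly Times(Const('2'), Plus(Const('3'), Const('4')))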
def parse(tokens):
  ## building blocks
  kw_priority = some(toktype("kw_priority"))
  kw_probability = some(toktype("kw_probability"))
  kw_reaction = some(toktype("kw_reaction"))
  kw_exists = some(toktype("kw_exists"))
  kw_as = some(toktype("kw_as"))
  op_tilde = some(toktype("op_tilde"))
  op_priority_maximal = some(toktype("op_priority_maximal"))
  op_production = some(toktype("op_production"))
  atom = some(toktype("name"))
  number = some(toktype("number"))
  dissolve = some(toktype("op_dissolve"))
  osmose = some(toktype("op_osmose"))
  osmose_location = some(toktype("op_osmose_location"))
  env_open = some(toktype("env_open"))
  env_close = some(toktype("env_close"))
  membrane_open = some(toktype("membrane_open"))
  membrane_close = some(toktype("membrane_close"))
  
  ## grammar from the bottom up
  name = atom | number
  symbol = atom | (dissolve + maybe(name)) | (osmose + name + maybe(osmose_location + name))
  
  priority = kw_priority + op_tilde + name + op_priority_maximal + name
  
  reaction = (kw_reaction + maybe(kw_as + name) + op_tilde + 
             oneplus(name) + op_production + many(symbol))
  
  exists = kw_exists + op_tilde + oneplus(name)
  
  expr = (exists | reaction | priority)
  
  statement = with_forward_decls(lambda: membrane | expr) >> Statement
  
  body = maybe(name) + many(statement)
  
  membrane = (skip(membrane_open) + body + skip(membrane_close)) >> Membrane
  env = (skip(env_open) + body + skip(env_close)) >> Environment
  
  program = many(env) + skip(finished) >> Program
  
  return program.parse(tokens)
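Note that toktype in this snippet is used as a predicate passed to some(), unlike the parser-returning toktype of the earlier examples; Statement, Membrane, Environment, and Program are AST constructors from the surrounding module. A plausible sketch of the missing helper (an assumption):

# predicate form: some(toktype('name')) matches any token whose type is 'name'
toktype = lambda t: lambda tok: tok.type == t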
def parse(tokens):
  var = some(toktype("name")) | some(toktype("number"))

  open_form = some(toktype("form_open"))
  close_form = some(toktype("form_close"))
  op_lambda = some(toktype("op_lambda"))
  op_map = some(toktype("op_map"))

  prim_bind = some(toktype("kw_bind"))
  prim_halt = some(toktype("kw_halt"))

  exp = with_forward_decls(lambda: lam | var | prim_exp | exprn) >> Expression
  lam = open_form + op_lambda + many(var) + op_map + oneplus(exp) + close_form >> Lambda
  bind_exp = open_form + prim_bind + var + lam + close_form
  halt_exp = open_form + prim_halt + exp + close_form
  prim_exp = bind_exp | halt_exp
  exprn = open_form + oneplus(exp) + close_form >> Form

  prog = many(exp) + skip(finished) >> Program
  
  return prog.parse(tokens)
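This grammar relies on the same predicate-style toktype and on Expression, Lambda, Form, and Program constructors defined elsewhere. A usage sketch with hand-built tokens for the form (lambda x => x); the token values are assumptions, the token types come from the parser above:

from funcparserlib.lexer import Token

tokens = [Token('form_open', '('), Token('op_lambda', 'lambda'),
          Token('name', 'x'), Token('op_map', '=>'),
          Token('name', 'x'), Token('form_close', ')')]
ast = parse(tokens)  # a Program holding one Expression that wraps a Lambda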
#@+node:peckj.20140124085532.4003: *3* grammar
posnumber = (some(lambda tok: tok.type == 'NUMBER') >> tokval >> make_number)

add = makeop('+', operator.add)
sub = makeop('-', operator.sub)
mul = makeop('*', operator.mul)
div = makeop('/', operator.div)
pow = makeop('**', operator.pow)

negnumber = (sub + posnumber) >> negate
number = posnumber | negnumber

mul_op = mul | div
add_op = add | sub

primary = with_forward_decls(lambda: number | (op_('(') + expr + op_(')')))
factor = primary + many(pow + primary) >> f
term = factor + many(mul_op + factor) >> f
expr = term + many(add_op + term) >> f

endmark = a(Token(token.ENDMARKER, ''))
end = skip(endmark + finished)
toplevel = maybe(expr) + end


#@+node:peckj.20140124085532.4012: *3* parse
def parse(tokens):
    return toplevel.parse(tokens)


parse_and_run = lambda x: parse(tokenize(x))
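This Leo node holds only the grammar and the entry points; tokval, make_number, negate, f, makeop, op_, and tokenize live in sibling nodes that are not shown here. Assuming those helpers fold each parsed operator onto the operator.* functions bound above, usage is roughly:

result = parse_and_run('2 * (3 + 4) ** 2')  # ~ 98, assuming f folds operands with the bound operator functions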
def make_expr(args):  # reconstructed head (assumption): args = (item, mark, rest) from `expr` below
    item, mark, rest = args
    if rest is None:
        return item
    larg, fun, rarg = item, rest[0], rest[1]
    return Function(fun, [larg, rarg])


#
# Parser.
#

lparen = some(lambda tok: tok == "(")
rparen = some(lambda tok: tok == ")")
op = some(lambda tok: tok in "+-*^&|")
eof = some(lambda tok: tok == EOF)
number = some(lambda tok: tok.isdigit()) >> make_number
paren_expr = with_forward_decls(
    lambda: lparen + expr + rparen
)

# The *Mark values here are not strictly required, but if you are going to do
# anything complex that requires you to discern between different
# parsing paths, marks will often give you the least hassle.
expr = with_forward_decls(
    lambda:
    (number + pure(NumberMark) + expr_rest |
     paren_expr + pure(ParenMark) + expr_rest) >> make_expr)

# This one allows us to add more complex expressions, such as function
# application and ternary operators, to the above definition with ease.
# Otherwise, terms such as `apply = expr lparen many(expr) rparen`
# would be impossible to add, always leading to infinite left recursion.
expr_rest = maybe(op + expr)
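# For example, a function-application form can be added by extending expr_rest
# instead of expr, so that `expr` never appears in the leftmost position
# (an illustrative sketch, not part of the original grammar):
#
#     expr_rest = maybe((op + expr) | (lparen + many(expr) + rparen))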
    Return a parser that tries to parse p, and raises a CustomParseError
    when it fails.
    """
    @Parser
    def _try_p(tokens, s):
        try:
            return p.run(tokens, s)
        except NoParseError as err:
            raise CustomParseError(msg, err)

    return _try_p


# Grammar starts here
test = with_forward_decls(
    lambda: choice([or_test + maybe(IF + or_test + ELSE + test), lambdef]))
not_test = with_forward_decls(lambda: (NOT + not_test) | comparison)
and_test = sep_by(not_test, AND)
or_test = sep_by(and_test, OR)
test_nocond = with_forward_decls(lambda: or_test | lambdef_nocond)
testlist = with_forward_decls(lambda: sep_by(test))

atom = with_forward_decls(lambda: choice([
    between(yield_expr | testlist_comp, empty=True),
    between(testlist_comp, op_('['), op_(']'), empty=True),
    between(dictorsetmaker, op_('{'), op_('}'), empty=True), IDENTIFIER,
    NUMBER, STRING + many(STRING), ELLIPSIS, NONE, TRUE, FALSE
]))

sliceop = COLON + maybe(test)
subscript = test | (maybe(test) + COLON + maybe(test) + maybe(sliceop))
def parse(seq):
    """
    Parses the list of tokens and generates an AST.
    """
    def eval_expr(z, list):
        return reduce(lambda s, (f, x): f(s, x), list, z)
    unarg = lambda f: lambda x: f(*x)
    tokval = lambda x: x.value # returns the value of a token
    toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
    const = lambda x: lambda _: x # like ^^^ in Scala
    eval_cond = lambda x: x[1](x[0], x[2])

    op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
    op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it

    ident = lambda s: a(Token('Ident', s)) >> tokval # return the value if token is the given identifier
    ident_ = lambda s: skip(ident(s)) # checks if token is the given identifier and ignores it

    lst = lambda x: [x[0],] + x[1]

    makeop = lambda s, f: op(s) >> const(f)

    add = makeop('+', Add)
    sub = makeop('-', Sub)
    mul = makeop('*', Mul)
    div = makeop('/', Div)
    mod = makeop('%', Mod)

    lt = makeop('<', Lt)
    gt = makeop('>', Gt)
    eq = toktype('Eq') >> const(Eq)
    orop = toktype('Or') >> const(Or)
    andop = toktype('And') >> const(And)
    neq = toktype('Neq') >> const(Neq)
    notop = toktype('Not') >> const(Not)
    le = toktype('Le') >> const(Le)
    ge = toktype('Ge') >> const(Ge)

    point_op = mul | div | mod
    line_op = add | sub
    comp_op = lt | gt | eq | neq | notop | le | ge
    combinator = orop | andop

    empty_fun = lambda x: ()

    heap_assign = with_forward_decls(lambda: pointer + skip(toktype('Assign')) + exp >> unarg(HeapAssign))
    assign = with_forward_decls(lambda: toktype('Ident') + skip(toktype('Assign')) + exp >> unarg(Assignment))
    ifexp = with_forward_decls(lambda: ident_('if') + cond + cmd + \
                                       maybe(ident_('else') + cmd) >> unarg(IfThenElse))
    whileexp = with_forward_decls(lambda: ident_('while') + cond + cmd >> unarg(While))
    printexp = with_forward_decls(lambda: ident_('print') + exp >> Print)
    vardef = with_forward_decls(lambda: ident_('var') + toktype('Ident') + op_('=') + exp \
                                        >> unarg(Declaration))
    arglst = with_forward_decls(lambda: exp + many(skip(toktype('Comma')) + exp) >> lst)
    returnexp = with_forward_decls(lambda: ident_('return') + exp)
    call = with_forward_decls(lambda: dot + skip(toktype('Lp') + toktype('Rp')) >> Call | \
                                      dot + skip(toktype('Lp')) + arglst + skip(toktype('Rp')) >> unarg(Call))
    args = toktype('Ident') + many(skip(toktype('Comma')) + toktype('Ident')) >> lst
    decl = with_forward_decls(lambda: toktype('Ident') + op_('=') + exp >> unarg(Declaration))
    decls = decl + many(skip(toktype('Semicolon')) + decl) >> lst

    cmd = with_forward_decls(lambda: call | returnexp | assign | ifexp | whileexp | printexp | vardef | heap_assign | \
                                        skip(toktype('Lb')) + cmd_list + skip(toktype('Rb')))
    cmd_list = (cmd + many(skip(toktype('Semicolon')) + cmd) >> lst) >> CmdList

    variable = toktype('Ident') >> Variable
    dotop = toktype('Dot') >> const(Dot)
    dot = variable  + many(dotop + toktype('Ident')) >> unarg(eval_expr)
    pointer = with_forward_decls(lambda: op_('*') + exp >> Pointer)
    constexp = toktype('Number') >> Const
    string = toktype('String') >> String
    factor = with_forward_decls(lambda: dot | constexp | pointer | string | \
                                        skip(toktype('Lp')) + exp + skip(toktype('Rp')))
    summand = factor + many(point_op + factor) >> unarg(eval_expr)
    function = ident_('function') + (skip(toktype('Lp')) +  skip(toktype('Rp')) >> empty_fun | \
                skip(toktype('Lp')) + args + skip(toktype('Rp'))) + skip(toktype('Lb')) + cmd_list \
               + skip(toktype('Rb')) >> unarg(Function)
    objectexp = ident_('object') + skip(toktype('Lb')) + decls + skip(toktype('Rb')) >> Object

    exp = with_forward_decls(lambda: objectexp | function | call | \
                                     summand + many(line_op + summand) >> unarg(eval_expr) | cond)

    cond = (exp + comp_op + exp >> eval_cond) + many(combinator + (exp + comp_op + exp >> eval_cond)) >> unarg(eval_expr)

    return cmd.parse(seq)
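A usage sketch for this parser: the AST classes (Print, Add, Const, and the rest) and the real lexer are assumed to exist elsewhere, and the tokens below are built by hand for the command print 1 + 2:

from funcparserlib.lexer import Token

tokens = [Token('Ident', 'print'),
          Token('Number', '1'), Token('Op', '+'), Token('Number', '2')]
ast = parse(tokens)  # roughly Print(Add(Const('1'), Const('2')))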