Example #1
def test_non_halting_left_recursive():
    h1 = fwd()
    h1.define(x + h1)
    ok_(not non_halting(h1))

    h2 = fwd()
    h2.define(x + (h2 | x))
    ok_(not non_halting(h2))

    nh1 = fwd()
    nh1.define(nh1 + x)
    ok_(non_halting(nh1))

    nh2 = fwd()
    nh2.define(x | nh2)
    ok_(non_halting(nh2))

    nh3_fwd = fwd()
    nh3_fwd.define(nh3_fwd)
    nh3 = x + nh3_fwd + x
    ok_(non_halting(nh3))

    nh4 = fwd()
    nh4.define(maybe(x) + nh4 + x)
    ok_(non_halting(nh4))

    nh5 = fwd()
    nh5.define(many(x) + maybe(x) + nh5 + x)
    ok_(non_halting(nh5))

    h3 = fwd()
    h3.define(maybe(x) + many(x) + x + h3)
    ok_(not non_halting(h3))
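The test above exercises a non_halting() check for left-recursive grammars. A minimal sketch of the halting vs. non-halting shapes, using funcparserlib's public forward_decl API (fwd, x, and ok_ above are project-local helpers):

from funcparserlib.parser import a, forward_decl, many

x = a('x')                # parser that accepts the token 'x'
expr = forward_decl()     # placeholder so the rule can refer to itself
expr.define(x + many(x))  # consumes a token before recursing: halts
print(expr.parse(['x', 'x', 'x']))  # ('x', ['x', 'x'])
# expr.define(expr + x) would recurse without consuming any input first,
# which is exactly the left-recursive shape the test expects to be flagged.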
Example #2
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    value_flatten = lambda l: sum([[l[0]]] + list(l[1:]), [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t:
        t.type in ['Name', 'Number', 'Color', 'String']).named('id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    pair = (
        op_('(') + id + skip(maybe(op(','))) + id + op_(')')
        >> tuple)
    value = (id | pair)
    value_list = (
        value +
        many(op_(',') + value)
        >> value_flatten)
    a_list = (
        id +
        maybe(op_('=') + id) +
        skip(maybe(op(',')))
        >> unarg(Attr))
    attr_list = (
        many(op_('[') + many(a_list) + op_(']'))
        >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value_list >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (
        chart_attr
        | node_stmt
    )
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart_type = (
          n('p')   | n('pie')    | n('piechart')
        | n('p3')  | n('pie3d')  | n('piechart_3d')
        | n('lc')  | n('line')   | n('linechart')
        | n('lxy') | n('linechartxy')
        | n('bhs') | n('holizontal_barchart')
        | n('bvs') | n('vertical_barchart')
        | n('bhg') | n('holizontal_bargraph')
        | n('bvg') | n('vertical_bargraph')
        | n('v')   | n('venn')   | n('venndiagram')
        | n('s')   | n('plot')   | n('plotchart')
    )
    chart = (
        chart_type +
        maybe(id) +
        op_('{') +
        stmt_list +
        op_('}')
        >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
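A hand-run of the pair rule from the grammar above, with the relevant helpers repeated inline; Token comes from funcparserlib.lexer, and the token stream is an assumption standing in for the project's tokenizer:

from funcparserlib.lexer import Token
from funcparserlib.parser import a, maybe, skip, some

tokval = lambda x: x.value
op_ = lambda s: skip(a(Token('Op', s)))
ident = some(lambda t: t.type == 'Name') >> tokval
pair = op_('(') + ident + skip(maybe(a(Token('Op', ',')))) + ident + op_(')') >> tuple

toks = [Token('Op', '('), Token('Name', 'a'), Token('Op', ','),
        Token('Name', 'b'), Token('Op', ')')]
print(pair.parse(toks))  # ('a', 'b')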
Example #3
def comparison_():
    """Returns the parse for a compound compare statement"""
    ops = op('==') | op('<') | op('>') | op('<=') | op('>=') | op('!=')
    op_vals = (boolean | number | timestamp_or_string)
    comp_op = string + ops + op_vals >> make(ASTCompOp)

    def multi(func):
        """For x + many(x) lists, call func only when there are multiple xs"""
        def multi_(args):
            x, xs = args
            if len(xs) == 0:
                return x
            return func(args)

        return multi_

    comp_stmt = forward_decl()
    comp_base = forward_decl()
    comp_base.define((op_('(') + comp_stmt + op_(')')) | comp_op
                     | ((n('not') + comp_base) >> make(ASTCompNot)))
    comp_and = comp_base + many(n_('and') + comp_base) >> multi(
        make(ASTCompAnd))
    comp_or = comp_and + many(n_('or') + comp_and) >> multi(make(ASTCompOr))
    comp_stmt.define(comp_or)

    return comp_stmt
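The multi() wrapper only applies the AST constructor when many() matched at least one clause; a lone operand passes through unchanged. A small illustration, lifting multi() to module scope and using a plain tuple in place of make(ASTCompAnd):

def multi(func):
    def multi_(args):
        x, xs = args
        return x if len(xs) == 0 else func(args)
    return multi_

build = multi(lambda args: ('and', args))
print(build(('a', [])))          # 'a' (single operand, no wrapping)
print(build(('a', ['b', 'c']))) # ('and', ('a', ['b', 'c']))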
Example #5
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)  # Py3: no tuple-parameter unpacking
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t

	makeop = lambda s, f: op(s) >> const(f)
	or_op = makeop('|', Or)
	
	char = with_forward_decls(lambda:
		toktype('Char') >> Char | op_('(') + exp + op_(')'))
	star = char + op_('*') >> Star | char

	lst2_exp = star + many(star) >> unarg(lst)
	lst_exp = lst2_exp >> Lst

	exp = lst_exp + many(or_op + lst_exp) >> unarg(eval_expr)

	return exp.parse(seq)
Example #6
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    date = some(lambda t: t.type == 'Date').named('date') >> tokval
    id = some(lambda t: t.type in ['Name', 'Number', 'String']).named(
        'id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    term = date + op_('-') + date
    value = (id | term | date)
    a_list = (id + maybe(op_('=') + id) + skip(maybe(op(','))) >> unarg(Attr))
    attr_list = (many(op_('[') + many(a_list) + op_(']')) >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (chart_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart = (maybe(n('diagram')) + maybe(id) + op_('{') + stmt_list + op_('}')
             >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #7
def json_text():
    """Returns the parser for Json formatted data"""
    # Taken from https://github.com/vlasovskikh/funcparserlib/blob/master/funcparserlib/tests/json.py
    # and modified slightly
    unwrap = lambda x: x.value

    null = (n('null') | n('Null')) >> const(None) >> unwrap

    value = forward_decl()
    member = (string >> unwrap) + op_(u':') + value >> tuple
    object = (
        op_(u'{') +
        maybe(member + many(op_(u',') + member) + maybe(op_(','))) +
        op_(u'}')
        >> make_object)
    array = (
        op_(u'[') +
        maybe(value + many(op_(u',') + value) + maybe(op_(','))) +
        op_(u']')
        >> make_array)

    value.define(
        null
        | (true >> unwrap)
        | (false >> unwrap)
        | object
        | array
        | (number >> unwrap)
        | (string >> unwrap))
    json_text = object | array

    return json_text
Example #8
def parse(tokens):
    t = lambda s: some(lambda tok: tok.type == s)

    inttype      = t('Int')
    chartype     = t('Char')
    unsignedtype = t('Unsigned') 
    name         = t('Name')
    star         = t('Star')
    void         = t('Void')
    lpar         = skip(t('LPar'))
    rpar         = skip(t('RPar'))
    comma        = skip(t('Comma'))
    semicolon    = skip(t('SemiColon'))

    def collapse(x):
        if len(x[1]) > 0:
            # TODO: handle multiple stars
            return Token("UserTypePointer", x[0].value + " " + x[1][0].value)
        else:
            return Token("UserType", x[0].value)


    def make_func(x):
        return Token('Function', x.value)


    def make_type(x):
        if len(x) == 3: 
            return Token("UnsignedTypePointer", x)
        elif len(x) == 2: 
            if x[0].type == "Unsigned":
                return Token("UnsignedType", x)
            else:
                return Token("TypePointer", x)
        else:
            return Token("Type", x)


    udt      = name + many(star) >> collapse
    prim     = (inttype | chartype | unsignedtype + inttype | unsignedtype + chartype ) + many(star) >> make_type
    voidptr  = void + star + many(star)
    func     = name >> make_func

    accepted_types = voidptr | prim | udt

    # Return Type
    rettype = void | accepted_types 

    # Argument List
    decl      = accepted_types + name
    decl_list = decl + many(comma + decl)

    arg_list  = void | decl_list

    func_decl = rettype + func + lpar + arg_list + rpar + semicolon

    return func_decl.parse(tokens)
Example #9
def when_statement(tokens, state):
    kw = keyword
    parser = (kw("when") + expression + kw("then") +
              maybe(many(statement + op(";"))) + maybe(
                  many(
                      kw("elsewhen") + expression + kw("then") +
                      maybe(many(statement + op(";"))))) + kw("end") +
              kw("when")) >> WhenStatement
    return parser.run(tokens, state)
Example #10
def when_equation(tokens, state):
    kw = keyword
    parser = (kw("when") + expression + kw("then") +
              maybe(many(equation + op(";"))) + maybe(
                  many(
                      kw("elsewhen") + expression + kw("then") +
                      maybe(many(equation + op(";"))))) + kw("end") +
              kw("when")) >> WhenEquation
    return parser.run(tokens, state)
Example #11
def if_statement(tokens, state):
    kw = keyword
    parser = (kw("if") + expression + kw("then") +
              maybe(many(statement + op(";"))) + maybe(
                  many(
                      kw("elseif") + expression + kw("then") +
                      maybe(many(statement + op(";"))))) +
              maybe(kw("else") + maybe(many(statement + op(";")))) +
              kw("end") + kw("if")) >> IfStatement
    return parser.run(tokens, state)
Example #12
def if_equation(tokens, state):
    kw = keyword
    parser = (kw("if") + expression + kw("then") +
              maybe(many(equation + op(";"))) + maybe(
                  many(
                      kw("elseif") + expression + kw("then") +
                      maybe(many(equation + op(";"))))) +
              maybe(kw("else") + maybe(many(equation + op(";")))) + kw("end") +
              kw("if")) >> IfEquation
    return parser.run(tokens, state)
Example #13
def expression(tokens, state):
    kw = keyword
    parser = (simple_expression
              | kw("if") + expression + kw("then") + expression + maybe(
                  many(kw("elseif") + expression + kw("then") + expression)) +
              kw("else") + expression) >> Expression
    return parser.run(tokens, state)
Example #14
def separated(parser, by):
    def append(parsed):
        first = parsed[0]
        rest = parsed[1]
        rest.insert(0, first)
        return rest
    return ( parser + many(by + parser) ) >> append
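A sketch of separated() in use, with funcparserlib primitives and a pre-split character list standing in for a real token stream:

from funcparserlib.parser import a, skip

digit = a('1') | a('2') | a('3')
csv = separated(digit, skip(a(',')))
print(csv.parse(['1', ',', '2', ',', '3']))  # ['1', '2', '3']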
Example #15
File: __init__.py Project: nigef/pyta
def one_plus(p):
    """Parser(a, b) -> Parser(a, [b])

    Return a parser that applies the parser p one or more times.
    """
    q = p + many(p)
    return q.named('(%s , { %s })' % (p.name, p.name))
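A quick check of one_plus(), assuming funcparserlib's a() helper; the result keeps the head and the tail list separate:

from funcparserlib.parser import a

q = one_plus(a('x'))
print(q.parse(['x', 'x', 'x']))  # ('x', ['x', 'x'])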
Example #16
def parse(seq):
	"""Returns the AST of the given token sequence."""
	global depth
	unarg = lambda f: lambda x: f(*x)

	tokval = lambda x: x.value # returns the value of a token
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	paren = lambda s: a(Token('Parentheses', s)) >> tokval # return the value if token is Op
	paren_ = lambda s: skip(paren(s)) # checks if token is Op and ignores it

	def application(z, list):
		return reduce(lambda s, x: Application(s, x), list, z)

	depth = 0
	variable = lambda x: Variable(str(x)+":"+str(depth))
	def abstraction(x):
		global depth
		abst = Abstraction(str(x[0])+":"+str(depth), x[1])
		depth += 1
		return abst
	
	variable = toktype('Name') >> variable
	term = variable | with_forward_decls(lambda: paren_('(') + exp + paren_(')')) | \
		with_forward_decls(lambda: skip(toktype('Lambda')) + toktype('Name') + \
			skip(toktype('Dot')) + exp >> abstraction)

	exp = term + many(term) >> unarg(application)

	return exp.parse(seq)
Example #18
def parse(s):
    def value(t):
        return t.value

    def token(type):
        return some(lambda t: t.type == type)

    def instruction(name):
        return some(lambda t: t.type == 'instruction' and t.value == name)

    def unarg(f):
        return lambda args: f(*args)

    newline = token('newline')
    variable = token('variable') >> value
    number = token('number') >> value >> int
    increment = skip(instruction('inc')) + variable >> Inc
    decrement = skip(instruction('dec')) + variable >> Dec
    zero = (skip(instruction('zero')) + variable + number +
            skip(instruction('else')) + number >> unarg(Zero))
    stop = instruction('stop') >> (lambda x: Stop())

    instruction = increment | decrement | zero | stop
    top_level = many(instruction + skip(newline)) + skip(finished)

    tokens = tokenize(s)
    return top_level.parse(tokens)
Example #19
    def parse_identifier(line: str):
        """Parses just the identifer (first element) of the write"""
        tokval = lambda t: t.value
        joinval = "".join
        someToken = lambda type: some(lambda t: t.type == type)

        char = someToken('Char') >> tokval
        space = someToken('Space') >> tokval
        comma = someToken('Comma') >> tokval
        quote = someToken('Quote') >> tokval
        escape = someToken('Escape') >> tokval
        equal = someToken('Equal') >> tokval

        escape_space = skip(escape) + space >> joinval
        escape_comma = skip(escape) + comma >> joinval
        escape_equal = skip(escape) + equal >> joinval
        escape_escape = skip(escape) + escape >> joinval

        plain_int_text = someToken('Int') >> tokval
        plain_float_text = someToken('Float') >> tokval

        identifier = many(char | plain_float_text | plain_int_text |
                          escape_space | escape_comma | escape_equal |
                          escape_escape | quote) >> joinval

        toplevel = identifier >> (lambda x: x)
        parsed = toplevel.parse(LineTokenizer.tokenize(line))
        if len(parsed) == 0:
            raise NoParseError('parsed nothing')
        else:
            return parsed
Example #20
def parse(input):
    period = sometok("period")
    string = p.oneplus(sometok("string")) >> (lambda x: " ".join(x))
    number = sometok("number")

    title = string + p.skip(period) >> RecipeTitle
    ingredients_start = sometok("ingredients_start") + p.skip(period) >> IngredientStart

    dry_measure = p.maybe(sometok("measure_type")) + sometok("dry_measure")
    liquid_measure = sometok("liquid_measure")
    mix_measure = sometok("mix_measure")

    # is this valid ? 'g of butter', unit w/o initial_value
    ingredient = p.maybe(number) + p.maybe(dry_measure | liquid_measure | mix_measure) + string >> unarg(Ingredient)

    ingredients = p.many(ingredient)

    cooking_time = p.skip(sometok("cooking_time")) + (number >> unarg(CookingTime)) + p.skip(sometok("period"))

    oven_temp = p.skip(sometok("oven")) + p.many(number) + p.skip(sometok("oven_temp")) >> unarg(Oven)

    method_start = sometok("method_start") + p.skip(period)

    comment = p.skip(p.many(string | period))
    header = title + p.maybe(comment)

    instruction = (string + p.skip(period)) >> parse_instruction

    instructions = p.many(instruction)

    program = (method_start + instructions) >> unarg(MethodStart)

    serves = (sometok("serve") + number >> (lambda x: Serve("serve", x[1]))) + p.skip(period)

    ingredients_section = (ingredients_start + ingredients) >> unarg(IngredientSection)

    recipe = (
        header
        + p.maybe(ingredients_section)
        + p.maybe(cooking_time)
        + p.maybe(oven_temp)
        + p.maybe(program)
        + p.maybe(serves)
    ) >> RecipeNode

    main_parser = p.oneplus(recipe)
    return main_parser.parse(tokenize(input))
Example #21
def parse(tokens):
    '''Parses an SQL date range.

    Parses a list of Token objects to see if it's a valid SQL
    clause meeting the following conditions:
    An optional sequence of ANDed simple conditions ANDed with
    an optional sequence of ORed complex conditions.
    Where a simple condition is a date unit, a sign, and a date value.
    And a complex condition is any legal SQL combination of simple
    conditions ANDed or ORed together.
    Date unit: YYYY, MM, DD, HH, MIN
    Sign: <, <=, =, >=, >
    Date value: any integer value, with an optional leading zero

    Returns:
       True if the tokens represent a valid SQL date range, False otherwise.
    '''
    try:
        left_paren = some(lambda t: t.value in '(')
        right_paren = some(lambda t: t.value in ')')
        oper = some(lambda t: t.value in SIGNS)
        unit = some(lambda t: t.value in UNITS)
        padded_num = some(lambda t: t.code == 2) + some(
            lambda t: t.code == 2)  # hmmm, better way???
        raw_num = some(lambda t: t.code == 2)
        num = padded_num | raw_num
        cond = unit + oper + num

        endmark = a(Token(token.ENDMARKER, ''))
        end = skip(endmark + finished)

        ands = maybe(cond + maybe(many(a(Token(token.NAME, 'AND')) + cond)))
        or_ands = left_paren + ands + right_paren
        ors_without_ands = or_ands + maybe(
            many(a(Token(token.NAME, 'OR')) + or_ands))
        ors_with_ands = (a(Token(token.NAME, 'AND')) + left_paren + or_ands +
                         maybe(many(a(Token(token.NAME, 'OR')) + or_ands)) +
                         right_paren)
        ors = maybe(ors_without_ands | ors_with_ands)
        full = left_paren + ands + ors + right_paren + end

        full.parse(tokens)
    except NoParseError:
        return False
    except TokenError:
        return False
    return True
Example #22
def parse(constraints):
    """Using funcparserlib turn constraints into a mongo query

    NOTE: this uses functors, see:

    http://spb-archlinux.ru/2009/funcparserlib/Tutorial
    """
    tokval = lambda tok: tok.value
    char = lambda tok: tok.code == 'CHAR'
    chars = some(char) >> tokval
    operator = lambda s: a(Token('OP', s)) >> tokval
    const = lambda x: lambda _: x
    makeop = lambda s: operator(s) >> const(s)

    item = many(chars) >> (lambda x: ''.join(x))
    test1 = item.parse(tokenize('hello123'))
    assert test1 == 'hello123'
    test1b = item.parse(tokenize('42a'))
    assert test1b == '42a'
    test1c = item.parse(tokenize('cam-oeprod-123299-master'))
    assert test1c == 'cam-oeprod-123299-master'
    test1d = item.parse(tokenize('Hello world'))
    assert test1d == 'Hello world'
    equals = makeop('=')
    assert equals.parse(tokenize('=')) == '='
    slash = makeop('/')
    value = item >> possible_int
    term = (item + equals + value) >> (lambda x: (x[0], x[2]))
    test2 = term.parse(tokenize('dut=catgut'))
    assert test2 == ('dut', 'catgut')
    endmark = a(Token('END', ''))
    seq = (many(((slash + term) >> (lambda x: x[1]))) >> dict)
    top = (seq + endmark) >> (lambda x: x[0])
    test3 = seq.parse(
        tokenize('/dut=catgut/foo=bar/n=30/bet=a42a/message=Hello World'))
    assert test3 == {
        'dut': 'catgut',
        'foo': 'bar',
        'n': 30,
        'message': 'Hello World',
        'bet': 'a42a'
    }
    test4 = seq.parse(tokenize('/suppress=,bar'))
    assert test4 == {'suppress': ',bar'}
    lexemes = tokenize(constraints)
    return top.parse(lexemes)
Example #23
def parse(seq):
    """
    Parses the list of tokens and generates an AST.
    """
    def eval_expr(z, list):
        return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)  # Py3: no tuple-parameter unpacking
    unarg = lambda f: lambda x: f(*x)
    tokval = lambda x: x.value # returns the value of a token
    toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
    const = lambda x: lambda _: x # like ^^^ in Scala

    op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
    op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it

    lst = lambda x: [x[0],] + x[1]
    tup = lambda x: (x[0], x[1])

    makeop = lambda s, f: op(s) >> const(f)

    add = makeop('+', Add)
    sub = makeop('-', Sub)
    mul = makeop('*', Mul)
    div = makeop('/', Div)

    lt = makeop('<', Lt)
    gt = makeop('>', Gt)
    eq = makeop('=', Eq)

    operation = add | sub | mul | div | lt | gt | eq

    decl = with_forward_decls(lambda:toktype('Var') + op_('=') + (exp | fun) >> tup)
    decls = decl + many(skip(toktype('Semicolon')) + decl) >> lst
    variable = toktype('Var') >> Variable
    variables = variable + many(skip(toktype('Comma')) + variable) >> lst
    fun = with_forward_decls(lambda: skip(toktype('Fun')) + variables + skip(toktype('Arrow')) + exp + skip(toktype('End'))) >> unarg(Fun)
    parameters = with_forward_decls(lambda: exp + many(skip(toktype('Comma')) + exp) >> lst)
    call = skip(toktype('Call')) + (fun | variable) + skip(toktype('Lp')) + parameters + skip(toktype('Rp')) >> unarg(Call)
    ex = with_forward_decls(lambda:variable | toktype('Number') >> (lambda x: Const(int(x))) |\
        toktype('True') >> (lambda x: Const(True)) | toktype('False') >> (lambda x: Const(False)) |\
        skip(toktype('Let')) + decls + skip(toktype('In')) + exp + skip(toktype('End')) >> unarg(Let) |\
        skip(toktype('If')) + exp + skip(toktype('Then')) + exp + maybe(skip(toktype('Else')) + exp) + skip(toktype('Fi')) >> unarg(If) |\
        fun | call)
    exp = ex + many(operation + ex) >> unarg(eval_expr)
    prog = skip(toktype('Prog')) + exp >> Prog

    return prog.parse(seq)
Example #24
def get_marginal_parser():
  """Return parser for tokens describing marginals."""
  solution_type = parser.skip(parser.a(Token(token.NAME, 'MAR')))
  minus = parser.a(Token(token.OP, '-'))
  begin = parser.skip(
    parser.maybe(minus + parser.a(Token(token.NAME, 'BEGIN')) + minus))
  marginal_parser = (solution_type + parser.many(number_parser + begin) +
                     end_parser)
  return marginal_parser
Example #25
    def __init__(self):

        # TODO: Make your changes in this section #############################

        value = number | string

        #######################################################################

        attribute = rawname + op_("=") + value + semicolon >> unarg(Attribute)
        attributes = many(attribute)

        # For chicken-and-egg problems, forward_decl will be your friend
        widgets = forward_decl()
        widget = rawname + opencurlyparen + attributes + widgets + closecurlyparen >> unarg(Widget)
        widgets.define(many(widget))

        # For the toplevel, we allow only one widget, not multiple widgets
        self.toplevel = widget + end
Example #26
File: kll.py Project: slezier/kll
def parse( tokenSequence ):
	"""Sequence(Token) -> object"""

	# Top-level Parser
	expression = scanCode_expression | usbCode_expression | variable_expression | capability_expression | define_expression

	kll_text = many( expression )
	kll_file = maybe( kll_text ) + skip( finished )

	return kll_file.parse( tokenSequence )
Example #27
File: kll.py Project: totallymonkey/kll
def parse(tokenSequence):
    """Sequence(Token) -> object"""

    # Top-level Parser
    expression = ignore_expression | scanCode_expression | usbCode_expression | variable_expression | capability_expression | define_expression

    kll_text = many(expression)
    kll_file = maybe(kll_text) + skip(finished)

    return kll_file.parse(tokenSequence)
Example #28
def parse(tokens):
  ## building blocks
  kw_priority = some(toktype("kw_priority"))
  kw_probability = some(toktype("kw_probability"))
  kw_reaction = some(toktype("kw_reaction"))
  kw_exists = some(toktype("kw_exists"))
  kw_as = some(toktype("kw_as"))
  op_tilde = some(toktype("op_tilde"))
  op_priority_maximal = some(toktype("op_priority_maximal"))
  op_production = some(toktype("op_production"))
  atom = some(toktype("name"))
  number = some(toktype("number"))
  dissolve = some(toktype("op_dissolve"))
  osmose = some(toktype("op_osmose"))
  osmose_location = some(toktype("op_osmose_location"))
  env_open = some(toktype("env_open"))
  env_close = some(toktype("env_close"))
  membrane_open = some(toktype("membrane_open"))
  membrane_close = some(toktype("membrane_close"))
  
  ## grammar from the bottom up
  name = atom | number
  symbol = atom | (dissolve + maybe(name)) | (osmose + name + maybe(osmose_location + name))
  
  priority = kw_priority + op_tilde + name + op_priority_maximal + name
  
  reaction = (kw_reaction + maybe(kw_as + name) + op_tilde + 
             oneplus(name) + op_production + many(symbol))
  
  exists = kw_exists + op_tilde + oneplus(name)
  
  expr = (exists | reaction | priority)
  
  statement = with_forward_decls(lambda: membrane | expr) >> Statement
  
  body = maybe(name) + many(statement)
  
  membrane = (skip(membrane_open) + body + skip(membrane_close)) >> Membrane
  env = (skip(env_open) + body + skip(env_close)) >> Environment
  
  program = many(env) + skip(finished) >> Program
  
  return program.parse(tokens)
Example #29
    def __init__(self):
        self.toplevel = None

        makeop = lambda s: op(s) >> const(lambda l, r: BinaryExpression(l, s, r))

        kw_while = kw('while')
        kw_if = kw('if')
        kw_else = kw('else')
        kw_put = kw('put')

        add = makeop('+')
        sub = makeop('-')
        mul = makeop('*')
        div = makeop('/')

        comparison_operator = makeop('<') | makeop('<=') | makeop('>') | \
                              makeop('>=') | makeop('==') | makeop('!=')

        equ = op_('=')

        sign = op('+') | op('-')
        int_const = maybe(sign) + number >> unarg(IntConst)
        variable = rawname >> Variable

        atom = int_const | variable
        expr1 = atom + many((mul | div) + atom) >> eval_expr
        expr2 = expr1 + many((add | sub) + expr1) >> eval_expr
        comparison_expr = expr2 + many(comparison_operator + expr2) >> eval_expr

        put_call = kw_put + inparens(comparison_expr) >> PutCall
        assignment = variable + equ + expr2 >> unarg(Assignment)
        condition = openparen + comparison_expr + closeparen

        stmt_block = forward_decl()
        while_stmt = kw_while + condition + stmt_block >> unarg(WhileStatement)
        if_stmt = kw_if + condition + stmt_block + maybe(kw_else + stmt_block) >> unarg(IfStatement)
        stmt = assignment | put_call | while_stmt | if_stmt
        stmt_block.define(opencurlyparen + many(stmt) + closecurlyparen)

        program = many(stmt) >> Program

        self.toplevel = program + end
Example #32
def parse(tokens):
    makeop = lambda s: op(s) >> const(lambda l, r: BinaryExpression(l, s, r))

    num = number >> Number
    var = rawname >> Variable

    add, sub, mul, div = map(makeop, ['+', '-', '*', '/'])
    lt, lte, gt, gte = map(makeop, ['<', '<=', '>', '>='])

    method_call = forward_decl()
    atom = num | method_call | var

    expr1 = atom + many((mul | div) + atom) >> eval_expr
    expr2 = expr1 + many((add | sub) + expr1) >> eval_expr
    expr3 = expr2 + many((lt | lte | gt | gte) + expr2) >> eval_expr

    method_call.define(rawname + inparens(maybe_empty_listof(expr2)) >> unarg(MethodCall))

    defn = expr3 + end
    return defn.parse(tokens)
Example #33
File: axio.py Project: gatesphere/axio
def parse(tokens):
  var = some(toktype("name")) | some(toktype("number"))

  open_form = some(toktype("form_open"))
  close_form = some(toktype("form_close"))
  op_lambda = some(toktype("op_lambda"))
  op_map = some(toktype("op_map"))

  prim_bind = some(toktype("kw_bind"))
  prim_halt = some(toktype("kw_halt"))

  exp = with_forward_decls(lambda: lam | var | prim_exp | exprn) >> Expression
  lam = open_form + op_lambda + many(var) + op_map + oneplus(exp) + close_form >> Lambda
  bind_exp = open_form + prim_bind + var + lam + close_form
  halt_exp = open_form + prim_halt + exp + close_form
  prim_exp = bind_exp | halt_exp
  exprn = open_form + oneplus(exp) + close_form >> Form

  prog = many(exp) + skip(finished) >> Program
  
  return prog.parse(tokens)
Example #34
    def __init__(self):

        # TODO: Make your changes in this section #############################

        nr = number >> Number
        plus_operator = op('+') >> Operator
        calculation = nr + plus_operator + nr >> unarg(Calculation)

        string_list = inbrackets(listof(string))
        value = calculation | number | string | string_list

        #######################################################################

        attribute = rawname + op_("=") + value + semicolon >> unarg(Attribute)
        attributes = many(attribute)

        widgets = forward_decl()
        widget = rawname + opencurlyparen + attributes + widgets + closecurlyparen >> unarg(Widget)
        widgets.define(many(widget))

        self.toplevel = widget + end
Example #35
def create_grammar():
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    null = n('null')
    true = n('true')
    false = n('false')
    number = toktype('Number')
    string = toktype('String')
    value = forward_decl()
    member = string + op_(':') + value
    object_ = (op_('{') + maybe(member + many(op_(',') + member)) + op_('}'))
    array = (op_('[') + maybe(value + many(op_(',') + value)) + op_(']'))
    value.define(null | true | false | object_ | array | number | string)
    json_text = object_ | array
    json_file = json_text + skip(finished)

    return json_file
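Assumed usage of create_grammar() with hand-built tokens for the input [null, true]; since this grammar attaches no semantic actions, the raw parse tuple comes back:

from funcparserlib.lexer import Token

g = create_grammar()
toks = [Token('Op', '['), Token('Name', 'null'), Token('Op', ','),
        Token('Name', 'true'), Token('Op', ']')]
print(g.parse(toks))  # ('null', ['true'])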
Example #36
    def __init__(self):
        self.toplevel = None

        makeop = lambda s: op(s) >> const(lambda l, r: BinaryExpression(l, s, r))

        kw_class = kw('class')
        kw_new = kw('new')
        kw_if = kw('if')
        kw_else = kw('else')
        kw_while = kw('while')
        kw_return = kw('return')

        add = makeop('+')
        sub = makeop('-')
        mul = makeop('*')
        div = makeop('/')

        equ = op_('=')

        int_const = number >> IntConst
        variable = rawname >> Variable
        type_ = rawname >> Type
        var_decl = type_ + rawname >> unarg(VarDeclaration)
        var_decls = many(var_decl + semicolon)

        method_call = forward_decl()
        atom = int_const | method_call | variable
        expr1 = atom + many((mul | div) + atom) >> eval_expr
        expr2 = expr1 + many((add | sub) + expr1) >> eval_expr
        method_call.define(rawname + inparens(maybe_empty_listof(expr2)) >> unarg(MethodCall))

        new_obj_expr = kw_new + type_ + openparen + closeparen >> NewObject
        assignment = variable + equ + (new_obj_expr | expr2) >> unarg(Assignment)
        return_stmt = kw_return + expr2 >> ReturnStatement
        simple_stmt = (assignment | method_call | return_stmt) + semicolon

        stmt_block = forward_decl()
        condition = openparen + expr2 + closeparen
        if_stmt = kw_if + condition + stmt_block + kw_else + stmt_block >> unarg(IfStatement)
        while_stmt = kw_while + condition + stmt_block >> unarg(WhileStatement)

        stmt = simple_stmt | if_stmt | while_stmt | return_stmt
        stmt_block.define(opencurlyparen + many(stmt) + closecurlyparen)

        method_decl = type_ + rawname + inparens(maybe_empty_listof(var_decl)) + \
                        opencurlyparen + var_decls + many(stmt) + \
                        closecurlyparen >> unarg(MethodDeclaration)

        method_decls = many(method_decl)
        class_decl = kw_class + rawname + opencurlyparen + var_decls + \
                      method_decls + closecurlyparen >> unarg(ClassDeclaration)

        program = many(class_decl) >> Program

        self.toplevel = program + end
Example #37
def create_parser():
    # operator: '~=' | '>=' | '<=' | '<' | '>' | '='
    operator = some(lambda tok: tok.type == 'CMP') >> choose_class

    # value: STRING | WORD
    word = some(lambda tok: tok.type == 'WORD') >> Text
    string = some(lambda tok: tok.type == 'STRING') >> QuotedText
    value = string | word

    # function: WORD '(' ')'
    open_brace = skip(a(Token('BR', '(')))
    close_brace = skip(a(Token('BR', ')')))
    function = word + open_brace + close_brace >> Function

    # field_expr: WORD operator value
    fieldexpr = (word + operator + (function | value)) >> (lambda x: x[1]
                                                           ([x[0], x[2]]))

    OR = a(Token('OP', 'OR')) >> choose_class
    AND = a(Token('OP', 'AND')) >> choose_class

    def eval(data):
        arg1, lst = data
        for f, arg2 in lst:
            arg1 = f([arg1, arg2])

        return arg1

    expr = forward_decl()

    basexpr = open_brace + expr + close_brace | fieldexpr
    andexpr = (basexpr + many(AND + basexpr)) >> eval
    orexpr = (andexpr + many(OR + andexpr)) >> eval
    expr.define(orexpr)

    return expr
Example #38
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)  # Py3: no tuple-parameter unpacking
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t
	call = lambda x: Call(x[0], x[1])

	makeop = lambda s, f: op(s) >> const(f)

	add = makeop('+', Plus)
	sub = makeop('-', Minus)
	mul = makeop('*', Times)
	div = makeop('/', Div)

	def make_const(i):
		return const(int(i))

	number = toktype('Number') >> Const

	mul_op = mul | div
	add_op = add | sub

	factor = with_forward_decls(lambda:
		number | op_('(') + exp + op_(')') | call)
	term = factor + many(mul_op + factor) >> unarg(eval_expr)
	exp = term + many(add_op + term) >> unarg(eval_expr)
	exp_lst = with_forward_decls(lambda:
		exp + many(op_(',') + exp) >> unarg(lst))
	call = toktype('Name') + op_('(') + exp_lst + op_(')') >> call

	return exp.parse(seq)
Example #39
def parser(last_error=None):
    last_error = LastError() if last_error is None else last_error

    def apl(f):
        return lambda x: f(*x)

    def delim(t):
        return skip(_tok(t))

    symbol = _tok(Token.SYMBOL) >> _gen(Symbol)
    string = _tok(Token.STRING) >> _gen(String)
    placeholder = _tok(Token.PLACEHOLDER) >> _gen(Placeholder)
    keyword = _tok(Token.KEYWORD) >> _gen(Keyword)

    # Note: tokenizer guarantees that value consists of dots and digits
    # TODO: convert exceptions
    number = _tok(Token.NUMBER) >> _gen(Number, literal_eval)

    expr = forward_decl()
    implicit_tuple = forward_decl()

    list_ = ((_tok(Token.OPEN_BRACKET) + many(expr | keyword) +
              _tok(Token.CLOSE_BRACKET)) >> apl(_list))

    dict_ = (error_ctx(
        _tok(Token.OPEN_BRACE) + many(keyword + expr) +
        _tok(Token.CLOSE_BRACE), last_error, DICT_ERROR) >> apl(_dict))

    inline_args = many(expr | keyword)

    explicit_tuple = (error_ctx(
        _tok(Token.OPEN_PAREN) + symbol + inline_args +
        _tok(Token.CLOSE_PAREN), last_error, EXPLICIT_TUPLE_ERROR) >>
                      apl(_tuple))

    indented_arg = (
        oneplus(implicit_tuple | expr + delim(Token.NEWLINE)) >> _maybe_join)

    indented_kwarg = (((keyword + expr + delim(Token.NEWLINE)) |
                       (keyword + delim(Token.NEWLINE) + delim(Token.INDENT) +
                        indented_arg + delim(Token.DEDENT))))

    indented_args_kwargs = (
        (many(indented_kwarg) + many(indented_arg)) >>
        apl(lambda pairs, args: list(chain(*(pairs + [args])))))

    implicit_tuple.define(
        error_ctx(
            symbol + inline_args + delim(Token.NEWLINE) + maybe(
                delim(Token.INDENT) + indented_args_kwargs +
                delim(Token.DEDENT)), last_error, IMPLICIT_TUPLE_ERROR) >> apl(
                    _implicit_tuple))

    expr.define(symbol | string | number | explicit_tuple | list_ | dict_
                | placeholder)

    body = ((many(implicit_tuple) + _tok(Token.EOF)) >> apl(_module))
    return body
Example #40
def parse(seq):
    """Sequence(Token) -> object"""
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    n = lambda s: a(Token(u'Name', s)) >> tokval
    op = lambda s: a(Token(u'Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id_types = [u'Name', u'Number', u'String']
    id = some(lambda t: t.type in id_types).named(u'id') >> tokval
    make_graph_attr = lambda args: DefAttrs(u'graph', [Attr(*args)])
    make_edge = lambda x, xs, attrs: Edge([x] + xs, attrs)

    node_id = id  # + maybe(port)
    a_list = (
        id + maybe(op_(u'=') + id) + skip(maybe(op(u','))) >> unarg(Attr))
    attr_list = (many(op_(u'[') + many(a_list) + op_(u']')) >> flatten)
    attr_stmt = (
        (n(u'graph') | n(u'node') | n(u'edge')) + attr_list >> unarg(DefAttrs))
    graph_attr = id + op_(u'=') + id >> make_graph_attr
    node_stmt = node_id + attr_list >> unarg(Node)
    # We use a forward_decl because of circular definitions like (stmt_list ->
    # stmt -> subgraph -> stmt_list)
    subgraph = forward_decl()
    edge_rhs = skip(op(u'->') | op(u'--')) + (subgraph | node_id)
    edge_stmt = ((subgraph | node_id) + oneplus(edge_rhs) + attr_list >>
                 unarg(make_edge))
    stmt = (attr_stmt | edge_stmt | subgraph | graph_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(u';'))))
    subgraph.define(
        skip(n(u'subgraph')) + maybe(id) + op_(u'{') + stmt_list +
        op_(u'}') >> unarg(SubGraph))
    graph = (maybe(n(u'strict')) + maybe(n(u'graph') | n(u'digraph')) +
             maybe(id) + op_(u'{') + stmt_list + op_(u'}') >> unarg(Graph))
    dotfile = graph + skip(finished)

    return dotfile.parse(seq)
Example #41
def primary(tokens, state):
    kw = keyword
    parser = (token_type("number")
              | token_type("string")
              | kw("false")
              | kw("true")
              | (name | kw("der") | kw("initial")) + function_call_args
              | component_reference
              | op("(") + output_expression_list + op(")")
              | (op("[") + expression_list +
                 maybe(many(op(";") + expression_list)) + op("]"))
              | op("{") + function_arguments + op("}")
              | kw("end"))

    return (parser >> Primary).run(tokens, state)
Example #42
def parse(seq):
    'Sequence(Token) -> object'
    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t: t.type in ['Name', 'Number', 'Color', 'String']).named(
        'id') >> tokval
    date = some(lambda t: t.type == 'Date').named('date') >> tokval
    make_node = lambda args: Node(*args)

    node_stmt = id + op_(':') + date + maybe(op_('-') + date) >> make_node
    chart = (many(node_stmt + skip(maybe(op(';')))) >> Chart)
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #43
def class_specifier(tokens, state):
    normal = (token_type('ident') + string_comment + composition +
              keyword('end') + token_type('ident'))
    derived = (token_type('ident') + op("=") + base_prefix + name +
               maybe(array_subscript) + maybe(class_modification) + comment)
    enum_def = (token_type('ident') + op("=") + keyword('enumeration') +
                op("(") + (maybe(enum_list) | op(":")) + op(")") + comment)
    derivative = (token_type('ident') + op("=") + keyword('der') + op("(") +
                  name + op(",") + token_type('ident') +
                  maybe(many(op(",") + token_type('ident'))) + op(")") +
                  comment)
    extended = (keyword('extends') + token_type('ident') +
                maybe(class_modification) + string_comment + composition +
                keyword("end") + token_type('ident'))

    parser = (normal | derived | enum_def | derivative | extended)
    return (parser >> ClassSpecifier).run(tokens, state)
Example #44
    def test_error_info(self):
        tokenize = make_tokenizer([
            ('keyword', (r'(is|end)',)),
            ('id', (r'[a-z]+',)),
            ('space', (r'[ \t]+',)),
            ('nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize('f is ф'))
        except LexerError as e:
            self.assertEqual(six.text_type(e),
                             'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail('must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token('keyword', s))

        id = sometok('id')
        is_ = keyword('is')
        end = keyword('end')
        nl = sometok('nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != 'space']
        try:
            file.parse(toks)
        except NoParseError as e:
            self.assertEqual(e.msg,
                             "got unexpected token: 2,11-2,14: id 'spam'")
            self.assertEqual(e.state.pos, 4)
            self.assertEqual(e.state.max, 7)
            # May raise KeyError
            t = toks[e.state.max]
            self.assertEqual(t, Token('id', 'spam'))
            self.assertEqual((t.start, t.end), ((2, 11), (2, 14)))
        else:
            self.fail('must raise NoParseError')
Example #45
    def test_error_info(self):
        tokenize = make_tokenizer([
            ('keyword', (r'(is|end)',)),
            ('id', (r'[a-z]+',)),
            ('space', (r'[ \t]+',)),
            ('nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize('f is ф'))
        except LexerError as e:
            self.assertEqual(str(e),
                             'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail('must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token('keyword', s))

        id = sometok('id')
        is_ = keyword('is')
        end = keyword('end')
        nl = sometok('nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != 'space']
        try:
            file.parse(toks)
        except NoParseError as e:
            self.assertEqual(e.msg,
                             "got unexpected token: 2,11-2,14: id 'spam'")
            self.assertEqual(e.state.pos, 4)
            self.assertEqual(e.state.max, 7)
            # May raise KeyError
            t = toks[e.state.max]
            self.assertEqual(t, Token('id', 'spam'))
            self.assertEqual((t.start, t.end), ((2, 11), (2, 14)))
        else:
            self.fail('must raise NoParseError')
Example #46
def _parse_rule(seq):
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    sep = lambda s: a(Token(u'Sep', s)) >> tokval
    s_sep = lambda s: skip(sep(s))

    level = toktype(u'Level')
    comparator = toktype(u'Comparator') >> COMPARATORS.get
    number = toktype(u'Number') >> float
    historical = toktype(u'Historical')
    unit = toktype(u'Unit')
    operator = toktype(u'Operator')
    logical_operator = toktype(u'LogicalOperator') >> LOGICAL_OPERATORS.get

    exp = comparator + (
        (number + maybe(unit)) | historical) + maybe(operator + number)
    rule = (level + s_sep(':') + exp + many(logical_operator + exp))

    overall = rule + skip(finished)
    return overall.parse(seq)
Example #47
def parse1(tokens):
    """ 
    Experimenting with collapsing part of the parse tree
    to make it easier to work with.
    """
    t = lambda s: some(lambda tok: tok.type == s)

    name         = t('Name')
    star         = t('Star')

    def collapse(x):
        if len(x[1]) > 0:
            # TODO: handle multiple stars
            return Token("UserTypePointer", x[0].value + " " + x[1].value)
        else:
            return Token("UserType", x[0].value)

    udt = name + many(star) >> collapse

    return udt.parse(tokens)
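A hypothetical run of parse1() with tokens for foo *, using funcparserlib's Token class in place of the project's lexer:

from funcparserlib.lexer import Token

print(parse1([Token('Name', 'foo'), Token('Star', '*')]))
# Token('UserTypePointer', 'foo *')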
Example #48
def _create_type_rules():
    comma = _token_type("comma")
    colon = _token_type("colon")
    question_mark = _token_type("question-mark")
    bar = _token_type("bar")
    equals = _token_type("equals")
    
    attr_name = type_name = arg_name = _token_type("name") >> _make_name

    primary_type = forward_decl()
    union_type = _one_or_more_with_separator(primary_type, bar) >> _make_union_type
    type_ = union_type
    
    type_ref = type_name >> _make_type_ref
    applied_type = (
        type_ref +
        skip(_token_type("open")) +
        _one_or_more_with_separator(type_, comma) +
        skip(_token_type("close"))
    ) >> _make_apply
    
    arg = (maybe(question_mark) + maybe(arg_name + skip(colon)) + type_) >> _make_arg
    
    generic_params = maybe(type_name + _token_type("fat-arrow")) >> _make_params
    args = _zero_or_more_with_separator(arg, comma)
    signature = (generic_params + args + _token_type("arrow") + type_) >> _make_signature
    sub_signature = (_token_type("paren-open") + signature + _token_type("paren-close")) >> (lambda result: result[1])
    
    primary_type.define(sub_signature | applied_type | type_ref)
    explicit_type = signature | type_
    
    type_definition = (type_name + skip(equals) + type_ + skip(finished))  >> _make_type_definition
    
    structural_type_attr = (attr_name + skip(colon) + explicit_type) >> tuple
    structural_type_attrs = many(structural_type_attr)
    
    structural_type_definition = (type_name + skip(colon) + structural_type_attrs + skip(finished)) >> _make_structural_type_definition
    
    generic = (_one_or_more_with_separator(type_name, comma) + skip(finished)) >> _make_generic
    
    return explicit_type + skip(finished), type_definition, structural_type_definition, generic
Example #49
def test_error_info():
    tokenize = make_tokenizer([
        Spec('keyword', r'(is|end)'),
        Spec('id',      r'[a-z]+'),
        Spec('space',   r'[ \t]+'),
        Spec('nl',      r'[\n\r]+'),
    ])
    try:
        list(tokenize('f is ф'))
    except LexerError as e:
        pass
    else:
        ok_(False, 'must raise LexerError')

    keyword = lambda s: tok('keyword', s)

    id = tok('id')
    is_ = keyword('is')
    end = keyword('end')
    nl = tok('nl')

    equality = id + skip(is_) + id >> tuple
    expr = equality + skip(nl)
    file = many(expr) + end

    msg = """\
rake is eggs
eggs isnt spam
end"""
    toks = [x for x in tokenize(msg) if x.type != 'space']
    try:
        file.parse(toks)
    except ParserError as e:
        msg, pos, i = e.args
        eq_(msg, "got unexpected token: id 'spam'")
        eq_(pos, ((2, 11), (2, 14)))
        # May raise KeyError
        t = toks[i]
        eq_(t, Token('id', 'spam'))
    else:
        ok_(False, 'must raise ParserError')
Example #52
def parse(source):
    task = Task()

    get_value = lambda x: x.value
    value_of = lambda t: some(lambda x: x.type == t) >> get_value

    keyword = lambda s: skip(value_of(s))
    make_rule = lambda x: task.add_rule(Rule(**{x[0]: x[1][1:-1]}))
    set_root = lambda value: task.set_root_dir(value[1:-1])
    set_mask = lambda value: task.set_mask(value[1:-1])

    root = keyword('In') + value_of('Value') >> set_root
    mask = keyword('With') + value_of('Value') >> set_mask
    rule = keyword('Set') + \
           value_of('Attribute') + \
           keyword('Equals') + \
           value_of('Value') \
           >> make_rule
    parser = maybe(mask) + root + many(rule)
    parser.parse(source)
    return task
Example #53
def dolike(head):
    "Parse a `do`-like form."
    return pexpr(sym(head), many(FORM))
Example #54
posnumber = (some(lambda tok: tok.type == 'NUMBER') >> tokval >> make_number)

add = makeop('+', operator.add)
sub = makeop('-', operator.sub)
mul = makeop('*', operator.mul)
div = makeop('/', operator.truediv)  # operator.div existed only on Python 2
pow = makeop('**', operator.pow)

negnumber = (sub + posnumber) >> negate
number = posnumber | negnumber

mul_op = mul | div
add_op = add | sub

primary = with_forward_decls(lambda: number | (op_('(') + expr + op_(')')))
factor = primary + many(pow + primary) >> f
term = factor + many(mul_op + factor) >> f
expr = term + many(add_op + term) >> f

endmark = a(Token(token.ENDMARKER, ''))
end = skip(endmark + finished)
toplevel = maybe(expr) + end


#@+node:peckj.20140124085532.4012: *3* parse
def parse(tokens):
    return toplevel.parse(tokens)


parse_and_run = lambda x: parse(tokenize(x))
Example #55
File: parse.py Project: cryham/kll
unseqString   = tokenType('SequenceString') >> Make.unseqString # For use with variables

colRowOperator = lambda s: a( Token( 'ColRowOperator', s ) )
relCROperator  = lambda s: a( Token( 'RelCROperator', s ) )
pixelOperator  = tokenType('PixelOperator')

# Code variants
code_begin = tokenType('CodeBegin')
code_end   = tokenType('CodeEnd')

# Specifier
specifier_basic   = ( timing >> Make.specifierTiming ) | ( name >> Make.specifierState )
specifier_complex = ( name + skip( operator(':') ) + timing ) >> unarg( Make.specifierState )
specifier_state   = specifier_complex | specifier_basic
specifier_analog  = number >> Make.specifierAnalog
specifier_list    = skip( parenthesis('(') ) + many( ( specifier_state | specifier_analog ) + skip( maybe( comma ) ) ) + skip( parenthesis(')') )

# Scan Codes
scanCode_start       = tokenType('ScanCodeStart')
scanCode_range       = number + skip( dash ) + number >> Make.scanCode_range
scanCode_listElem    = number >> Make.scanCode
scanCode_specifier   = ( scanCode_range | scanCode_listElem ) + maybe( specifier_list ) >> unarg( Make.specifierUnroll )
scanCode_innerList   = many( scanCode_specifier + skip( maybe( comma ) ) ) >> flatten
scanCode_expanded    = skip( scanCode_start ) + scanCode_innerList + skip( code_end ) + maybe( specifier_list ) >> unarg( Make.specifierUnroll )
scanCode_elem        = scanCode + maybe( specifier_list ) >> unarg( Make.specifierUnroll )
scanCode_combo       = oneplus( ( scanCode_expanded | scanCode_elem ) + skip( maybe( plus ) ) )
scanCode_sequence    = oneplus( scanCode_combo + skip( maybe( comma ) ) )
scanCode_single      = ( skip( scanCode_start ) + scanCode_listElem + skip( code_end ) ) | scanCode
scanCode_il_nospec   = oneplus( ( scanCode_range | scanCode_listElem ) + skip( maybe( comma ) ) )
scanCode_nospecifier = skip( scanCode_start ) + scanCode_il_nospec + skip( code_end )
Example #57
# =========== Expressions parser
# FIXME: it should be rewritten using full Lua 5.2 grammar.

BINARY_OPS = set("+-*/^%><") | {"..", "==", "~=", ">=", "<=", "and", "or"}
UNARY_OPS = {"not", "-", "#"}

binary_op = p.some(lambda t: t.value in BINARY_OPS) >> token_value
unary_op = p.some(lambda t: t.value in UNARY_OPS) >> token_value

# expressions with binary and unary ops + parenthesis
@p.with_forward_decls
def value():
    single_value = table | tok_number | tok_string | tok_constant | iden
    return single_value | (close_rnd_brace + expr + open_rnd_brace)
_term = value + p.skip(p.maybe(unary_op))
expr = _term + p.many(binary_op + _term) >> flat

# [expression]
_index_lookup = p.skip(close_sq_brace) + expr + p.skip(open_sq_brace)

# foo=expr
# [foo]=expr
_key = iden | _index_lookup
_keyvalue = expr + token("=") + _key

# foo=expr, ["bar"]=42,
_table_sep = token(",") | token(";")
table_parameters = (
    p.maybe(_table_sep) +   # allow trailing comma/semicolon
    (_keyvalue | expr) +
    p.many(_table_sep + (_keyvalue | expr))
)
Example #58
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_number(n):
        try:
            return int(n)
        except ValueError:
            return float(n)

    def unescape(s):
        std = {
            '"': '"', '\\': '\\', '/': '/', 'b': '\b', 'f': '\f',
            'n': '\n', 'r': '\r', 't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return unescape(n[1:-1])

    null = n('null') >> const(None)
    true = n('true') >> const(True)
    false = n('false') >> const(False)
    number = toktype('Number') >> make_number
    string = toktype('String') >> make_string
    value = forward_decl()
    member = string + op_(':') + value >> tuple
    object = (
        op_('{') +
        maybe(member + many(op_(',') + member)) +
        op_('}')
        >> make_object)
    array = (
        op_('[') +
        maybe(value + many(op_(',') + value)) +
        op_(']')
        >> make_array)
    value.define(
        null
        | true
        | false
        | object
        | array
        | number
        | string)
    json_text = object | array
    json_file = json_text + skip(finished)

    return json_file.parse(seq)
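Assumed usage of the JSON parser above, feeding hand-built tokens for the text [1, 2]; a real run would pair it with a matching tokenizer:

from funcparserlib.lexer import Token

print(parse([Token('Op', '['), Token('Number', '1'), Token('Op', ','),
             Token('Number', '2'), Token('Op', ']')]))  # [1, 2]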