Example #1
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    value_flatten = lambda l: sum([[l[0]]] + list(l[1:]), [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t:
        t.type in ['Name', 'Number', 'Color', 'String']).named('id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    pair = (
        op_('(') + id + skip(maybe(op(','))) + id + op_(')')
        >> tuple)
    value = (id | pair)
    value_list = (
        value +
        many(op_(',') + value)
        >> value_flatten)
    a_list = (
        id +
        maybe(op_('=') + id) +
        skip(maybe(op(',')))
        >> unarg(Attr))
    attr_list = (
        many(op_('[') + many(a_list) + op_(']'))
        >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value_list >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (
        chart_attr
        | node_stmt
    )
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart_type = (
          n('p')   | n('pie')    | n('piechart')
        | n('p3')  | n('pie3d')  | n('piechart_3d')
        | n('lc')  | n('line')   | n('linechart')
        | n('lxy') | n('linechartxy')
        | n('bhs') | n('holizontal_barchart')
        | n('bvs') | n('vertical_barchart')
        | n('bhg') | n('holizontal_bargraph')
        | n('bvg') | n('vertical_bargraph')
        | n('v')   | n('venn')   | n('venndiagram')
        | n('s')   | n('plot')   | n('plotchart')
    )
    chart = (
        chart_type +
        maybe(id) +
        op_('{') +
        stmt_list +
        op_('}')
        >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #2
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    date = some(lambda s: a(Token('Date', s))).named('date') >> tokval
    id = some(lambda t: t.type in ['Name', 'Number', 'String']).named(
        'id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    term = date + op_('-') + date
    value = (id | term | date)
    a_list = (id + maybe(op_('=') + id) + skip(maybe(op(','))) >> unarg(Attr))
    attr_list = (many(op_('[') + many(a_list) + op_(']')) >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (chart_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart = (maybe(n('diagram')) + maybe(id) + op_('{') + stmt_list + op_('}')
             >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #3
def get_marginal_parser():
  """Return parser for tokens describing marginals."""
  solution_type = parser.skip(parser.a(Token(token.NAME, 'MAR')))
  minus = parser.a(Token(token.OP, '-'))
  begin = parser.skip(
    parser.maybe(minus + parser.a(Token(token.NAME, 'BEGIN')) + minus))
  marginal_parser = (solution_type + parser.many(number_parser + begin) +
                     end_parser)
  return marginal_parser
Example #4
def get_number_parser():
  """Return parser that reads (float and int) numbers with whitespace."""
  number = (parser.some(lambda tok: tok.type == 'NUMBER')
            >> token_value
            >> string_to_number)
  indent = parser.some(lambda t: t.code == token.INDENT)
  dedent = parser.a(Token(token.DEDENT, ''))
  newline = parser.a(Token(54, '\n'))
  ignored_whitespace = parser.skip(indent | dedent | newline)
  return parser.oneplus(number | ignored_whitespace)
Example #5
def parse_expression(expression):
    """Parse an expression that appears in an execution node, i.e. a
    block delimited by ``{% %}``.

    This can be a compound expression like a ``for`` statement with
    several sub-expressions, or it can just be a single statement such
    as ``endif``.

    :param list expression: Tokenised expression.

    """
    from funcparserlib.parser import a, skip, some, NoParseError

    # For if expressions, we rely on the Python parser to process the
    # expression rather than using our own parser.
    if expression[0] == 'if':
        return IfNode(ast.parse(' '.join(expression[1:]), mode="eval"))

    variable_name = some(lambda x: re.match(r'[a-zA-Z_]+', x))

    # TODO We use the same function twice, first to match the token
    # and then to extract the value we care about from the token
    # (namely the contents of the quoted string). This smells wrong.
    def extract_quoted_string(x):
        result = re.match(r'\"([^\"]*)\"', x)
        if result:
            return result.group(1)

    quoted_string = some(extract_quoted_string)

    for_expression = (skip(a('for')) + (variable_name >> str) + skip(a('in')) +
                      (variable_name >> str))

    extends_expression = (skip(a('extends')) +
                          (quoted_string >> extract_quoted_string))

    block_expression = (skip(a('block')) + (variable_name >> str))

    def make_for_node(x):
        return ForNode(*x)

    def make_extends_node(x):
        return ExtendsNode(*x)

    parser = ((for_expression >> make_for_node)
              | (extends_expression >> make_extends_node)
              | (block_expression >> BlockNode))

    try:
        return parser.parse(expression)
    except NoParseError as e:
        raise Exception("Invalid expression '%s'" % expression)
Example #6
def e(name):
    """Get an ASTValue with the given error value

    An ERRORTOKEN is any unrecognized input (invalid Python value)
    or an unterminated single quote
    """
    return a(Token('ERRORTOKEN', name)) >> tok_to_value
Example #7
def parse(seq):
	"""Returns the AST of the given token sequence."""
	global depth
	unarg = lambda f: lambda x: f(*x)

	tokval = lambda x: x.value # returns the value of a token
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	paren = lambda s: a(Token('Parentheses', s)) >> tokval # return the value if token is a Parenthesis
	paren_ = lambda s: skip(paren(s)) # checks if token is a Parenthesis and ignores it

	def application(z, list):
		return reduce(lambda s, x: Application(s, x), list, z)

	depth = 0
	variable = lambda x: Variable(str(x)+":"+str(depth))
	def abstraction(x):
		global depth
		abst = Abstraction(str(x[0])+":"+str(depth), x[1])
		depth += 1
		return abst
	
	variable = toktype('Name') >> variable
	term = variable | with_forward_decls(lambda: paren_('(') + exp + paren_(')')) | \
		with_forward_decls(lambda: skip(toktype('Lambda')) + toktype('Name') + \
			skip(toktype('Dot')) + exp >> abstraction)

	exp = term + many(term) >> unarg(application)

	return exp.parse(seq)
Example #8
def parse(sequence, query):
    tokval = lambda x: x.value
    toktype = lambda t: (some(lambda x: x.type == t).named('(type %s)' % t) >>
                         tokval)
    operation = lambda s: a(Token('Op', s)) >> tokval
    operation_ = lambda s: skip(operation(s))

    create_param = lambda param_name: query.get_aliased_param(param_name)
    make_and = lambda params: And(params[0], params[1])
    make_or = lambda params: Or(params[0], params[1])
    make_not = lambda inner: Not(inner)

    word = toktype('Word')
    inner_bracket = forward_decl()
    left_of_and = forward_decl()
    right_of_and = forward_decl()
    left_of_or = forward_decl()
    not_ = forward_decl()
    bracket = operation_('(') + inner_bracket + operation_(')')
    and_ = left_of_and + operation_('&') + right_of_and >> make_and
    or_ = left_of_or + operation_('|') + inner_bracket >> make_or
    param = word >> create_param

    not_.define(operation_('!') + (bracket | param))
    not_ = not_ >> make_not

    left_of_or.define(and_ | bracket | not_ | param)
    left_of_and.define(bracket | not_ | param)
    right_of_and.define(left_of_and)
    inner_bracket.define(or_ | and_ | bracket | not_ | param)

    definition = (bracket | inner_bracket) + finished

    return definition.parse(sequence)
Example #9
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t

	makeop = lambda s, f: op(s) >> const(f)
	or_op = makeop('|', Or)
	
	char = with_forward_decls(lambda:
		toktype('Char') >> Char | op_('(') + exp + op_(')'))
	star = char + op_('*') >> Star | char

	lst2_exp = star + many(star) >> unarg(lst)
	lst_exp = lst2_exp >> Lst

	exp = lst_exp + many(or_op + lst_exp) >> unarg(eval_expr)

	return exp.parse(seq)
Example #10
def parse(tokens):
    '''Parses an SQL date range.

    Parses a list of Token objects to see if it's a valid SQL
    clause meeting the following conditions:
    An optional sequence of ANDed simple conditions ANDed with
    an optional sequence of ORed complex conditions.
    Where a simple condition is a date unit, a sign, and a date value.
    And a complex condition is any legal SQL combination of simple
    conditions ANDed or ORed together.
    Date unit: YYYY, MM, DD, HH, MIN
    Sign: <, <=, =, >=, >
    Date value: any integer value, with an optional leading zero

    Returns:
       True if the tokens represent a valid SQL date range, False otherwise.
    '''
    try:
        left_paren = some(lambda t: t.value in '(')
        right_paren = some(lambda t: t.value in ')')
        oper = some(lambda t: t.value in SIGNS)
        unit = some(lambda t: t.value in UNITS)
        padded_num = some(lambda t: t.code == 2) + some(
            lambda t: t.code == 2)  # hmmm, better way???
        raw_num = some(lambda t: t.code == 2)
        num = padded_num | raw_num
        cond = unit + oper + num

        endmark = a(Token(token.ENDMARKER, ''))
        end = skip(endmark + finished)

        ands = maybe(cond + maybe(many(a(Token(token.NAME, 'AND')) + cond)))
        or_ands = left_paren + ands + right_paren
        ors_without_ands = or_ands + maybe(
            many(a(Token(token.NAME, 'OR')) + or_ands))
        ors_with_ands = (a(Token(token.NAME, 'AND')) + left_paren + or_ands +
                         maybe(many(a(Token(token.NAME, 'OR')) + or_ands)) +
                         right_paren)
        ors = maybe(ors_without_ands | ors_with_ands)
        full = left_paren + ands + ors + right_paren + end

        full.parse(tokens)
    except NoParseError:
        return False
    except TokenError:
        return False
    return True
Example #11
def parse(constraints):
    """Using funcparserlib turn constraints into a mongo query

    NOTE: this uses functors, see:

    http://spb-archlinux.ru/2009/funcparserlib/Tutorial
    """
    tokval = lambda tok: tok.value
    char = lambda tok: tok.code == 'CHAR'
    chars = some(char) >> tokval
    operator = lambda s: a(Token('OP', s)) >> tokval
    const = lambda x: lambda _: x
    makeop = lambda s: operator(s) >> const(s)

    item = many(chars) >> (lambda x: ''.join(x))
    test1 = item.parse(tokenize('hello123'))
    assert test1 == 'hello123'
    test1b = item.parse(tokenize('42a'))
    assert test1b == '42a'
    test1c = item.parse(tokenize('cam-oeprod-123299-master'))
    assert test1c == 'cam-oeprod-123299-master'
    test1d = item.parse(tokenize('Hello world'))
    assert test1d == 'Hello world'
    equals = makeop('=')
    assert equals.parse(tokenize('=')) == '='
    slash = makeop('/')
    value = item >> possible_int
    term = (item + equals + value) >> (lambda x: (x[0], x[2]))
    test2 = term.parse(tokenize('dut=catgut'))
    assert test2 == ('dut', 'catgut')
    endmark = a(Token('END', ''))
    seq = (many(((slash + term) >> (lambda x: x[1]))) >> dict)
    top = (seq + endmark) >> (lambda x: x[0])
    test3 = seq.parse(
        tokenize('/dut=catgut/foo=bar/n=30/bet=a42a/message=Hello World'))
    assert test3 == {
        'dut': 'catgut',
        'foo': 'bar',
        'n': 30,
        'message': 'Hello World',
        'bet': 'a42a'
    }
    test4 = seq.parse(tokenize('/suppress=,bar'))
    assert test4 == {'suppress': ',bar'}
    lexemes = tokenize(constraints)
    return top.parse(lexemes)
Example #12
def parse(constraints): 
    """Using funcparserlib turn constraints into a mongo query

    NOTE: this uses functors, see:

    http://spb-archlinux.ru/2009/funcparserlib/Tutorial
    """
    tokval = lambda tok: tok.value
    char = lambda tok: tok.code == 'CHAR'
    chars = some(char) >> tokval
    operator = lambda s: a(Token('OP', s)) >> tokval
    const = lambda x: lambda _: x
    makeop = lambda s: operator(s) >> const(s)

    item = many(chars) >> (lambda x: ''.join(x))
    test1 = item.parse(tokenize('hello123'))
    assert test1 == 'hello123'
    test1b = item.parse(tokenize('42a'))
    assert test1b == '42a'
    test1c = item.parse(tokenize('cam-oeprod-123299-master'))
    assert test1c == 'cam-oeprod-123299-master'
    test1d = item.parse(tokenize('Hello world'))
    assert test1d == 'Hello world'
    equals = makeop('=')
    assert  equals.parse(tokenize('=')) == '='
    slash = makeop('/')
    value = item >> possible_int
    term = (item + equals + value) >> (lambda x: (x[0], x[2]))
    test2 = term.parse(tokenize('dut=catgut'))
    assert test2 == ('dut','catgut')
    endmark = a(Token('END', ''))
    seq = (many((slash + term) >> (lambda x: x[1])) >> dict)
    top = (seq + endmark) >> (lambda x: x[0])
    test3 = seq.parse(tokenize(
        '/dut=catgut/foo=bar/n=30/bet=a42a/message=Hello World'))
    assert test3 == {'dut': 'catgut', 'foo': 'bar', 'n': 30,
                     'message': 'Hello World', 'bet': 'a42a'}
    test4 = seq.parse(tokenize('/suppress=,bar'))
    assert test4 == {'suppress': ',bar'}
    lexemes = tokenize(constraints)
    return top.parse(lexemes)
Example #13
def parse(seq):
    'Sequence(Token) -> object'
    const = lambda x : lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    #css_text = 
    #css = skip(finished)
    number = some(lambda tok: tok.type == 'NUMBER') >> tokval >> int

    return type(number.parse(seq))
Example #14
def create_grammar():
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    null = n('null')
    true = n('true')
    false = n('false')
    number = toktype('Number')
    string = toktype('String')
    value = forward_decl()
    member = string + op_(':') + value
    object_ = (op_('{') + maybe(member + many(op_(',') + member)) + op_('}'))
    array = (op_('[') + maybe(value + many(op_(',') + value)) + op_(']'))
    value.define(null | true | false | object_ | array | number | string)
    json_text = object_ | array
    json_file = json_text + skip(finished)

    return json_file
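A minimal driving sketch for the grammar above, assuming a lexer in the style of the funcparserlib JSON example that emits 'Op', 'Number', 'String' and 'Name' tokens and drops whitespace; the tokenize_json helper and its regexes are illustrative, not part of the original module.

from funcparserlib.lexer import make_tokenizer

def tokenize_json(text):
    # Illustrative token specs; the real project's lexer may differ.
    specs = [
        ('Space', (r'[ \t\r\n]+',)),
        ('String', (r'"(\\.|[^"\\])*"',)),
        ('Number', (r'-?(0|[1-9][0-9]*)(\.[0-9]+)?([Ee][+-]?[0-9]+)?',)),
        ('Op', (r'[{}\[\],:]',)),
        ('Name', (r'[A-Za-z_][A-Za-z_0-9]*',)),
    ]
    return [t for t in make_tokenizer(specs)(text) if t.type != 'Space']

json_file = create_grammar()
tree = json_file.parse(tokenize_json('{"answer": 42, "ok": true}'))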
Example #15
def create_parser():
    # operator: '~=' | '>=' | '<=' | '<' | '>' | '='
    operator = some(lambda tok: tok.type == 'CMP') >> choose_class

    # value: STRING | WORD
    word = some(lambda tok: tok.type == 'WORD') >> Text
    string = some(lambda tok: tok.type == 'STRING') >> QuotedText
    value = string | word

    # function: WORD '(' ')'
    open_brace = skip(a(Token('BR', '(')))
    close_brace = skip(a(Token('BR', ')')))
    function = word + open_brace + close_brace >> Function

    # field_expr: WORD operator value
    fieldexpr = (word + operator + (function | value)) >> (lambda x: x[1]
                                                           ([x[0], x[2]]))

    OR = a(Token('OP', 'OR')) >> choose_class
    AND = a(Token('OP', 'AND')) >> choose_class

    def eval(data):
        arg1, lst = data
        for f, arg2 in lst:
            arg1 = f([arg1, arg2])

        return arg1

    def eval(data):
        # reduce-based variant (shadows the loop-based definition above)
        lft, args = data
        return reduce(lambda arg1, f_arg2: f_arg2[0]([arg1, f_arg2[1]]), args, lft)

    expr = forward_decl()

    basexpr = open_brace + expr + close_brace | fieldexpr
    andexpr = (basexpr + many(AND + basexpr)) >> eval
    orexpr = (andexpr + many(OR + andexpr)) >> eval
    expr.define(orexpr)

    return expr
Example #16
def _parse(seq):
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token(u'Op', s)) >> tokval
    op_ = lambda s: skip(op(s))

    def make_string(args):
        context, value = args
        if not context: context = 'any:'
        return String(unescape_str(value[1:-1]), context[:-1])

    def make_regex(args):
        context, value = args
        value, modifiers = value.rsplit('/', 1)
        value = value[1:]
        if not context: context = 'any:'
        return Regex(unescape_regex(value), modifiers, context[:-1])

    def make_or(args):
        return Or(*args)

    def make_and(args):
        return And(*args)

    def make_not(x):
        return Not(x)

    context = maybe(toktype(u'Prefix'))
    string = (context + toktype(u'String')) >> make_string
    regex = (context + toktype(u'Regex')) >> make_regex

    par_term = forward_decl()
    simple_term = forward_decl()
    term = forward_decl()
    not_term = forward_decl()
    and_term = forward_decl()
    or_term = forward_decl()

    par_term.define(op_(u'(') + term + op_(u')'))

    simple_term.define(par_term | string | regex)
    not_term.define(op_('not') + not_term >> make_not | simple_term)
    and_term.define(not_term + op_('and') + and_term >> make_and | not_term)
    or_term.define(and_term + op_('or') + or_term >> make_or | and_term)

    term.define(or_term)

    eof = skip(toktype(u'EOF'))
    filter_expr = (term + eof) | (eof >> const(Any()))
    return filter_expr.parse(seq)
Example #17
def parse(seq):
    """Sequence(Token) -> object"""
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    n = lambda s: a(Token(u'Name', s)) >> tokval
    op = lambda s: a(Token(u'Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id_types = [u'Name', u'Number', u'String']
    id = some(lambda t: t.type in id_types).named(u'id') >> tokval
    make_graph_attr = lambda args: DefAttrs(u'graph', [Attr(*args)])
    make_edge = lambda x, xs, attrs: Edge([x] + xs, attrs)

    node_id = id  # + maybe(port)
    a_list = (
        id + maybe(op_(u'=') + id) + skip(maybe(op(u','))) >> unarg(Attr))
    attr_list = (many(op_(u'[') + many(a_list) + op_(u']')) >> flatten)
    attr_stmt = (
        (n(u'graph') | n(u'node') | n(u'edge')) + attr_list >> unarg(DefAttrs))
    graph_attr = id + op_(u'=') + id >> make_graph_attr
    node_stmt = node_id + attr_list >> unarg(Node)
    # We use a forward_decl because of circular definitions like (stmt_list ->
    # stmt -> subgraph -> stmt_list)
    subgraph = forward_decl()
    edge_rhs = skip(op(u'->') | op(u'--')) + (subgraph | node_id)
    edge_stmt = ((subgraph | node_id) + oneplus(edge_rhs) + attr_list >>
                 unarg(make_edge))
    stmt = (attr_stmt | edge_stmt | subgraph | graph_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(u';'))))
    subgraph.define(
        skip(n(u'subgraph')) + maybe(id) + op_(u'{') + stmt_list +
        op_(u'}') >> unarg(SubGraph))
    graph = (maybe(n(u'strict')) + maybe(n(u'graph') | n(u'digraph')) +
             maybe(id) + op_(u'{') + stmt_list + op_(u'}') >> unarg(Graph))
    dotfile = graph + skip(finished)

    return dotfile.parse(seq)
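For context, a lexing sketch in the spirit of the funcparserlib DOT example that this grammar pairs with: it needs 'Name', 'Op', 'Number' and 'String' tokens with whitespace dropped. The regexes below are illustrative, and the AST classes (Graph, Node, Edge, Attr, DefAttrs, SubGraph) are assumed to be defined alongside the original parse().

from funcparserlib.lexer import make_tokenizer

def tokenize_dot(text):
    specs = [
        ('Space', (r'[ \t\r\n]+',)),
        ('Name', (r'[A-Za-z_][A-Za-z_0-9]*',)),
        ('Op', (r'[{};,=\[\]]|->|--',)),
        ('Number', (r'-?(\.[0-9]+|[0-9]+(\.[0-9]*)?)',)),
        ('String', (r'"[^"]*"',)),
    ]
    return [t for t in make_tokenizer(specs)(text) if t.type != 'Space']

graph_ast = parse(tokenize_dot('digraph g { a -> b [color=red]; }'))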
Example #18
def parse(seq):
    'Sequence(Token) -> object'
    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t: t.type in ['Name', 'Number', 'Color', 'String']).named(
        'id') >> tokval
    date = some(lambda t: t.type == 'Date').named('date') >> tokval
    make_node = lambda args: Node(*args)

    node_stmt = id + op_(':') + date + maybe(op_('-') + date) >> make_node
    chart = (many(node_stmt + skip(maybe(op(';')))) >> Chart)
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #19
def parse(seq):
    """
    Parses the list of tokens and generates an AST.
    """
    def eval_expr(z, list):
        return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)
    unarg = lambda f: lambda x: f(*x)
    tokval = lambda x: x.value # returns the value of a token
    toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
    const = lambda x: lambda _: x # like ^^^ in Scala

    op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
    op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it

    lst = lambda x: [x[0],] + x[1]
    tup = lambda x: (x[0], x[1])

    makeop = lambda s, f: op(s) >> const(f)

    add = makeop('+', Add)
    sub = makeop('-', Sub)
    mul = makeop('*', Mul)
    div = makeop('/', Div)

    lt = makeop('<', Lt)
    gt = makeop('>', Gt)
    eq = makeop('=', Eq)

    operation = add | sub | mul | div | lt | gt | eq

    decl = with_forward_decls(lambda:toktype('Var') + op_('=') + (exp | fun) >> tup)
    decls = decl + many(skip(toktype('Semicolon')) + decl) >> lst
    variable = toktype('Var') >> Variable
    variables = variable + many(skip(toktype('Comma')) + variable) >> lst
    fun = with_forward_decls(lambda: skip(toktype('Fun')) + variables + skip(toktype('Arrow')) + exp + skip(toktype('End'))) >> unarg(Fun)
    parameters = with_forward_decls(lambda: exp + many(skip(toktype('Comma')) + exp) >> lst)
    call = skip(toktype('Call')) + (fun | variable) + skip(toktype('Lp')) + parameters + skip(toktype('Rp')) >> unarg(Call)
    ex = with_forward_decls(lambda:variable | toktype('Number') >> (lambda x: Const(int(x))) |\
        toktype('True') >> (lambda x: Const(True)) | toktype('False') >> (lambda x: Const(False)) |\
        skip(toktype('Let')) + decls + skip(toktype('In')) + exp + skip(toktype('End')) >> unarg(Let) |\
        skip(toktype('If')) + exp + skip(toktype('Then')) + exp + maybe(skip(toktype('Else')) + exp) + skip(toktype('Fi')) >> unarg(If) |\
        fun | call)
    exp = ex + many(operation + ex) >> unarg(eval_expr)
    prog = skip(toktype('Prog')) + exp >> Prog

    return prog.parse(seq)
Example #20
def language_element(key, tok_type, combinator=a):
    """
    Parser to match language element by using a certain combinator

    :param key: exact key of the token, e.g.: `begin`, `model`, `)`
    :param tok_type: predefined token type to be matched
    :param combinator: use the combinator to create a parser
    :return: a parser that matches elements using the above condition
    """
    if combinator == a:
        return combinator(Token(tok_type, key))
    elif combinator == some:
        return combinator(lambda tok: tok == Token(tok_type, key))
    elif combinator == maybe:
        return combinator(a(Token(tok_type, key)))
    else:
        raise Exception("Parser creation error")
Example #21
    def test_error_info(self):
        tokenize = make_tokenizer([
            ('keyword', (r'(is|end)',)),
            ('id', (r'[a-z]+',)),
            ('space', (r'[ \t]+',)),
            ('nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize('f is ф'))
        except LexerError as e:
            self.assertEqual(six.text_type(e),
                             'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail('must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token('keyword', s))

        id = sometok('id')
        is_ = keyword('is')
        end = keyword('end')
        nl = sometok('nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != 'space']
        try:
            file.parse(toks)
        except NoParseError as e:
            self.assertEqual(e.msg,
                             "got unexpected token: 2,11-2,14: id 'spam'")
            self.assertEqual(e.state.pos, 4)
            self.assertEqual(e.state.max, 7)
            # May raise KeyError
            t = toks[e.state.max]
            self.assertEqual(t, Token('id', 'spam'))
            self.assertEqual((t.start, t.end), ((2, 11), (2, 14)))
        else:
            self.fail('must raise NoParseError')
Example #22
    def test_error_info(self):
        tokenize = make_tokenizer([
            ('keyword', (r'(is|end)',)),
            ('id', (r'[a-z]+',)),
            ('space', (r'[ \t]+',)),
            ('nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize('f is ф'))
        except LexerError as e:
            self.assertEqual(str(e),
                             'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail('must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token('keyword', s))

        id = sometok('id')
        is_ = keyword('is')
        end = keyword('end')
        nl = sometok('nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != 'space']
        try:
            file.parse(toks)
        except NoParseError as e:
            self.assertEqual(e.msg,
                             "got unexpected token: 2,11-2,14: id 'spam'")
            self.assertEqual(e.state.pos, 4)
            self.assertEqual(e.state.max, 7)
            # May raise KeyError
            t = toks[e.state.max]
            self.assertEqual(t, Token('id', 'spam'))
            self.assertEqual((t.start, t.end), ((2, 11), (2, 14)))
        else:
            self.fail('must raise NoParseError')
Example #23
def _parse_rule(seq):
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    sep = lambda s: a(Token(u'Sep', s)) >> tokval
    s_sep = lambda s: skip(sep(s))

    level = toktype(u'Level')
    comparator = toktype(u'Comparator') >> COMPARATORS.get
    number = toktype(u'Number') >> float
    historical = toktype(u'Historical')
    unit = toktype(u'Unit')
    operator = toktype(u'Operator')
    logical_operator = toktype(u'LogicalOperator') >> LOGICAL_OPERATORS.get

    exp = comparator + (
        (number + maybe(unit)) | historical) + maybe(operator + number)
    rule = (level + s_sep(':') + exp + many(logical_operator + exp))

    overall = rule + skip(finished)
    return overall.parse(seq)
Example #24
def _parse_rule(seq):
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    sep = lambda s: a(Token(u'Sep', s)) >> tokval
    s_sep = lambda s: skip(sep(s))

    level = toktype(u'Level')
    comparator = toktype(u'Comparator') >> COMPARATORS.get
    number = toktype(u'Number') >> float
    historical = toktype(u'Historical')
    unit = toktype(u'Unit')
    operator = toktype(u'Operator')
    logical_operator = toktype(u'LogicalOperator') >> LOGICAL_OPERATORS.get

    exp = comparator + ((number + maybe(unit)) | historical) + maybe(operator + number)
    rule = (
        level + s_sep(':') + exp + many(logical_operator + exp)
    )

    overall = rule + skip(finished)
    return overall.parse(seq)
Example #25
def parse(seq):
	"""Returns the AST of the given token sequence."""
	def eval_expr(z, list):
		return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)
	unarg = lambda f: lambda x: f(*x)
	const = lambda x: lambda _: x # like ^^^ in Scala

	tokval = lambda x: x.value # returns the value of a token
	op = lambda s: a(Token('Op', s)) >> tokval # return the value if token is Op
	op_ = lambda s: skip(op(s)) # checks if token is Op and ignores it
	toktype = lambda t: some(lambda x: x.type == t) >> tokval # checks type of token
	def lst(h,t):
		return [h,] + t
	call = lambda x: Call(x[0], x[1])

	makeop = lambda s, f: op(s) >> const(f)

	add = makeop('+', Plus)
	sub = makeop('-', Minus)
	mul = makeop('*', Times)
	div = makeop('/', Div)

	def make_const(i):
		return const(int(i))

	number = toktype('Number') >> Const

	mul_op = mul | div
	add_op = add | sub

	factor = with_forward_decls(lambda:
		number | op_('(') + exp + op_(')') | call)
	term = factor + many(mul_op + factor) >> unarg(eval_expr)
	exp = term + many(add_op + term) >> unarg(eval_expr)
	exp_lst = with_forward_decls(lambda:
		exp + many(op_(',') + exp) >> unarg(lst))
	call = toktype('Name') + op_('(') + exp_lst + op_(')') >> call

	return exp.parse(seq)
Example #26
def grammar():
    lparen = skip(a(LParen()))
    rparen = skip(a(RParen()))

    def collapse(t):
        t[0].terms = t[1]
        return t[0]

    @with_forward_decls
    def ldap_filter():
        return (ldap_and | ldap_or | ldap_not | ldap_test)

    ldap_and = (lparen + a(And()) + oneplus(ldap_filter) + rparen) >> collapse
    ldap_or = (lparen + a(Or()) + oneplus(ldap_filter) + rparen) >> collapse
    ldap_not = (lparen + a(Not()) + ldap_filter + rparen) >> collapse
    ldap_test = lparen + a(Test()) + rparen

    return ldap_filter + skip(finished)
Example #27
def parse(sequence, query):
    tokval = lambda x: x.value
    toktype = lambda t: (
        some(lambda x: x.type == t).named('(type %s)' % t) >> tokval
    )
    operation = lambda s: a(Token('Op', s)) >> tokval
    operation_ = lambda s: skip(operation(s))

    create_param = lambda param_name: query.get_aliased_param(
        param_name
    )
    make_and = lambda params: And(params[0], params[1])
    make_or = lambda params: Or(params[0], params[1])
    make_not = lambda inner: Not(inner)

    word = toktype('Word')
    inner_bracket = forward_decl()
    left_of_and = forward_decl()
    right_of_and = forward_decl()
    left_of_or = forward_decl()
    not_ = forward_decl()
    bracket = operation_('(') + inner_bracket + operation_(')')
    and_ = left_of_and + operation_('&') + right_of_and >> make_and
    or_ = left_of_or + operation_('|') + inner_bracket >> make_or
    param = word >> create_param

    not_.define(operation_('!') + (bracket | param))
    not_ = not_ >> make_not

    left_of_or.define(and_ | bracket | not_ | param)
    left_of_and.define(bracket | not_ | param)
    right_of_and.define(left_of_and)
    inner_bracket.define(or_ | and_ | bracket | not_ | param)

    definition = (bracket | inner_bracket) + finished

    return definition.parse(sequence)
Example #28
def grammar():
    lparen = skip(a(LParen()))
    rparen = skip(a(RParen()))

    def collapse(t):
        t[0].terms = t[1]
        return t[0]

    @with_forward_decls
    def ldap_filter():
        return (ldap_and | ldap_or | ldap_not | ldap_test)

    ldap_and = (lparen + a(And()) + oneplus(ldap_filter) + rparen) >> collapse
    ldap_or = (lparen + a(Or()) + oneplus(ldap_filter) + rparen) >> collapse
    ldap_not = (lparen + a(Not()) + ldap_filter + rparen) >> collapse
    ldap_test = lparen + a(Test()) + rparen

    return ldap_filter + skip(finished)
Example #29
from __future__ import absolute_import

import funcparserlib.parser as p

string = lambda x: x or ''
cat = ''.join

negative = p.maybe(p.a('-')) >> string
digits = p.oneplus(p.some(lambda char: char.isdigit())) >> cat
decimal_part = (p.maybe(p.a('.') + digits)) >> string >> cat
number = (negative + digits + decimal_part) >> cat >> float

addition = number + p.skip(p.a('+')) + number >> sum

expression = addition | number
expression = expression + p.finished

def calculate(text):
    return expression.parse(text)[0]
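Since funcparserlib parsers accept any sequence, this grammar consumes the characters of the input string directly; a few hedged usage examples:

print(calculate('2+3'))     # 5.0, via the addition branch
print(calculate('-1.5+4'))  # 2.5, sign and decimal part handled by the number parser
print(calculate('7'))       # 7.0, falls back to the bare-number alternative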
Example #30
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_number(n):
        try:
            return int(n)
        except ValueError:
            return float(n)

    def unescape(s):
        std = {
            '"': '"',
            '\\': '\\',
            '/': '/',
            'b': '\b',
            'f': '\f',
            'n': '\n',
            'r': '\r',
            't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return unescape(n[1:-1])

    null = n('null') >> const(None)
    true = n('true') >> const(True)
    false = n('false') >> const(False)
    number = toktype('Number') >> make_number
    string = toktype('String') >> make_string
    value = forward_decl()
    member = string + op_(':') + value >> tuple
    object = (op_('{') + maybe(member + many(op_(',') + member)) + op_('}') >>
              make_object)
    array = (op_('[') + maybe(value + many(op_(',') + value)) + op_(']') >>
             make_array)
    value.define(null | true | false | object | array | number | string)
    json_text = object | array
    json_file = json_text + skip(finished)

    return json_file.parse(seq)
Example #31
File: tokens.py Project: joar/moln
 def type_decl(type_name):
     return p.a(
         Token('Type', type_name)
     ).named('Type({})'.format(type_name))
Example #32
File: tokens.py Project: joar/moln
def decode_tokens(s: typing.List[Token]):
    def _to_int(token):
        _log.debug('_to_int: %r', token)
        return int(token)

    str_strip_re = re.compile(rb'^\d+:')

    def _to_string(s):
        _log.debug('_to_string: %r', s)
        return str_strip_re.sub(b'', s)

    def _to_list(_tokens):
        _log.debug('_to_list: %r', _tokens)

        return _tokens

    def _to_dict(n):
        _log.debug('_to_dict: %r', n)
        return dict(_to_list(n))

    def token_value(x):
        return x.value

    def token_type(t):
        return p.some(lambda x: x.name == t) >> token_value

    def type_decl(type_name):
        return p.a(
            Token('Type', type_name)
        ).named('Type({})'.format(type_name))

    value = p.forward_decl().named('Value')

    integer = token_type('Number')

    end = p.a(Token('End', b'e'))

    # String is special, has no type
    str_decl = (
        token_type('String') >> _to_string
    ).named('String')

    dict_decl = (
        p.skip(type_decl(b'd')) +
        p.many(value + value) +
        p.skip(end) >> _to_dict
    ).named('Dict')

    list_decl = (
        p.skip(type_decl(b'l')) +
        p.many(value) +
        p.skip(end) >> _to_list
    ).named('List')

    integer_decl = (
        p.skip(type_decl(b'i')) +
        integer +
        p.skip(end) >> _to_int
    ).named('Integer')

    value.define(
        integer_decl |
        dict_decl |
        list_decl |
        str_decl
    )

    bencode_decl = (
        value +
        p.skip(p.finished)
    ).named('Bencode')

    return bencode_decl.parse(s)
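A hedged round-trip sketch, assuming the project's Token (defined in its tokens.py, not shown) behaves like a simple (name, value) pair so that the equality checks and the .name attribute used above work:

# The bencoded integer b"i42e" would typically be lexed into three tokens,
# which this parser decodes back to the Python int 42 (int(b'42') via _to_int).
tokens = [Token('Type', b'i'), Token('Number', b'42'), Token('End', b'e')]
assert decode_tokens(tokens) == 42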
Example #33
# -*- coding: utf-8 -*-

'Tests for issue #8: prevent definitions of non-halting parsers.'

from funcparserlib.parser import (a, many, fwd, maybe, pure, oneplus,
                                  GrammarError, makes_progress, non_halting)
from funcparserlib.contrib.common import const
from nose.tools import ok_, assert_raises

x = a('x')
p1 = maybe(x)
p3 = maybe(x) + x
p4 = many(p3)
p5 = x | many(x)
p8 = x >> const(True)
p9 = pure(True)


def test_makes_progress():
    ok_(not makes_progress(p1))
    ok_(not makes_progress(p4))
    ok_(not makes_progress(p5))
    ok_(not makes_progress(p9))
    ok_(makes_progress(p3))
    ok_(makes_progress(p8))


def test_non_halting_many():
    assert_raises(GrammarError, lambda: many(many(x)).parse(''))
    assert_raises(GrammarError, lambda: oneplus(many(x)).parse(''))
    assert_raises(GrammarError, lambda: many(p1).parse(''))
Example #34
 def test_oneplus(self):
     x = a(u'x')
     y = a(u'y')
     expr = oneplus(x + y)
     self.assertEqual(expr.parse(u'xyxyxy'),
                      ([(u'x', u'y'), (u'x', u'y'), (u'x', u'y')]))
Example #35
File: kll.py Project: audax/kll
    return make_hidCode_range("SysCode", rangeVals)


def make_consCode_range(rangeVals):
    return make_hidCode_range("ConsCode", rangeVals)

    ## Base Rules


const = lambda x: lambda _: x
unarg = lambda f: lambda x: f(*x)
flatten = lambda list: sum(list, [])

tokenValue = lambda x: x.value
tokenType = lambda t: some(lambda x: x.type == t) >> tokenValue
operator = lambda s: a(Token("Operator", s)) >> tokenValue
parenthesis = lambda s: a(Token("Parenthesis", s)) >> tokenValue
eol = a(Token("EndOfLine", ";"))


def listElem(item):
    return [item]


def listToTuple(items):
    return tuple(items)

    # Flatten only the top layer (list of lists of ...)


def oneLayerFlatten(items):
Example #36
def op(s):
    return (commented(parser.a(lexer.Token('op', s)) >>
            (lambda tok: tok.value)) >> unarg(Op))
Example #37
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_number(n):
        try:
            return int(n)
        except ValueError:
            return float(n)

    def unescape(s):
        std = {
            '"': '"', '\\': '\\', '/': '/', 'b': '\b', 'f': '\f',
            'n': '\n', 'r': '\r', 't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return unescape(n[1:-1])

    null = n('null') >> const(None)
    true = n('true') >> const(True)
    false = n('false') >> const(False)
    number = toktype('Number') >> make_number
    string = toktype('String') >> make_string
    value = forward_decl()
    member = string + op_(':') + value >> tuple
    object = (
        op_('{') +
        maybe(member + many(op_(',') + member)) +
        op_('}')
        >> make_object)
    array = (
        op_('[') +
        maybe(value + many(op_(',') + value)) +
        op_(']')
        >> make_array)
    value.define(
        null
        | true
        | false
        | object
        | array
        | number
        | string)
    json_text = object | array
    json_file = json_text + skip(finished)

    return json_file.parse(seq)
Example #38
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('OP', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('NAME', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_int(n):
        return '%s' % int(n)

    def make_real(n):
        return '%s' % float(n)

    def unescape(s):
        std = {
            '"': '"',
            '\\': '\\',
            '/': '/',
            'b': '\b',
            'f': '\f',
            'n': '\n',
            'r': '\r',
            't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return n
        #return unescape(n[1:-1])

    def make_all_models(models):
        return dict(models)


#   all_attrs = []
#        for i in attrs:
#            attr = i[0]
#            if attr not in all_attrs:
#                all_attrs.append(attr)
#            else:
#                raise Exception('Attribute %s is already defined in class'%attr)

    def make_fields(n):
        #return dict(n)
        return Field(n)

    def make_params(n):
        return n

    null = toktype('NONE') >> const("None")
    true = toktype('TRUE') >> const("True")
    false = toktype('FALSE') >> const("False")
    number = toktype('INT') >> make_int
    real = toktype('REAL') >> make_real
    string = toktype('STRING') >> make_string
    value = forward_decl()
    name = toktype('NAME')
    field = toktype('FIELD') + maybe(op_('(') + many(value) +
                                     op_(')')) >> tuple
    member = string + op_(':') + value >> tuple
    attrs = forward_decl()
    params = forward_decl()

    models = many(name + op_('::') + many(attrs)) >> make_all_models

    attrs.define(name + op_(':') + field + many(params) >> make_fields)

    params.define(name + op_('=') + value >> tuple)

    value.define(null | true | false | name | number | real | string)
    parser_text = models
    parser_file = parser_text + skip(finished)
    return parser_file.parse(seq)
Example #39
def sym(wanted):
    "Parse and skip the given symbol or keyword."
    if wanted.startswith(":"):
        return skip(a(HyKeyword(wanted[1:])))
    return skip(some(lambda x: isinstance(x, HySymbol) and x == wanted))
Example #40
def parse(seq):
    """Sequence(Token) -> object"""
    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    _id = some(lambda t: t.type in ['Name', 'Number', 'String']) >> tokval
    keyword = lambda s: a(Token('Name', s)) >> tokval
    separator = some(lambda t: t.type == 'Separator') >> tokval

    def make_separator(sep):
        return Separator(sep[0:3], sep[3:-3].strip())

    #
    # parts of syntax
    #
    option_stmt = (_id + maybe(op_('=') + _id) >> create_mapper(Attr))
    option_list = (maybe(
        op_('[') + option_stmt + many(op_(',') + option_stmt) + op_(']')) >>
                   create_mapper(oneplus_to_list, default_value=[]))

    #  attributes statement::
    #     default_shape = box;
    #     default_fontsize = 16;
    #
    attribute_stmt = (_id + op_('=') + _id >> create_mapper(Attr))

    #  node statement::
    #     A;
    #     B [attr = value, attr = value];
    #
    node_stmt = (_id + option_list >> create_mapper(Node))

    #  separator statement::
    #     === message ===
    #     ... message ...
    #
    separator_stmt = (separator >> make_separator)

    #  edge statement::
    #     A -> B;
    #     C -> D {
    #       D -> E;
    #     }
    #
    edge_block = forward_decl()
    edge_relation = (op('<<--') | op('<--') | op('<<-') | op('<-') | op('->')
                     | op('->>') | op('-->') | op('-->>') | op('=>'))
    edge_stmt = (_id + edge_relation + _id + many(edge_relation + _id) +
                 option_list + maybe(edge_block) >> create_mapper(Edge))
    edge_block_inline_stmt_list = (many(edge_stmt + skip(maybe(op(';')))
                                        | separator_stmt))
    edge_block.define(
        op_('{') + edge_block_inline_stmt_list + op_('}') >> Statements)

    #  group statement::
    #     group {
    #        A;
    #     }
    #
    group_inline_stmt_list = (many((attribute_stmt | node_stmt) +
                                   skip(maybe(op(';')))))
    group_stmt = (skip(keyword('group')) + skip(maybe(_id)) + op_('{') +
                  group_inline_stmt_list + op_('}') >> Group)

    #  combined fragment (alt, loop) statement::
    #     loop {
    #        A -> B;
    #     }
    #     alt {
    #        D -> E;
    #     }
    #
    fragment_stmt = forward_decl()
    fragment_inline_stmt = (attribute_stmt | fragment_stmt | edge_stmt
                            | node_stmt)
    fragment_inline_stmt_list = (many(fragment_inline_stmt +
                                      skip(maybe(op(';')))))
    fragment_types = (keyword('alt') | keyword('loop'))
    fragment_stmt.define(fragment_types + maybe(_id) + op_('{') +
                         fragment_inline_stmt_list +
                         op_('}') >> create_mapper(Fragment))

    #  extension statement (class, plugin)::
    #     class red [color = red];
    #     plugin attributes [name = Name];
    #
    extension_stmt = ((keyword('class') | keyword('plugin')) + _id +
                      option_list >> create_mapper(Extension))

    # diagram statement::
    #     seqdiag {
    #        A -> B;
    #     }
    #
    diagram_id = (
        (keyword('diagram') | keyword('seqdiag')) + maybe(_id) >> list)
    diagram_inline_stmt = (extension_stmt | attribute_stmt | fragment_stmt
                           | group_stmt | edge_stmt | separator_stmt
                           | node_stmt)
    diagram_inline_stmt_list = (many(diagram_inline_stmt +
                                     skip(maybe(op(';')))))
    diagram = (maybe(diagram_id) + op_('{') + diagram_inline_stmt_list +
               op_('}') >> create_mapper(Diagram))
    dotfile = diagram + skip(finished)

    return dotfile.parse(seq)
Example #41
def parse(seq):
    """Sequence(Token) -> object"""
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda list: sum(list, [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id_types = ['Name', 'Number', 'String']
    id = some(lambda t: t.type in id_types).named('id') >> tokval
    make_graph_attr = lambda args: DefAttrs('graph', [Attr(*args)])
    make_edge = lambda x, xs, attrs: Edge([x] + xs, attrs)

    node_id = id # + maybe(port)
    a_list = (
        id +
        maybe(op_('=') + id) +
        skip(maybe(op(',')))
        >> unarg(Attr))
    attr_list = (
        many(op_('[') + many(a_list) + op_(']'))
        >> flatten)
    attr_stmt = (
        (n('graph') | n('node') | n('edge')) +
        attr_list
        >> unarg(DefAttrs))
    graph_attr = id + op_('=') + id >> make_graph_attr
    node_stmt = node_id + attr_list >> unarg(Node)
    # We use a forward_decl because of circular definitions like (stmt_list ->
    # stmt -> subgraph -> stmt_list)
    subgraph = forward_decl()
    edge_rhs = skip(op('->') | op('--')) + (subgraph | node_id)
    edge_stmt = (
        (subgraph | node_id) +
        oneplus(edge_rhs) +
        attr_list
        >> unarg(make_edge))
    stmt = (
        attr_stmt
        | edge_stmt
        | subgraph
        | graph_attr
        | node_stmt
    )
    stmt_list = many(stmt + skip(maybe(op(';'))))
    subgraph.define(
        skip(n('subgraph')) +
        maybe(id) +
        op_('{') +
        stmt_list +
        op_('}')
        >> unarg(SubGraph))
    graph = (
        maybe(n('strict')) +
        maybe(n('graph') | n('digraph')) +
        maybe(id) +
        op_('{') +
        stmt_list +
        op_('}')
        >> unarg(Graph))
    dotfile = graph + skip(finished)

    return dotfile.parse(seq)
Example #42
def evaluate(expression, environment):
    """Evaluate an expression in the specified variable environment."""

    # Well known functions
    const = lambda x: lambda _: x
    unarg = lambda f: lambda args: f(*args)

    # Semantic actions and auxiliary functions
    tokval = lambda tok: tok.value
    makeop = lambda s, f: op(s) >> const(f)
    sometok = lambda type: some(lambda tok: tok.type == type)

    def eval_name(s):
        try:
            return environment[s] # Case-sensitive
        except KeyError:
            raise ValueError('unbound variable: %s' % s)

    def make_number(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    def eval_expr(expr, op_expr_pairs):
        result = expr
        for op, expr in op_expr_pairs:
            result = op(result, expr)
        return result

    def eval_call(func_name, maybe_expr_and_exprs):
        if maybe_expr_and_exprs:
            expr, exprs = maybe_expr_and_exprs
            args = [expr] + exprs
        else:
            args = []

        f = eval_name(func_name)
        if not callable(f):
            raise TypeError('variable is not callable: %s' % func_name)

        argcount = len(args)
        f_argcount = f.__code__.co_argcount
        if f_argcount != argcount:
            raise TypeError('%s takes %d arguments (%d given)' %
                            (func_name, f_argcount, argcount))
        return f(*args)

    # Primitives
    number = (
        sometok('number')
        >> tokval
        >> make_number)
    raw_name = sometok('name') >> tokval
    name = raw_name >> eval_name
    op  = lambda s: a(Token('op', s)) >> tokval
    op_ = lambda s: skip(op(s))

    add = makeop('+', operator.add)
    sub = makeop('-', operator.sub)
    mul = makeop('*', operator.mul)
    div = makeop('/', operator.truediv)

    mul_op = mul | div
    add_op = add | sub

    # Means of composition
    expr = forward_decl()

    call = (
        raw_name + op_('(') + maybe(expr + many(op_(',') + expr)) + op_(')')
        >> unarg(eval_call))

    primary = (
          number
        | call
        | name
        | op_('(') + expr + op_(')'))

    term = (
        primary + many(mul_op + primary)
        >> unarg(eval_expr))

    expr.define(
        term + many(add_op + term)
        >> unarg(eval_expr))

    # Toplevel parsers
    toplevel = maybe(expr) + skip(finished)

    return toplevel.parse(tokenize(expression))
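A hedged usage note: tokenize() comes from the surrounding module (not shown) and is assumed to emit 'number', 'name' and 'op' tokens with whitespace dropped; under that assumption calls look like:

print(evaluate('2 * (3 + 4)', {}))                          # 14
print(evaluate('x + 1', {'x': 41}))                         # 42
print(evaluate('double(21)', {'double': lambda x: x * 2}))  # 42, via the call rule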
Example #43
#@+node:peckj.20140124085532.4005: *4* make_number and negate
def make_number(s):
    try:
        return int(s)
    except ValueError:
        return float(s)


#negate = lambda n: n * -1
def negate(n):
    return n[1] * -1


#@+node:peckj.20140124085532.4010: *4* makeop
op = lambda s: a(Token(token.OP, s)) >> tokval
op_ = lambda s: skip(op(s))
const = lambda x: lambda _: x
makeop = lambda s, f: op(s) >> const(f)


#@+node:peckj.20140124085532.4008: *4* eval_expr
def eval_expr(z, list):
    return reduce(lambda s, fx: fx[0](s, fx[1]), list, z)


f = unarg(eval_expr)
#@+node:peckj.20140124085532.4003: *3* grammar
posnumber = (some(lambda tok: tok.type == 'NUMBER') >> tokval >> make_number)

add = makeop('+', operator.add)
Example #44
def parse(seq):
    """Sequence(Token) -> object"""
    id_tokens = ['Name', 'IPAddr', 'Number', 'String']

    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    _id = some(lambda t: t.type in id_tokens) >> tokval
    keyword = lambda s: a(Token('Name', s)) >> tokval

    def make_peer(first, edge_type, second, followers, attrs):
        edges = [Edge(first, edge_type, second, attrs)]

        from_node = second
        for edge_type, to_node in followers:
            edges.append(Edge(from_node, edge_type, to_node, attrs))
            from_node = to_node

        return Peer(edges)

    def make_route(first, edge_type, second, followers, attrs):
        edges = [Edge(first, edge_type, second, attrs)]

        from_node = second
        for edge_type, to_node in followers:
            edges.append(Edge(from_node, edge_type, to_node, attrs))
            from_node = to_node

        return Route(edges)

    #
    # parts of syntax
    #
    node_list = (
        _id +
        many(op_(',') + _id)
        >> create_mapper(oneplus_to_list)
    )
    option_stmt = (
        _id +
        maybe(op_('=') + _id)
        >> create_mapper(Attr)
    )
    option_list = (
        maybe(op_('[') + option_stmt + many(op_(',') + option_stmt) + op_(']'))
        >> create_mapper(oneplus_to_list, default_value=[])
    )

    #  node statement::
    #     A;
    #     B [attr = value, attr = value];
    #
    node_stmt = (
        _id + option_list
        >> create_mapper(Node)
    )

    #  peer network statement::
    #     A -- B;
    #
    edge_stmt = (
        _id +
        op('--') +
        _id +
        many(op('--') + _id) +
        option_list
        >> create_mapper(make_peer)
    )

    #  attributes statement::
    #     default_shape = box;
    #     default_fontsize = 16;
    #
    attribute_stmt = (
        _id + op_('=') + _id
        >> create_mapper(Attr)
    )

    #  extension statement (class, plugin)::
    #     class red [color = red];
    #     plugin attributes [name = Name];
    #
    extension_stmt = (
        (keyword('class') | keyword('plugin')) +
        _id +
        option_list
        >> create_mapper(Extension)
    )

    #  group statement::
    #     group {
    #        A;
    #     }
    #
    group_inline_stmt = (
        attribute_stmt |
        node_stmt
    )
    group_inline_stmt_list = (
        many(group_inline_stmt + skip(maybe(op(';'))))
    )
    group_stmt = (
        skip(keyword('group')) +
        maybe(_id) +
        op_('{') +
        group_inline_stmt_list +
        op_('}')
        >> create_mapper(Group)
    )

    #  network statement::
    #     network {
    #        A;
    #     }
    #
    network_inline_stmt = (
        attribute_stmt |
        group_stmt |
        node_stmt
    )
    network_inline_stmt_list = (
        many(network_inline_stmt + skip(maybe(op(';'))))
    )
    network_stmt = (
        skip(keyword('network')) +
        maybe(_id) +
        op_('{') +
        network_inline_stmt_list +
        op_('}')
        >> create_mapper(Network)
    )

    #  route statement::
    #     route {
    #       A -> B -> C;
    #     }
    #
    route_inline_stmt = (
        _id +
        op_('->') +
        _id +
        many(op_('->') + _id) +
        option_list
        >> create_mapper(make_route)
    )
    route_stmt = (
        skip(keyword('route')) +
        maybe(_id) +
        op_('{') +
        network_inline_stmt_list +
        op_('}')
        >> create_mapper(Network)
    )

    #
    # diagram statement::
    #     nwdiag {
    #        A;
    #     }
    #
    diagram_id = (
        (keyword('diagram') | keyword('nwdiag')) +
        maybe(_id)
        >> list
    )
    diagram_inline_stmt = (
        extension_stmt |
        network_stmt |
        group_stmt |
        attribute_stmt |
        route_stmt |
        edge_stmt |
        node_stmt
    )
    diagram_inline_stmt_list = (
        many(diagram_inline_stmt + skip(maybe(op(';'))))
    )
    diagram = (
        maybe(diagram_id) +
        op_('{') +
        diagram_inline_stmt_list +
        op_('}')
        >> create_mapper(Diagram)
    )
    dotfile = diagram + skip(finished)

    return dotfile.parse(seq)
Example #45
File: kll.py Project: slezier/kll
		start, end = end, start

	# Iterate from start to end, and generate the range
	return list( range( start, end + 1 ) )
	pass


 ## Base Rules

const       = lambda x: lambda _: x
unarg       = lambda f: lambda x: f(*x)
flatten     = lambda list: sum( list, [] )

tokenValue  = lambda x: x.value
tokenType   = lambda t: some( lambda x: x.type == t ) >> tokenValue
operator    = lambda s: a( Token( 'Operator', s ) ) >> tokenValue
parenthesis = lambda s: a( Token( 'Parenthesis', s ) ) >> tokenValue
eol         = a( Token( 'EndOfLine', ';' ) )

def listElem( item ):
	return [ item ]

def listToTuple( items ):
	return tuple( items )

  # Flatten only the top layer (list of lists of ...)
def oneLayerFlatten( items ):
	mainList = []
	for sublist in items:
		for item in sublist:
			mainList.append( item )
Example #46
 def test_many_backtracking(self):
     x = a(u'x')
     y = a(u'y')
     expr = many(x + y) + x + x
     self.assertEqual(expr.parse(u'xyxyxx'),
                      ([(u'x', u'y'), (u'x', u'y')], u'x', u'x'))
Example #47
tok_number = token("number")
tok_string = token("string")
dot = token(".")
colon = token(":")
single_quote = token('"')
double_quote = token("'")
quote = (single_quote | double_quote)
open_sq_brace = token("[")
close_sq_brace = token("]")
open_rnd_brace = token("(")
close_rnd_brace = token(")")

tok_constant = p.some(lambda t: t.value in {'nil', 'true', 'false'})
iden_start = p.skip(p.some(lambda t: t.type not in ".:"))
tok_splash = (p.a(Token("iden", "splash")) + iden_start) >> token_value
iden = token("iden")
opt_iden = iden | p.pure("")

# =========== Expressions parser
# FIXME: it should be rewritten using full Lua 5.2 grammar.

BINARY_OPS = set("+-*/^%><") | {"..", "==", "~=", ">=", "<=", "and", "or"}
UNARY_OPS = {"not", "-", "#"}

binary_op = p.some(lambda t: t.value in BINARY_OPS) >> token_value
unary_op = p.some(lambda t: t.value in UNARY_OPS) >> token_value

# expressions with binary and unary ops + parenthesis
@p.with_forward_decls
def value():
Example #48
        first = parsed[0]
        rest = parsed[1]
        rest.insert(0, first)
        return rest
    return ( parser + many(by + parser) ) >> append

identifier = some(lambda x: re.match(r"[A-Za-z][A-Za-z0-9_]*", x))
"""
parse(mapping, "R_1: x, => T") 
= 
{ 
    'table'               : 'R_1',
    'columns'             : ['x'],
    'auto_increment_table' : 'T',
}
"""
mapping = ( 
    identifier + skip(a(':')) + separated(identifier, skip(a(','))) + skip(a('=>')) + identifier 
) >> (lambda result: {
    'table'               : result[0],
    'columns'             : result[1],
    'auto_increment_table' : result[2],
})

ignore = (
    identifier + skip(a('.')) + identifier
)

if __name__ == '__main__':
    main()
Example #49
def make_application(function, args):
    return Application(function, args)

def make_lambda(x):
    if x[0] is not None:
        args = [x[0]] + x[1]
        expr = x[2]
    else:
        args = []
        expr = x[1]

    return AnonymousFunction(args, expr)

# primitives
op          = lambda s: a(Token('operator', s))
op_         = lambda s: skip(op(s))

kw          = lambda s: a(Token('name', s))
kw_         = lambda s: skip(kw(s))

add         = op('+') >> const(operator.add)
sub         = op('-') >> const(operator.sub)
mul         = op('*') >> const(operator.mul)
div         = op('/') >> const(operator.truediv)
power       = op('**') >> const(operator.pow)

and_        = op('and') >> logical_and
or_         = op('or') >> logical_or
not_        = op('not') >> const(operator.not_)
Example #50
import string
import funcparserlib.parser as p

import nodes
from timeseries import ScalarTimeSeries, TimeSeries, generate_two_dummy_data_sets


if __name__ == "__main__":
    input_ts = dict(zip(['A_TS', 'B_TS'], generate_two_dummy_data_sets()))

    integer = p.oneplus(p.some(lambda c: c.isdigit())) >> (lambda toks: int(''.join(toks)))
    def to_number(toks):
        if not toks[1]:
            return float(toks[0])
        return float("%s.%s".format(toks))
    number = integer + p.maybe(p.skip(p.a('.')) + integer) >> to_number
    timeseries_name = p.oneplus(p.some(lambda c: c.isupper() or c == '_')) >> (lambda toks: ''.join(toks))

    timeseries_expr = timeseries_name >> (lambda name: nodes.TimeSeriesNode(input_ts[name]))
    scalar_expr = number >> (lambda n: nodes.TimeSeriesNode(ScalarTimeSeries(n)))

    expr = p.forward_decl()

    expr_rest = p.forward_decl()
    expr.define(
        timeseries_expr + expr_rest >> (lambda x: x[1](x[0]))
        | scalar_expr + expr_rest >> (lambda x: x[1](x[0]))
    )

    # Holy cow, is there a better way to get around left-recursion?
    def generate_arithmetic_node_function(node_type):
Example #51
File: parse.py Project: cryham/kll
		print( tokens )
		return tokens



### Rules ###

## Base Rules

const       = lambda x: lambda _: x
unarg       = lambda f: lambda x: f(*x)
flatten     = lambda list: sum( list, [] )

tokenValue  = lambda x: x.value
tokenType   = lambda t: some( lambda x: x.type == t ) >> tokenValue
operator    = lambda s: a( Token( 'Operator', s ) ) >> tokenValue
parenthesis = lambda s: a( Token( 'Parenthesis', s ) ) >> tokenValue
bracket     = lambda s: a( Token( 'Bracket', s ) ) >> tokenValue
eol         = a( Token( 'EndOfLine', ';' ) )

def maybeFlatten( items ):
	'''
	Iterate through top-level lists
	Flatten, only if the element is also a list

	[[1,2],3,[[4,5]]] -> [1,2,3,[4,5]]
	'''
	new_list = []
	for elem in items:
		# Flatten only if a list
		if isinstance( elem, list ):
Example #52
        tokenize = make_tokenizer([
            (u'keyword', (r'(is|end)',)),
            (u'id', (r'[a-z]+',)),
            (u'space', (r'[ \t]+',)),
            (u'nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize(u'f is ф'))
        except LexerError as e:
            self.assertEqual(str(e),
                             u'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail(u'must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token(u'keyword', s))

        id = sometok(u'id')
        is_ = keyword(u'is')
        end = keyword(u'end')
        nl = sometok(u'nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != u'space']
Example #53
def get_network_parser():
  """Turn a net string into numbers describing a Bayes net."""
  net_type = parser.skip(parser.a(Token(token.NAME, 'BAYES')))
  return net_type + number_parser + end_parser
Example #54
def test_many_backtracking():
    x = a('x')
    y = a('y')
    expr = many(x + y) + x + x
    eq_(expr.parse('xyxyxx'), ([('x', 'y'), ('x', 'y')], 'x', 'x'))
Example #55
def sym(wanted):
    "Parse and skip the given symbol or keyword."
    if wanted.startswith(":"):
        return skip(a(HyKeyword(wanted[1:])))
    return skip(some(lambda x: isinstance(x, HySymbol) and x == wanted))
Example #56
def test_many_backtracking():
    x = a('x')
    y = a('y')
    expr = many(x + y) + x + x
    eq_(expr.parse('xyxyxx'), ([('x', 'y'), ('x', 'y')], 'x', 'x'))