Example #1
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda lst: sum(lst, [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    date = some(lambda t: t.type == 'Date').named('date') >> tokval
    id = some(lambda t: t.type in ['Name', 'Number', 'String']).named(
        'id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    term = date + op_('-') + date
    value = (id | term | date)
    a_list = (id + maybe(op_('=') + id) + skip(maybe(op(','))) >> unarg(Attr))
    attr_list = (many(op_('[') + many(a_list) + op_(']')) >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (chart_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart = (maybe(n('diagram')) + maybe(id) + op_('{') + stmt_list + op_('}')
             >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #2
def test_str(self):
    self.assertEqual(tokenize('"a\\"b\\_c"'),
                     [Token('STRING', '"a\\"b\\_c"'),
                      Token('NL', '')])
    self.assertEqual(tokenize("'a\\'b\\_c'"),
                     [Token('STRING', "'a\\'b\\_c'"),
                      Token('NL', '')])
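The STRING and trailing NL tokens asserted above come from the host project's tokenizer. A minimal sketch that would satisfy these assertions, assuming funcparserlib's make_tokenizer and a hand-appended NL terminator:

from funcparserlib.lexer import make_tokenizer, Token

# Escaped quotes (\" or \') and other backslash pairs stay inside the token.
_tokenizer = make_tokenizer([
    ('STRING', (r'"(\\.|[^"\\])*"' r"|'(\\.|[^'\\])*'",)),
    ('SPACE', (r'[ \t]+',)),
])

def tokenize(s):
    # The assertions above expect a synthetic Token('NL', '') at the end.
    return [t for t in _tokenizer(s) if t.type != 'SPACE'] + [Token('NL', '')]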
Example #3
def parse(seq):
    'Sequence(Token) -> object'
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda lst: sum(lst, [])
    value_flatten = lambda v: sum([[v[0]]] + list(v[1:]), [])
    n = lambda s: a(Token('Name', s)) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t:
        t.type in ['Name', 'Number', 'Color', 'String']).named('id') >> tokval
    make_chart_attr = lambda args: DefAttrs(u'chart', [Attr(*args)])

    node_id = id  # + maybe(port)
    pair = (
        op_('(') + id + skip(maybe(op(','))) + id + op_(')')
        >> tuple)
    value = (id | pair)
    value_list = (
        value +
        many(op_(',') + value)
        >> value_flatten)
    a_list = (
        id +
        maybe(op_('=') + id) +
        skip(maybe(op(',')))
        >> unarg(Attr))
    attr_list = (
        many(op_('[') + many(a_list) + op_(']'))
        >> flatten)
    chart_attr = id + (op_('=') | op_(':')) + value_list >> make_chart_attr
    node_stmt = node_id + attr_list >> unarg(Node)

    stmt = (
        chart_attr
        | node_stmt
    )
    stmt_list = many(stmt + skip(maybe(op(';'))))
    chart_type = (
          n('p')   | n('pie')    | n('piechart')
        | n('p3')  | n('pie3d')  | n('piechart_3d')
        | n('lc')  | n('line')   | n('linechart')
        | n('lxy') | n('linechartxy')
        | n('bhs') | n('holizontal_barchart')
        | n('bvs') | n('vertical_barchart')
        | n('bhg') | n('holizontal_bargraph')
        | n('bvg') | n('vertical_bargraph')
        | n('v')   | n('venn')   | n('venndiagram')
        | n('s')   | n('plot')   | n('plotchart')
    )
    chart = (
        chart_type +
        maybe(id) +
        op_('{') +
        stmt_list +
        op_('}')
        >> unarg(Chart))
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
Example #4
def parse(sequence, query):
    tokval = lambda x: x.value
    toktype = lambda t: (some(lambda x: x.type == t).named('(type %s)' % t) >>
                         tokval)
    operation = lambda s: a(Token('Op', s)) >> tokval
    operation_ = lambda s: skip(operation(s))

    create_param = lambda param_name: query.get_aliased_param(param_name)
    make_and = lambda params: And(params[0], params[1])
    make_or = lambda params: Or(params[0], params[1])
    make_not = lambda inner: Not(inner)

    word = toktype('Word')
    inner_bracket = forward_decl()
    left_of_and = forward_decl()
    right_of_and = forward_decl()
    left_of_or = forward_decl()
    not_ = forward_decl()
    bracket = operation_('(') + inner_bracket + operation_(')')
    and_ = left_of_and + operation_('&') + right_of_and >> make_and
    or_ = left_of_or + operation_('|') + inner_bracket >> make_or
    param = word >> create_param

    not_.define(operation_('!') + (bracket | param))
    not_ = not_ >> make_not

    left_of_or.define(and_ | bracket | not_ | param)
    left_of_and.define(bracket | not_ | param)
    right_of_and.define(left_of_and)
    inner_bracket.define(or_ | and_ | bracket | not_ | param)

    definition = (bracket | inner_bracket) + finished

    return definition.parse(sequence)
Example #5
def language_element(key, tok_type, combinator=a):
    """
    Parser to match language element by using a certain combinator

    :param key: exact key of the token, e.g.: `begin`, `model`, `)`
    :param tok_type: predefined token type to be matched
    :param combinator: use the combinator to create a parser
    :return: a parser that matches elements using the above condition
    """
    if combinator == a:
        return combinator(Token(tok_type, key))
    elif combinator == some:
        return combinator(lambda tok: tok == Token(tok_type, key))
    elif combinator == maybe:
        return combinator(a(Token(tok_type, key)))
    else:
        raise Exception("Parser creation error")
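Hypothetical usage of the helper above, with 'KEYWORD' as an assumed token type:

from funcparserlib.lexer import Token
from funcparserlib.parser import maybe

begin = language_element('begin', 'KEYWORD')             # exact match via a()
opt_begin = language_element('begin', 'KEYWORD', maybe)  # optional match
print(begin.parse([Token('KEYWORD', 'begin')]))          # -> Token('KEYWORD', 'begin')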
Example #6
    def test_error_info(self):
        tokenize = make_tokenizer([
            ('keyword', (r'(is|end)',)),
            ('id', (r'[a-z]+',)),
            ('space', (r'[ \t]+',)),
            ('nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize('f is ф'))
        except LexerError as e:
            self.assertEqual(six.text_type(e),
                             'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail('must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token('keyword', s))

        id = sometok('id')
        is_ = keyword('is')
        end = keyword('end')
        nl = sometok('nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != 'space']
        try:
            file.parse(toks)
        except NoParseError as e:
            self.assertEqual(e.msg,
                             "got unexpected token: 2,11-2,14: id 'spam'")
            self.assertEqual(e.state.pos, 4)
            self.assertEqual(e.state.max, 7)
            # May raise IndexError
            t = toks[e.state.max]
            self.assertEqual(t, Token('id', 'spam'))
            self.assertEqual((t.start, t.end), ((2, 11), (2, 14)))
        else:
            self.fail('must raise NoParseError')
Example #7
def test_lexer():
    assert list(tokenize_property('P17')) == [Token('PID', 'P17')]
    assert list(tokenize_property('P17/P297')) == [
        Token('PID', 'P17'),
        Token('SLASH', '/'),
        Token('PID', 'P297')
    ]
    assert list(tokenize_property('(P17/P297)')) == ([
        Token('LBRA', '('),
        Token('PID', 'P17'),
        Token('SLASH', '/'),
        Token('PID', 'P297'),
        Token('RBRA', ')')
    ])
Example #8
def test_lexer(self):
    self.assertEqual(list(tokenize_property('P17')), [Token('PID', 'P17')])
    self.assertEqual(
        list(tokenize_property('P17/P297')),
        [Token('PID', 'P17'),
         Token('SLASH', '/'),
         Token('PID', 'P297')])
    self.assertEqual(list(tokenize_property('(P17/P297)')), [
        Token('LBRA', '('),
        Token('PID', 'P17'),
        Token('SLASH', '/'),
        Token('PID', 'P297'),
        Token('RBRA', ')')
    ])
Example #9
def test_id(self):
    with self.assertRaises(LexerError):
        tokenize('-abc')
    with self.assertRaises(LexerError):
        tokenize('_abc')
    with self.assertRaises(LexerError):
        tokenize('abc-')
    with self.assertRaises(LexerError):
        tokenize('abc_')
    self.assertEqual(
        tokenize('abc_d0-3'),
        [Token('ID', 'abc_d0-3'), Token('NL', '')])
    self.assertEqual(
        tokenize('`1.233 444\\` ee`'),
        [Token('QUOTE_ID', '`1.233 444\\` ee`'),
         Token('NL', '')])
    with self.assertRaises(LexerError):
        tokenize('01-')
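Token specs consistent with the assertions above might look as follows; the real patterns (and the synthetic trailing NL token) live in the project under test, so this is only a sketch:

from funcparserlib.lexer import make_tokenizer

tokenize_ids = make_tokenizer([
    # ids start and end alphanumerically; '-' and '_' only in the middle
    ('ID', (r'[a-z](?:[a-z0-9_-]*[a-z0-9])?',)),
    ('QUOTE_ID', (r'`(\\.|[^`\\])*`',)),
])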
Example #10
def create_grammar():
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    null = n('null')
    true = n('true')
    false = n('false')
    number = toktype('Number')
    string = toktype('String')
    value = forward_decl()
    member = string + op_(':') + value
    object_ = (op_('{') + maybe(member + many(op_(',') + member)) + op_('}'))
    array = (op_('[') + maybe(value + many(op_(',') + value)) + op_(']'))
    value.define(null | true | false | object_ | array | number | string)
    json_text = object_ | array
    json_file = json_text + skip(finished)

    return json_file
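Driving the returned grammar by hand with tokens for the input {"a": 1} (a real caller would produce them with make_tokenizer); since no semantic actions are attached here, the result is funcparserlib's raw, flattened parse tuple:

from funcparserlib.lexer import Token

toks = [Token('Op', '{'), Token('String', '"a"'), Token('Op', ':'),
        Token('Number', '1'), Token('Op', '}')]
print(create_grammar().parse(toks))  # -> ('"a"', '1', [])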
Example #11
def create_parser():
    # operator: '~=' | '>=' | '<=' | '<' | '>' | '='
    operator = some(lambda tok: tok.type == 'CMP') >> choose_class

    # value: STRING | WORD
    word = some(lambda tok: tok.type == 'WORD') >> Text
    string = some(lambda tok: tok.type == 'STRING') >> QuotedText
    value = string | word

    # function: WORD '(' ')'
    open_brace = skip(a(Token('BR', '(')))
    close_brace = skip(a(Token('BR', ')')))
    function = word + open_brace + close_brace >> Function

    # field_expr: WORD operator value
    fieldexpr = (word + operator + (function | value)) >> (lambda x: x[1]
                                                           ([x[0], x[2]]))

    OR = a(Token('OP', 'OR')) >> choose_class
    AND = a(Token('OP', 'AND')) >> choose_class

    def eval(data):
        arg1, lst = data
        for f, arg2 in lst:
            arg1 = f([arg1, arg2])

        return arg1

    expr = forward_decl()

    basexpr = open_brace + expr + close_brace | fieldexpr
    andexpr = (basexpr + many(AND + basexpr)) >> eval
    orexpr = (andexpr + many(OR + andexpr)) >> eval
    expr.define(orexpr)

    return expr
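The eval helper folds a parse result of shape (first, [(op, operand), ...]) left-associatively. A standalone illustration of the same fold, with tuple standing in for the parsed operator classes:

def fold(data):
    first, rest = data
    for f, arg in rest:
        first = f([first, arg])
    return first

assert fold(('a', [(tuple, 'b'), (tuple, 'c')])) == (('a', 'b'), 'c')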
Example #12
def _parse(seq):
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token(u'Op', s)) >> tokval
    op_ = lambda s: skip(op(s))

    def make_string(args):
        context, value = args
        if not context: context = 'any:'
        return String(unescape_str(value[1:-1]), context[:-1])

    def make_regex(args):
        context, value = args
        value, modifiers = value.rsplit('/', 1)
        value = value[1:]
        if not context: context = 'any:'
        return Regex(unescape_regex(value), modifiers, context[:-1])

    def make_or(args):
        return Or(*args)

    def make_and(args):
        return And(*args)

    def make_not(x):
        return Not(x)

    context = maybe(toktype(u'Prefix'))
    string = (context + toktype(u'String')) >> make_string
    regex = (context + toktype(u'Regex')) >> make_regex

    par_term = forward_decl()
    simple_term = forward_decl()
    term = forward_decl()
    not_term = forward_decl()
    and_term = forward_decl()
    or_term = forward_decl()

    par_term.define(op_(u'(') + term + op_(u')'))

    simple_term.define(par_term | string | regex)
    not_term.define(op_('not') + not_term >> make_not | simple_term)
    and_term.define(not_term + op_('and') + and_term >> make_and | not_term)
    or_term.define(and_term + op_('or') + or_term >> make_or | and_term)

    term.define(or_term)

    eof = skip(toktype(u'EOF'))
    filter_expr = (term + eof) | (eof >> const(Any()))
    return filter_expr.parse(seq)
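Note that the grammar expects the lexer to emit an explicit EOF token, so even an empty filter parses and yields the module's match-everything Any() (a sketch, assuming Any is a class):

from funcparserlib.lexer import Token

assert isinstance(_parse([Token(u'EOF', u'')]), Any)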
Example #13
def parse(seq):
    """Sequence(Token) -> object"""
    unarg = lambda f: lambda args: f(*args)
    tokval = lambda x: x.value
    flatten = lambda lst: sum(lst, [])
    n = lambda s: a(Token(u'Name', s)) >> tokval
    op = lambda s: a(Token(u'Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id_types = [u'Name', u'Number', u'String']
    id = some(lambda t: t.type in id_types).named(u'id') >> tokval
    make_graph_attr = lambda args: DefAttrs(u'graph', [Attr(*args)])
    make_edge = lambda x, xs, attrs: Edge([x] + xs, attrs)

    node_id = id  # + maybe(port)
    a_list = (
        id + maybe(op_(u'=') + id) + skip(maybe(op(u','))) >> unarg(Attr))
    attr_list = (many(op_(u'[') + many(a_list) + op_(u']')) >> flatten)
    attr_stmt = (
        (n(u'graph') | n(u'node') | n(u'edge')) + attr_list >> unarg(DefAttrs))
    graph_attr = id + op_(u'=') + id >> make_graph_attr
    node_stmt = node_id + attr_list >> unarg(Node)
    # We use a forward_decl because of circular definitions like (stmt_list ->
    # stmt -> subgraph -> stmt_list)
    subgraph = forward_decl()
    edge_rhs = skip(op(u'->') | op(u'--')) + (subgraph | node_id)
    edge_stmt = ((subgraph | node_id) + oneplus(edge_rhs) + attr_list >>
                 unarg(make_edge))
    stmt = (attr_stmt | edge_stmt | subgraph | graph_attr | node_stmt)
    stmt_list = many(stmt + skip(maybe(op(u';'))))
    subgraph.define(
        skip(n(u'subgraph')) + maybe(id) + op_(u'{') + stmt_list +
        op_(u'}') >> unarg(SubGraph))
    graph = (maybe(n(u'strict')) + maybe(n(u'graph') | n(u'digraph')) +
             maybe(id) + op_(u'{') + stmt_list + op_(u'}') >> unarg(Graph))
    dotfile = graph + skip(finished)

    return dotfile.parse(seq)
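Hand-built tokens for the smallest input this grammar accepts, graph { }; Graph is the AST class of funcparserlib's bundled DOT example, from which this parser appears to derive:

from funcparserlib.lexer import Token

g = parse([Token(u'Name', u'graph'), Token(u'Op', u'{'), Token(u'Op', u'}')])
# -> Graph(None, 'graph', None, [])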
Example #14
def parse(seq):
    'Sequence(Token) -> object'
    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    id = some(lambda t: t.type in ['Name', 'Number', 'Color', 'String']).named(
        'id') >> tokval
    date = some(lambda t: t.type == 'Date').named('date') >> tokval
    make_node = lambda args: Node(*args)

    node_stmt = id + op_(':') + date + maybe(op_('-') + date) >> make_node
    chart = (many(node_stmt + skip(maybe(op(';')))) >> Chart)
    dotfile = chart + skip(finished)

    return dotfile.parse(seq)
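A hand-built run of the parser above for a single task: 2014-01-01 entry; Node and Chart are the host project's AST classes:

from funcparserlib.lexer import Token

toks = [Token('Name', 'task'), Token('Op', ':'), Token('Date', '2014-01-01')]
chart = parse(toks)  # -> Chart([Node('task', '2014-01-01', None)])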
Example #15
def lex_string_expr(string):
    """Lex a string expression."""
    tokenizer = lexer.make_tokenizer([
        ('concat', [r'#']),
        ('string', [r'"[^"]+"']),
        ('name',   [r'[A-Za-z_][A-Za-z_0-9\-:?\'\.\s]*']),
        ('space',  [r'[ \t\r\n]+']),
    ])

    try:
        return remove_whitespace_tokens(tokenizer(string))
    except lexer.LexerError:
        # If we fail to lex the string, it is not a valid string expression so
        # just return it as a single token
        return [Token('string', string)]
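Assuming remove_whitespace_tokens simply drops the 'space' tokens, a concatenation expression splits like this:

lex_string_expr('"abc" # rest')
# -> [Token('string', '"abc"'), Token('concat', '#'), Token('name', 'rest')]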
Example #16
def _parse_rule(seq):
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    sep = lambda s: a(Token(u'Sep', s)) >> tokval
    s_sep = lambda s: skip(sep(s))

    level = toktype(u'Level')
    comparator = toktype(u'Comparator') >> COMPARATORS.get
    number = toktype(u'Number') >> float
    historical = toktype(u'Historical')
    unit = toktype(u'Unit')
    operator = toktype(u'Operator')
    logical_operator = toktype(u'LogicalOperator') >> LOGICAL_OPERATORS.get

    exp = comparator + (
        (number + maybe(unit)) | historical) + maybe(operator + number)
    rule = (level + s_sep(':') + exp + many(logical_operator + exp))

    overall = rule + skip(finished)
    return overall.parse(seq)
Example #17
def test_error_info():
    tokenize = make_tokenizer([
        Spec('keyword', r'(is|end)'),
        Spec('id', r'[a-z]+'),
        Spec('space', r'[ \t]+'),
        Spec('nl', r'[\n\r]+'),
    ])
    try:
        list(tokenize('f is ф'))
    except LexerError as e:
        pass
    else:
        ok_(False, 'must raise LexerError')

    keyword = lambda s: tok('keyword', s)

    id = tok('id')
    is_ = keyword('is')
    end = keyword('end')
    nl = tok('nl')

    equality = id + skip(is_) + id >> tuple
    expr = equality + skip(nl)
    file = many(expr) + end

    msg = """\
rake is eggs
eggs isnt spam
end"""
    toks = [x for x in tokenize(msg) if x.type != 'space']
    try:
        file.parse(toks)
    except ParserError as e:
        msg, pos, i = e.args
        eq_(msg, "got unexpected token: id 'spam'")
        eq_(pos, ((2, 11), (2, 14)))
        # May raise IndexError
        t = toks[i]
        eq_(t, Token('id', 'spam'))
    else:
        ok_(False, 'must raise ParserError')
Example #18
def parse(seq):
    'Sequence(Token) -> grammar dict'
    make_patterns = lambda x: ('patterns', x)
    n = lambda s: a(Token('Name', s)) >> tokval
    # plan syntax
    f_add = n('add') + skip(space)
    f_apply = n('apply') + skip(space)
    f_lookup = n('lookup') + skip(maybe(space))
    f_parallel = n('parallel') + skip(space)
    f_sequential = n('sequential') + skip(space)
    f_firstmatch = n('firstmatch') + skip(space)
    f_parse = n('parse') + skip(space) + name >> list
    f_decompose = n('decompose') + skip(space) + name >> list
    func_clause = oneplus(f_add | f_apply | f_lookup | f_parallel
                          | f_sequential | f_firstmatch) + maybe(
                              f_parse | f_decompose) >> unfoldl >> tuple
    stage_clause = skip(
        n('stage')) + skip(space) + name + skip(space) + func_clause + skip(
            maybe(space))
    return_clause = n('return') + skip(space) + skip(
        n('if')) + skip(space) + name + skip(maybe(space))
    for_clause = skip(n('for')) + skip(space) + name + skip(op(':')) + skip(
        maybe(space)) + many(stage_clause | return_clause) >> tuple
    plan_dict = oneplus(for_clause) >> dict
    plan = n('plan') + skip(maybe(space)) + plan_dict >> tuple
    # pattern syntax
    pattern = skip(n('pattern')) + skip(space) + fullgloss_parser() + skip(
        space) + skip(op('|')) + skip(space) + fullgloss_parser() + skip(
            maybe(space)) >> unarg(Pattern)
    sec_header = skip(n('section')) + skip(space) + name + skip(maybe(space))
    section = sec_header + many(pattern)
    sections = many(section) >> dict
    patterns = sections >> make_patterns
    grammar = plan + patterns + skip(finished) >> dict

    return grammar.parse(seq)
Example #19
def parse(seq):
    """Sequence(Token) -> object"""
    id_tokens = ['Name', 'Number', 'String', 'RackHeight', 'Units']
    rackitem_tokens = ['QuotedRackItem', 'RackItem']

    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    _id = some(lambda t: t.type in id_tokens) >> tokval
    keyword = lambda s: a(Token('Name', s)) >> tokval
    rackheight = some(lambda t: t.type == 'RackHeight') >> tokval
    number = some(lambda t: t.type == 'Number').named('number') >> tokval
    rackitem = some(lambda t: t.type in rackitem_tokens) >> tokval

    def make_num_rackitem(num, text, attr):
        return RackItem(num, text.strip(), attr)

    def make_nonnum_rackitem(text, attr):
        return RackItem(None, text.strip(), attr)

    def make_nonvalued_attr(rackheight):
        return Attr(rackheight, None)

    #
    # parts of syntax
    #
    option_stmt = (
        _id +
        maybe(op_('=') + _id)
        >> create_mapper(Attr)
    )
    option_list = (
        maybe(op_('[') + option_stmt + many(op_(',') + option_stmt) + op_(']'))
        >> create_mapper(oneplus_to_list, default_value=[])
    )

    #  attributes statement::
    #     default_shape = box;
    #     default_fontsize = 16;
    #     12U;
    #     ascending;
    #
    attribute_stmt = (
        _id + op_('=') + _id
        >> create_mapper(Attr)
    )
    rackheight_stmt = (
        rackheight
        >> make_nonvalued_attr
    )
    ascending_stmt = (
        keyword('ascending')
        >> make_nonvalued_attr
    )

    #  field statement::
    #     1: A
    #     2: B [attr = value, attr = value];
    #     * C [attr = value, attr = value];
    #     * D [attr = value, attr = value];
    #
    numbered_field_item_stmt = (
        number +
        op_(':') +
        rackitem +
        option_list
        >> create_mapper(make_num_rackitem)
    )
    nonnumbered_field_item_stmt = (
        (op_('-') | op_('*')) +
        rackitem +
        option_list
        >> create_mapper(make_nonnum_rackitem)
    )
    field_item_stmt = (
        numbered_field_item_stmt |
        nonnumbered_field_item_stmt
    )

    #  rack statement::
    #     rack {
    #       1: A;
    #     }
    #

    # rack definition
    rack_inline_stmt = (
        attribute_stmt |
        rackheight_stmt |
        field_item_stmt
    )
    rack_inline_stmt_list = (
        many(rack_inline_stmt + skip(maybe(op(';'))))
    )
    rack_stmt = (
        skip(keyword('rack')) +
        maybe(_id) +
        op_('{') +
        rack_inline_stmt_list +
        op_('}')
        >> create_mapper(Rack)
    )

    #  extension statement (plugin)::
    #     plugin attributes [name = Name];
    #
    extension_stmt = (
        keyword('plugin') +
        _id +
        option_list
        >> create_mapper(Extension)
    )

    #
    # diagram statement::
    #     rackdiag {
    #        A;
    #     }
    #
    diagram_id = (
        (keyword('diagram') | keyword('rackdiag')) +
        maybe(_id)
        >> list
    )
    diagram_inline_stmt = (
        ascending_stmt |
        extension_stmt |
        rack_stmt |
        field_item_stmt |
        attribute_stmt |
        rackheight_stmt
    )
    diagram_inline_stmt_list = (
        many(diagram_inline_stmt + skip(maybe(op(';'))))
    )
    diagram = (
        maybe(diagram_id) +
        op_('{') +
        diagram_inline_stmt_list +
        op_('}')
        >> create_mapper(Diagram)
    )
    dotfile = diagram + skip(finished)

    return dotfile.parse(seq)
Example #20
def __flatten_helper(lst: Iterable[Any]) -> Generator[Any, None, None]:
    for x in lst:
        if isinstance(x, Iterable):
            yield from __flatten_helper(x)
        else:
            yield x

def __flatten(lst: Sequence[Any]) -> List[Any]:
    return list(__flatten_helper(lst))

__toktype = lambda t: some(lambda x: x.type == t) # type: ignore
__tokop = lambda typ: skip(some(lambda x: x.type == typ))
__tokkw = lambda name: skip(a(Token('ID', name)))
__tokkw_keep = lambda name: a(Token('ID', name))
__identity = __wrap_result(lambda n, s, e: n)

def __make_array(n):
    n = __collapse_results(n, depth=1)
    if n is None:
        return __Result([], (0, 0), (0, 0))
    else:
        return __Result(
            [x.value for x in [n[0]] + n[1] if x.value is not None],
            min(x.start for x in [n[0]] + n[1] if x.start > (0, 0)),
            max(x.end for x in [n[0]] + n[1] if x.end > (0, 0)),
        )
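__flatten recurses through any depth of nested iterables (note that a bare str is itself Iterable and would recurse forever, so these helpers are meant for nested lists of parse results):

assert __flatten([1, [2, [3, 4]], [5]]) == [1, 2, 3, 4, 5]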
Example #21
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('OP', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('NAME', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_int(n):
        return '%s' % int(n)

    def make_real(n):
        return '%s' % float(n)

    def unescape(s):
        std = {
            '"': '"',
            '\\': '\\',
            '/': '/',
            'b': '\b',
            'f': '\f',
            'n': '\n',
            'r': '\r',
            't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return n
        #return unescape(n[1:-1])

    def make_all_models(models):
        return dict(models)


    # all_attrs = []
    # for i in attrs:
    #     attr = i[0]
    #     if attr not in all_attrs:
    #         all_attrs.append(attr)
    #     else:
    #         raise Exception('Attribute %s is already defined in class' % attr)

    def make_fields(n):
        #return dict(n)
        return Field(n)

    def make_params(n):
        return n

    null = toktype('NONE') >> const("None")
    true = toktype('TRUE') >> const("True")
    false = toktype('FALSE') >> const("False")
    number = toktype('INT') >> make_int
    real = toktype('REAL') >> make_real
    string = toktype('STRING') >> make_string
    value = forward_decl()
    name = toktype('NAME')
    field = toktype('FIELD') + maybe(op_('(') + many(value) +
                                     op_(')')) >> tuple
    member = string + op_(':') + value >> tuple
    attrs = forward_decl()
    params = forward_decl()

    models = many(name + op_('::') + many(attrs)) >> make_all_models

    attrs.define(name + op_(':') + field + many(params) >> make_fields)

    params.define(name + op_('=') + value >> tuple)

    value.define(null | true | false | name | number | real | string)
    parser_text = models
    parser_file = parser_text + skip(finished)
    return parser_file.parse(seq)
Example #22
    def make_parser(type_, value, param, parse):
        nonlocal actor_set
        if type_ not in valid_token_types and type_ not in ('ACTOR', 'VALUE'):
            exit_bad_rule(f'"{type_}" is not a valid token type')
        if type_ == 'ACTOR':
            actor_set = True
            if value:
                exit_bad_rule("ACTOR token cannot be used with value check")
        if type_ == 'VALUE':
            if value:
                exit_bad_rule("VALUE token cannot be used with value check")
            if param is None or parse not in ('INT', 'FLOAT', 'BOOL', 'STRING'):
                exit_bad_rule("VALUE token not of form VALUE(parameter=TYPE)")
            parse = f'__assert_{parse.lower()}(__value)'
        if value:
            try:
                value = eval_(value)
            except:
                exit_bad_rule("Failed to parse custom rule value check")
            if type_ == 'ID':
                out.append(skip(a(Token('ID', value)) | a(Token('QUOTE_ID', value))))
            else:
                out.append(skip(a(Token(type_, value))))
            return True
        elif type_ == 'ACTOR':
            if param is not None:
                @__wrap_result
                def handle_secondary_only(t, start, end):
                    return {'.actor_secondary': '' if t is None else t}
                if param != '.name':
                    exit_bad_rule("ACTOR token parameter must be .name")
                if parse is None:
                    exit_bad_rule("ACTOR .name must be set if present")
                try:
                    name = eval_(parse)
                except:
                    exit_bad_rule("Failed to parse actor primary name")
                params['.actor_name'] = name
                out.append(maybe(__tokop('AT') + id_) >> handle_secondary_only)
            else:
                @__wrap_result
                def handle_actor(t, start, end):
                    name, secondary = t
                    return {'.actor_name': name, '.actor_secondary': '' if secondary is None else secondary}
                out.append((id_ + maybe(__tokop('AT') + id_)) >> handle_actor)
        elif not param:
            out.append(skip(some(lambda x: x.type == type_ if type_ != 'ID' else x.type in ('ID', 'QUOTE_ID'))))
        else:
            parse = parse or '__value'
            try:
                f = eval_(f'lambda __type, __value: {parse}')
            except:
                exit_bad_rule("Failed to parse custom rule")

            @__wrap_result
            def inner(t, start, end):
                try:
                    return {param: f(type_, t)}
                except Exception as e:
                    emit_error(str(e), start, end)
                    raise LogError()

            value_wrapper = {
                'INT': __int,
                'FLOAT': __float,
                'BOOL': __bool,
                'STRING': __string,
                'ID': __identifier,
            }.get(type_, __identity)
            if param[0] == '.':
                value_wrapper = __identity

            if type_ != 'VALUE':
                out.append(some(lambda x: x.type == type_ if type_ != 'ID' else x.type in ('ID', 'QUOTE_ID')) >> value_wrapper >> inner)
            else:
                out.append(__value >> value_wrapper >> inner)
        return False
Example #23
def parse(seq):
    """Sequence(Token) -> object"""
    const = lambda x: lambda _: x
    tokval = lambda x: x.value
    toktype = lambda t: some(lambda x: x.type == t) >> tokval
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    n = lambda s: a(Token('Name', s)) >> tokval

    def make_array(n):
        if n is None:
            return []
        else:
            return [n[0]] + n[1]

    def make_object(n):
        return dict(make_array(n))

    def make_number(n):
        try:
            return int(n)
        except ValueError:
            return float(n)

    def unescape(s):
        std = {
            '"': '"',
            '\\': '\\',
            '/': '/',
            'b': '\b',
            'f': '\f',
            'n': '\n',
            'r': '\r',
            't': '\t',
        }

        def sub(m):
            if m.group('standard') is not None:
                return std[m.group('standard')]
            else:
                return chr(int(m.group('unicode'), 16))

        return re_esc.sub(sub, s)

    def make_string(n):
        return unescape(n[1:-1])

    null = n('null') >> const(None)
    true = n('true') >> const(True)
    false = n('false') >> const(False)
    number = toktype('Number') >> make_number
    string = toktype('String') >> make_string
    value = forward_decl()
    member = string + op_(':') + value >> tuple
    object = (op_('{') + maybe(member + many(op_(',') + member)) + op_('}') >>
              make_object)
    array = (op_('[') + maybe(value + many(op_(',') + value)) + op_(']') >>
             make_array)
    value.define(null | true | false | object | array | number | string)
    json_text = object | array
    json_file = json_text + skip(finished)

    return json_file.parse(seq)
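Feeding hand-built tokens for [1, true] through the parser above (a real tokenizer must also produce 'String' tokens in the escaped form that make_string and the module-level re_esc regex expect):

from funcparserlib.lexer import Token

toks = [Token('Op', '['), Token('Number', '1'), Token('Op', ','),
        Token('Name', 'true'), Token('Op', ']')]
print(parse(toks))  # -> [1, True]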
Example #24
def make_token(self, token_type, value):
    """Create a token type with a value."""
    return Token(token_type, value, (self.last_lnum, self.lastpos),
                 (self.lnum, self.pos))
Example #25
        tokenize = make_tokenizer([
            (u'keyword', (r'(is|end)',)),
            (u'id', (r'[a-z]+',)),
            (u'space', (r'[ \t]+',)),
            (u'nl', (r'[\n\r]+',)),
        ])
        try:
            list(tokenize(u'f is ф'))
        except LexerError as e:
            self.assertEqual(str(e),
                             u'cannot tokenize data: 1,6: "f is \u0444"')
        else:
            self.fail(u'must raise LexerError')

        sometok = lambda type: some(lambda t: t.type == type)
        keyword = lambda s: a(Token(u'keyword', s))

        id = sometok(u'id')
        is_ = keyword(u'is')
        end = keyword(u'end')
        nl = sometok(u'nl')

        equality = id + skip(is_) + id >> tuple
        expr = equality + skip(nl)
        file = many(expr) + end

        msg = """\
spam is eggs
eggs isnt spam
end"""
        toks = [x for x in tokenize(msg) if x.type != u'space']
Example #26
def parse(seq):
    """Sequence(Token) -> object"""
    tokval = lambda x: x.value
    op = lambda s: a(Token('Op', s)) >> tokval
    op_ = lambda s: skip(op(s))
    _id = some(lambda t: t.type in ['Name', 'Number', 'String']) >> tokval
    keyword = lambda s: a(Token('Name', s)) >> tokval
    separator = some(lambda t: t.type == 'Separator') >> tokval

    def make_separator(sep):
        return Separator(sep[0:3], sep[3:-3].strip())

    #
    # parts of syntax
    #
    option_stmt = (_id + maybe(op_('=') + _id) >> create_mapper(Attr))
    option_list = (maybe(
        op_('[') + option_stmt + many(op_(',') + option_stmt) + op_(']')) >>
                   create_mapper(oneplus_to_list, default_value=[]))

    #  attributes statement::
    #     default_shape = box;
    #     default_fontsize = 16;
    #
    attribute_stmt = (_id + op_('=') + _id >> create_mapper(Attr))

    #  node statement::
    #     A;
    #     B [attr = value, attr = value];
    #
    node_stmt = (_id + option_list >> create_mapper(Node))

    #  separator statement::
    #     === message ===
    #     ... message ...
    #
    separator_stmt = (separator >> make_separator)

    #  edge statement::
    #     A -> B;
    #     C -> D {
    #       D -> E;
    #     }
    #
    edge_block = forward_decl()
    edge_relation = (op('<<--') | op('<--') | op('<<-') | op('<-') | op('->')
                     | op('->>') | op('-->') | op('-->>') | op('=>'))
    edge_stmt = (_id + edge_relation + _id + many(edge_relation + _id) +
                 option_list + maybe(edge_block) >> create_mapper(Edge))
    edge_block_inline_stmt_list = (many(edge_stmt + skip(maybe(op(';')))
                                        | separator_stmt))
    edge_block.define(
        op_('{') + edge_block_inline_stmt_list + op_('}') >> Statements)

    #  group statement::
    #     group {
    #        A;
    #     }
    #
    group_inline_stmt_list = (many((attribute_stmt | node_stmt) +
                                   skip(maybe(op(';')))))
    group_stmt = (skip(keyword('group')) + skip(maybe(_id)) + op_('{') +
                  group_inline_stmt_list + op_('}') >> Group)

    #  combined fragment (alt, loop) statement::
    #     loop {
    #        A -> B;
    #     }
    #     alt {
    #        D -> E;
    #     }
    #
    fragment_stmt = forward_decl()
    fragment_inline_stmt = (attribute_stmt | fragment_stmt | edge_stmt
                            | node_stmt)
    fragment_inline_stmt_list = (many(fragment_inline_stmt +
                                      skip(maybe(op(';')))))
    fragment_types = (keyword('alt') | keyword('loop'))
    fragment_stmt.define(fragment_types + maybe(_id) + op_('{') +
                         fragment_inline_stmt_list +
                         op_('}') >> create_mapper(Fragment))

    #  extension statement (class, plugin)::
    #     class red [color = red];
    #     plugin attributes [name = Name];
    #
    extension_stmt = ((keyword('class') | keyword('plugin')) + _id +
                      option_list >> create_mapper(Extension))

    # diagram statement::
    #     seqdiag {
    #        A -> B;
    #     }
    #
    diagram_id = (
        (keyword('diagram') | keyword('seqdiag')) + maybe(_id) >> list)
    diagram_inline_stmt = (extension_stmt | attribute_stmt | fragment_stmt
                           | group_stmt | edge_stmt | separator_stmt
                           | node_stmt)
    diagram_inline_stmt_list = (many(diagram_inline_stmt +
                                     skip(maybe(op(';')))))
    diagram = (maybe(diagram_id) + op_('{') + diagram_inline_stmt_list +
               op_('}') >> create_mapper(Diagram))
    dotfile = diagram + skip(finished)

    return dotfile.parse(seq)
Example #27
    def parse(self, seq):
        const = lambda x: lambda _: x
        unarg = lambda f: lambda x: f(*x)
        tokval = lambda x: x.value
        toktype = lambda s: some(lambda x: x.type == s) >> tokval

        def sumstr(s):
            #import pdb; pdb.set_trace()
            return "".join(str(x) for x in s)

        keyword = lambda s: some(
            lambda x: x.type == 'KEYWORD' and x.value.lower() == s)
        op = lambda s: a(Token('OP', s)) >> tokval
        op_ = lambda s: skip(op(s))
        onename = many(
            toktype('NAME') | toktype('KEYWORD') | toktype('HEADER')
            | toktype('NUMBER')) >> sumstr
        name = toktype('NAME') | toktype('KEYWORD') | toktype('HEADER')
        number = toktype('NUMBER') >> get_num
        get_string = lambda v: v[1:-1]
        string = (toktype('STRING') >> get_string) | (
            toktype('CHAR') >> get_char)
        nl = skip(toktype('NL'))
        typed_func = lambda k, c: keyword(k) + op_('(') + c + op_(')')
        simple_func = lambda k: typed_func(k, onename)
        assign_func = lambda k: keyword(k) + op_('(') + name + op_('=') + (
            name | string) + op_(')')

        # statement components
        vkey = op_('[') + many(name) + op_(']')
        prefix = toktype('TARGET') | assign_func('if') | typed_func(
            'platform', string)
        context_statement = (keyword('context') + op_('(') + number +
                             op_(')')) | keyword('context')
        index_statement = keyword('index') + op_('(') + onename + op_(
            ',') + number + op_(')')
        deadkey = (keyword('deadkey')
                   | keyword('dk')) + op_('(') + (number | name) + op_(')')
        context = context_statement | keyword('beep') | keyword('nul') | simple_func('use') | \
                    (index_statement >> AnyIndex) | string | (deadkey >> DeadKey) | \
                    simple_func('reset') | simple_func('save') | assign_func('set') | \
                    simple_func('outs')
        match = (simple_func('any') >> AnyIndex) | string | (
            deadkey >> DeadKey) | simple_func('notany')

        # top level statements
        stripspace = lambda s: s.replace(' ', '')
        header = (((toktype('HEADER') >> stripspace) + string) |
                  (toktype('HEADERN') + number)) + nl
        group = (simple_func('group') >> self.set_group) + maybe(
            toktype('USINGKEYS')) + nl
        store = maybe(many(prefix)) + simple_func('store') + many(string | simple_func('outs') | \
                        (vkey >> VKey) | (deadkey >> DeadKey) | keyword('beep')) + nl
        begin = keyword('begin') + onename + op_('>') + simple_func('use') + nl
        rule = maybe(many(prefix)) + maybe(many(match)) + maybe(toktype('PLUS') + \
                ((vkey >> VKey) | string | (simple_func('any') >> AnyIndex))) + op_('>') + many(context) + nl
        matchrule = (keyword('match')
                     | keyword('nomatch')) + op_('>') + many(context) + nl

        # a file is a sequence of certain types of statement
        make_header = lambda s: Store([(Token('HEADER', 'header'), '&' + s[0]),
                                       [s[1]]])
        kmfile = many((header >> make_header) | (store >> self.make_store) | \
                      skip(begin >> self.store_begin)) + \
                 maybe(many(group | (rule >> self.make_rule) | \
                            (matchrule >> self.make_match_rule) | \
                            (store >> self.make_store))) + finished
        return kmfile.parse(seq)
Example #28
def tokenize(string: str) -> List[Token]:
    t = make_tokenizer(__tokens)
    pstack: List[Token] = []
    indent = ['']

    if string and string[-1] not in ('\r', '\n'):
        string = string + '\n'

    num_lines = len(re.findall(r'\r\n|\r|\n', string))
    tokens: List[Token] = []
    buffered: List[Token] = []
    space_since_nl = False
    buffering = 0

    emit_token = lambda tok: (buffered if buffering else tokens).append(tok)
    gen = peekable(t(string))
    for x in gen:
        assert x.start is not None
        assert x.end is not None
        start, end = x.start, x.end

        if x.type == 'COMMENT':
            continue
        elif x.type == 'LPAREN':
            pstack.append(x)
        elif x.type == 'RPAREN':
            if not pstack:
                raise LexerError(start, 'no parentheses to close')
            if pstack.pop().type != 'LPAREN':
                raise LexerError(start, "expecting ']' but got ')'")
        elif x.type == 'LSQUARE':
            pstack.append(x)
        elif x.type == 'RSQUARE':
            if not pstack:
                raise LexerError(start, 'no bracket to close')
            if pstack.pop().type != 'LSQUARE':
                raise LexerError(start, "expecting ')' but got ']'")
        elif x.type == 'NL':
            if pstack:
                continue
            space_since_nl = False
            if tokens and tokens[-1].type == 'NL' and buffering != 2:
                continue
            x = Token('NL', '', start=start, end=end)
        elif x.type == 'ID' and x.name == 'entrypoint':
            if space_since_nl:
                raise LexerError(start, 'entrypoint must be unindented')
            buffering = 2
        elif x.type == 'SP':
            space_since_nl = True
            nxt = gen.peek(None)
            next_comment = (nxt is not None and nxt.type == 'COMMENT')
            if buffering == 1 and not next_comment:
                buffering = 0
            if tokens and tokens[-1].type == 'NL' and not buffering and not next_comment:
                indent_diff = __compare_indent(indent[-1], x.name, start)
                if indent_diff < 0:
                    found = False
                    while indent:
                        s = indent.pop()
                        if s == x.name:
                            indent.append(s)
                            break
                        emit_token(Token('DEDENT', '', start=start, end=end))
                    if not indent:
                        raise LexerError(end, 'dedent to unknown level')
                elif indent_diff > 0:
                    indent.append(x.name)
                    emit_token(Token('INDENT', '', start=start, end=end))

            if not buffering and buffered:
                tokens.extend(buffered)
                buffered = []

            continue


        if x.type == 'NL' and buffering:
            buffering = 1
        elif buffering == 1:
            buffering = 0

        if x.type != 'INDENT' and tokens and tokens[-1].type == 'NL' and not space_since_nl \
                and not buffering:
            while len(indent) > 1:
                s = indent.pop()
                emit_token(Token('DEDENT', '', start=start, end=end))
            if not buffering and buffered:
                tokens.extend(buffered)
                buffered = []

        emit_token(x)

    if pstack:
        raise LexerError((num_lines + 1, 0), 'unclosed parentheses/brackets')

    if buffering or buffered:
        raise LexerError((num_lines + 1, 0), 'unexpected end of file')

    while indent[-1]:
        indent.pop()
        tokens.append(Token('DEDENT', '', start=(num_lines + 1, 0), end=(num_lines + 1, 0)))

    return tokens
Example #29
def make_sysCode_range(rangeVals):
    return make_hidCode_range('SysCode', rangeVals)


def make_consCode_range(rangeVals):
    return make_hidCode_range('ConsCode', rangeVals)


## Base Rules

const = lambda x: lambda _: x
unarg = lambda f: lambda x: f(*x)
flatten = lambda lst: sum(lst, [])

tokenValue = lambda x: x.value
tokenType = lambda t: some(lambda x: x.type == t) >> tokenValue
operator = lambda s: a(Token('Operator', s)) >> tokenValue
parenthesis = lambda s: a(Token('Parenthesis', s)) >> tokenValue
eol = a(Token('EndOfLine', ';'))


def listElem(item):
    return [item]


def listToTuple(items):
    return tuple(items)


# Flatten only the top layer (list of lists of ...)
def oneLayerFlatten(items):
    mainList = []
    for sublist in items:
        for item in sublist:
            mainList.append(item)
    return mainList
Example #30
def parse(
    seq: List[Token],
    gen_actor: Callable[[str, str], Actor],
    exported_tco: bool = True,
    custom_action_parser_pfx: Optional[Parser] = None,
    custom_action_parser_reg: Optional[Parser] = None,
    custom_query_parser_op: Optional[Parser] = None,
    custom_query_parser_pfx: Optional[Parser] = None,
    custom_query_parser_reg: Optional[Parser] = None
) -> Tuple[List[RootNode], List[Actor]]:
    actors: Dict[Tuple[str, str], Actor] = {}

    nid = 0
    def next_id() -> int:
        nonlocal nid
        rv, nid = nid, nid + 1
        return rv

    def check_function(p, type_, start, end):
        if len(p) == 1:
            p = p[0]
        if isinstance(p, dict):
            actor = (p.pop('.actor_name'), p.pop('.actor_secondary'))
            name = p.pop('.name')
            negated = p.pop('.negated')
            params = pdict = p
            prepare_params = False
        else:
            actor_name, actor_secondary, name, params = p
            actor = (actor_name, actor_secondary or '')
            negated = False
            prepare_params = True

        function_name = f'EventFlow{type_.capitalize()}{name}'
        if actor not in actors:
            actors[actor] = gen_actor(*actor)
        mp = getattr(actors[actor], ['actions', 'queries'][type_ == 'query'])
        if function_name not in mp:
            emit_warning(f'no {type_} with name "{function_name}" found, using empty call', start, end)
            if type_ == 'action':
                actors[actor].register_action(Action(actor, function_name, []))
            else:
                actors[actor].register_query(Query(actor, function_name, [], IntType, False))
        function = mp[function_name]
        if prepare_params:
            try:
                pdict = function.prepare_param_dict([p for name, p in params if name is None])
                for name, p in params:
                    if name is not None:
                        if name in pdict:
                            emit_warning(f'keyword argument name {name} matches positional argument name', start, end)
                        pdict[name] = p
            except AssertionError as e:
                emit_error(str(e), start, end)
                raise LogError()
        return function_name, function, pdict, negated

    @__wrap_result
    def make_action(n, start, end):
        action_name, action, pdict, _ = check_function(n, 'action', start, end)
        return (), (ActionNode(f'Event{next_id()}', action, pdict),)

    @__wrap_result
    def make_case(n, start, end):
        if isinstance(n, tuple) and len(n) == 2:
            return ([x.value for x in n[0]], n[1])
        return n

    def _get_query_num_values(query, start, end):
        num_values = query.num_values
        if num_values == 999999999:
            emit_warning(f'maximum value for {query.name} unknown; assuming 50', start, end, print_source=False)
            emit_warning(f'setting a maximum value in functions.csv may reduce generated bfevfl size', start, end)
            num_values = 50
        return num_values

    @__wrap_result
    def make_switch(n, start, end):
        p, branches = n[:-1], n[-1]
        cases = branches[0] + branches[2]
        default = branches[1]

        query_name, query, pdict, _ = check_function(p, 'query', start, end)
        sw = SwitchNode(f'Event{next_id()}', query, pdict)
        entrypoints = []
        enum_values = {}
        if query.rv.type.startswith('enum['):
            enum_values_list = query.rv.type[5:-1].split(',')
            enum_values = {v.strip(): i for i, v in enumerate(enum_values_list)}
        for values, block in cases:
            eps, node, connector = block
            entrypoints.extend(eps)

            sw.add_out_edge(node)
            connector.add_out_edge(sw.connector)

            for value in values:
                if isinstance(value, str):
                    if not enum_values:
                        emit_error(f'Query "{query.name}" does not return an enum', start, end)
                        raise LogError()
                    if value not in enum_values:
                        emit_error(f'Enum "{value}" is not a valid return value of "{query.name}"', start, end)
                        raise LogError()
                    value = enum_values[value]
                sw.add_case(node, value)

        num_values = _get_query_num_values(query, start, end)
        default_values = set(range(num_values)) - set(sum((v for v, n in cases), []))
        if default_values:
            if default is not None:
                _, default, connector = default
                connector.add_out_edge(sw.connector)

            default_branch = default or sw.connector
            sw.add_out_edge(default_branch)
            for value in default_values:
                sw.add_case(default_branch, value)
        elif default:
            emit_warning(f'default branch for {query_name} call is dead code, ignoring', start, end)

        return entrypoints, (sw,)

    @__wrap_result
    def make_bool_function(p, start, end):
        query_name, query, pdict, negated = check_function(p, 'query', start, end)
        num_values = _get_query_num_values(query, start, end)
        if num_values > 2:
            emit_warning(f'call to {query_name} treated as boolean function but may not be', start, end)
        return ((query, pdict), [({0}, query.inverted != negated), (set(range(1, num_values)), (not query.inverted) != negated)])

    @__wrap_result
    def make_in(p, start, end):
        p, values = p[:-1], p[-1]
        query_name, query, pdict, _ = check_function(p, 'query', start, end)
        num_values = _get_query_num_values(query, start, end)
        matched = set()
        unmatched = set(range(num_values))
        enum_values = {}
        if query.rv.type.startswith('enum['):
            enum_values_list = query.rv.type[5:-1].split(',')
            enum_values = {v.strip(): i for i, v in enumerate(enum_values_list)}
        for value in values:
            if isinstance(value.value, str):
                if not enum_values:
                    emit_error(f'Query "{query.name}" does not return an enum', start, end)
                    raise LogError()
                if value.value not in enum_values:
                    emit_error(f'Enum "{value.value}" is not a valid return value of "{query.name}"', start, end)
                    raise LogError()
                value.value = enum_values[value.value]
            if 0 > value.value or num_values <= value.value:
                emit_warning(f'{value.value} never returned by {query_name}, ignored', start, end)
                continue
            matched.add(value.value)
            unmatched.remove(value.value)
        if not matched or not unmatched:
            emit_warning(f'always true or always false check', start, end)
        return ((query, pdict), [(matched, True), (unmatched, False)])

    @__wrap_result
    def make_cmp(p, start, end):
        p, op, value = p[:-2], p[-2], p[-1]
        query_name, query, pdict, _ = check_function(p, 'query', start, end)
        num_values = _get_query_num_values(query, start, end)
        if isinstance(value.value, str):
            if query.rv.type.startswith('enum['):
                enum_values_list = query.rv.type[5:-1].split(',')
                if value.value not in enum_values_list:
                    # list.index would raise ValueError; check membership first
                    emit_error(f'Enum "{value.value}" is not a valid return value of "{query.name}"', start, end)
                    raise LogError()
                value.value = enum_values_list.index(value.value)
            else:
                emit_error(f'Query "{query.name}" does not return an enum', start, end)
                raise LogError()
        if op == '==' or op == '!=':
            matched = {value.value} if 0 <= value.value < num_values else set()
            unmatched = set(i for i in range(num_values) if i != value.value)
        elif op == '<' or op == '>=':
            matched = set(range(min(num_values, value.value)))
            unmatched = set(range(value.value, num_values))
        else:
            matched = set(range(min(num_values, value.value + 1)))
            unmatched = set(range(value.value + 1, num_values))
        if op in ('!=', '>=', '>'):
            matched, unmatched = unmatched, matched
        if not matched or not unmatched:
            emit_warning(f'always true or always false check', start, end)
        return ((query, pdict), [(matched, True), (unmatched, False)])

    def _predicate_replace(values, old, new):
        for i in range(len(values)):
            if values[i][1] == old:
                values[i] = (values[i][0], new)

    @__wrap_result
    def make_or(p, start, end):
        left, right = p
        if isinstance(right, TypedValue):
            left, right = right, left
        if isinstance(left, TypedValue):
            if isinstance(right, TypedValue):
                return TypedValue(type=BoolType, value=left.value or right.value)
            if not left.value:
                return right
            else:
                return TypedValue(type=BoolType, value=True)
        # todo: can probably optimize for smaller output
        _predicate_replace(left[1], False, right)
        return left

    @__wrap_result
    def make_and(p, start, end):
        left, right = p
        if isinstance(right, TypedValue):
            left, right = right, left
        if isinstance(left, TypedValue):
            if isinstance(right, TypedValue):
                return TypedValue(type=BoolType, value=left.value and right.value)
            if left.value:
                return right
            else:
                return TypedValue(type=BoolType, value=False)
        # todo: can probably optimize for smaller output
        _predicate_replace(left[1], True, right)
        return left

    @__wrap_result
    def make_not(p, start, end):
        if isinstance(p, TypedValue):
            p.value = not p.value
            return p
        _predicate_replace(p[1], True, None)
        _predicate_replace(p[1], False, True)
        _predicate_replace(p[1], None, False)
        return p

    def _expand_table(table, current, next_):
        ((query, pdict), values) = table
        sw = SwitchNode(f'Event{next_id()}', query, pdict)
        for match, action in values:
            if isinstance(action, tuple):
                to = _expand_table(action, current, next_)
            elif action:
                to = current
            else:
                to = next_
            for value in match:
                sw.add_case(to, value)
            sw.add_out_edge(to)
        return sw

    @__wrap_result
    def make_ifelse(n, start, end):
        if_, block, elifs, else_ = n
        cond_branches = [(if_, block)] + elifs + ([(None, else_)] if else_ else [])

        entrypoints = []
        next_ = next_connector = ConnectorNode(f'Connector{next_id()}')
        for table, body in cond_branches[::-1]:
            eps, node, branch_connector = body
            entrypoints.extend(eps)

            if table is None:
                next_ = node
                next_connector = branch_connector
            elif isinstance(table, TypedValue):
                if table.value:
                    next_ = node
                    next_connector = branch_connector
                else:
                    continue
            else:
                next_ = _expand_table(table, node, next_)
                branch_connector.add_out_edge(next_.connector)
                next_connector.add_out_edge(next_.connector)
                next_connector = next_.connector
        return entrypoints, (next_,)

    @__wrap_result
    def make_fork(n_, start, end):
        for entrypoints, node, connector in n_:
            for ep in entrypoints:
                __replace_node(ep, connector, None)
            __replace_node(node, connector, None)
        eps = __flatten([ep for ep, _, _ in n_])
        n = [x for _, x, _ in n_]

        fork_id, join_id = next_id(), next_id()
        join = JoinNode(f'Event{join_id}')
        fork = ForkNode(f'Event{fork_id}', join, n)

        for node in n:
            fork.add_out_edge(node)

        return eps, (fork, join)

    @__wrap_result
    def make_while(n, start, end):
        table, (eps, node, connector) = n
        next_connector = ConnectorNode(f'Connector{next_id()}')
        if isinstance(table, TypedValue):
            if table.value:
                # while true
                connector.add_out_edge(node)
                return eps, (node, connector)
            else:
                # while false
                if eps:
                    emit_error('entrypoints in while-false not supported', start, end)
                    raise LogError()
                return [], (connector, connector)
        else:
            next_ = _expand_table(table, node, next_connector)
            connector.add_out_edge(next_)
            return eps, (next_, next_connector)

    @__wrap_result
    def make_do_while(n, start, end):
        (eps, node, connector), table = n
        next_connector = ConnectorNode(f'Connector{next_id()}')
        if isinstance(table, TypedValue):
            if table.value:
                # do while true
                connector.add_out_edge(node)
            return eps, (node, connector)
        else:
            next_ = _expand_table(table, node, next_connector)
            connector.add_out_edge(next_)
            return eps, (node, next_connector)

    @__wrap_result
    def make_subflow_param(n, start, end):
        return (n,)

    @__wrap_result
    def make_subflow(n, start, end):
        ns, name, params = n
        param_dict = {k[0][0]: k[0][1] for k in params}
        return (), (SubflowNode(f'Event{next_id()}', ns or '', name, param_dict),)

    @__wrap_result
    def make_none(_, start, end):
        return None

    @__wrap_result
    def make_return(_, start, end):
        return (), (TerminalNode,)

    @__wrap_result
    def named_value(n, start, end):
        return n

    @__wrap_result
    def unnamed_value(n, start, end):
        return (None, n)

    @__wrap_result
    def make_param(n, start, end):
        return n

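    # Check every use of a flow-local variable (Argument) against the flow's
    # declared parameters; parameters named 'EntryVariableKey<Type>_...'
    # encode their expected type in the name itself.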
    def verify_params(name, root, params_list):
        for param, type_, value in params_list:
            if value is not None and type_ != value.type:
                emit_error(f'variable definition for {param} in {name} is of type {type_} but has default value of type {value.type}')
                raise LogError()
        params = {param: type_ for param, type_, value in params_list}
        for node in find_postorder(root):
            if isinstance(node, (ActionNode, SwitchNode)):
                for param, value in node.params.items():
                    if isinstance(value.value, Argument):
                        if value.value not in params:
                            emit_error(f'variable {value.value} not defined in flow {name}')
                            raise LogError()

                        expected_type = params[value.value]
                        actual_type = value.type
                        if param.startswith('EntryVariableKey'):
                            if param[16:].startswith('Int_'):
                                actual_type = IntType
                            elif param[16:].startswith('Bool_'):
                                actual_type = BoolType
                            elif param[16:].startswith('Float_'):
                                actual_type = FloatType
                            elif param[16:].startswith('String_'):
                                actual_type = StringType
                        else:
                            value.type = ArgumentType

                        if actual_type != AnyType and expected_type != actual_type:
                            emit_error(f'variable {value.value} has the wrong type, defined to be {expected_type} but used as {actual_type}')
                            raise LogError()

            if isinstance(node, SubflowNode):
                for param, value in node.params.items():
                    if isinstance(value.value, Argument):
                        if value.value not in params:
                            emit_error(f'variable {value.value} not defined in flow {name}')
                            raise LogError()
                        value.type = params[value.value]

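    # Assemble one flow: validate parameter usage, emit a RootNode carrying
    # the variable definitions, and give each entrypoint its own copy of the
    # definitions with default values stripped.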
    @__wrap_result
    def make_flow(n, start, end):
        local, name, params, body = n
        entrypoints, body_root, body_connector = body
        verify_params(name, body_root, params)
        vardefs = [RootNode.VarDef(name=n, type=t, initial_value=v.value if v else None) for n, t, v in params]
        valueless_vardefs = [RootNode.VarDef(name=n, type=t, initial_value=None) for n, t, v in params]
        node = RootNode(name, local is not None, False, vardefs)
        for e in entrypoints:
            e.vardefs = valueless_vardefs[:]
        node.add_out_edge(body_root)
        body_connector.add_out_edge(TerminalNode)
        return list(entrypoints) + [node]

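    # Chain the statements of one (optionally entrypoint-labelled) block into
    # a linear sequence ending in a fresh connector; an element is either a
    # single node or an (entry node, exit connector) pair.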
    @__wrap_result
    def link_ep_block(n, start, end):
        connector = ConnectorNode(f'Connector{next_id()}')
        ep, block_info = n
        block_info = [x for x in block_info if x is not None]
        if block_info:
            eps, block = (__flatten(p) for p in zip(*block_info))
        else:
            eps, block = [], ()

        if not block:
            if ep is not None:
                ep_node = RootNode(ep, True, True, [])
                ep_node.add_out_edge(connector)
                eps.append(ep_node)
            return (eps, connector, connector)

        for n1, n2 in zip(block, block[1:] + [connector]):
            n1_conn = n1
            if isinstance(n1, tuple):
                n1, n1_conn = n1
            if isinstance(n2, tuple):
                n2, _ = n2
            if isinstance(n1, SwitchNode):
                n1.connector.add_out_edge(n2)
            else:
                n1_conn.add_out_edge(n2)

        if ep is not None:
            ep_node = RootNode(ep, True, True, [])
            ep_node.add_out_edge(block[0])
            eps.append(ep_node)

        return (eps, block[0], connector)

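    # Stitch consecutive entrypoint blocks together: each block's trailing
    # connector feeds the next block's entry node.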
    @__wrap_result
    def link_block(n, start, end):
        n = [n[0]] + n[1]
        eps, blocks, connectors = zip(*n)
        eps = __flatten(eps)

        for connector, block in zip(connectors[:-1], blocks[1:]):
            connector.add_out_edge(block)

        return (eps, blocks[0], connectors[-1])

    @__wrap_result
    def collect_flows(n, start, end):
        if n is None:
            return []
        else:
            return __flatten([x for x in n if x is not None])

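    # Grammar rules follow; `block` is forward-declared because statements
    # and blocks are mutually recursive.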
    block = forward_decl()

    # pass = PASS NL
    pass_ = (__tokkw('pass') + __tokop('NL')) >> make_none

    # return = RETURN NL
    return_ = (__tokkw('return') + __tokop('NL')) >> make_return

    # id_value = id ASSIGN value | value
    id_value = ((id_ + __tokop('ASSIGN') + __value) >> named_value) | (__value >> unnamed_value)

    # function_params = [id_value {COMMA id_value}]
    function_params = maybe(id_value + many(__tokop('COMMA') + id_value)) >> __make_array

    # actor_name = id [ AT id ]
    actor_name = id_ + maybe(__tokop('AT') + id_)
    # function_name = id
    function_name = id_
    # base_function = actor_name DOT action_name LPAREN function_params RPAREN
    base_function = (
        actor_name + __tokop('DOT') + function_name +
        __tokop('LPAREN') + function_params + __tokop('RPAREN')
    )
    # function = custom_query_parser_reg | base_function | custom_query_parser_pfx | LPAREN function RPAREN
    function = forward_decl()
    if custom_query_parser_reg is not None:
        function_ = custom_query_parser_reg | base_function
    else:
        function_ = base_function
    if custom_query_parser_pfx is not None:
        function_ = function_ | custom_query_parser_pfx
    function_ = function_ | (__tokop('LPAREN') + function + __tokop('RPAREN'))
    function.define(function_)

    # action = (custom_action_parser_reg | base_function | custom_action_parser_pfx) NL
    if custom_action_parser_reg is not None:
        action = custom_action_parser_reg | base_function
    else:
        action = base_function
    if custom_action_parser_pfx is not None:
        action = action | custom_action_parser_pfx
    action = action + __tokop('NL') >> make_action

    # int_or_enum = INT | QUOTE_ID
    int_or_enum = (__toktype('INT') >> __int) | (__toktype('QUOTE_ID') >> __identifier)

    # __intlist = int_or_enum {COMMA int_or_enum} [COMMA] | LPAREN __intlist RPAREN
    __intlist = forward_decl()
    __intlist.define((int_or_enum + many(__tokop('COMMA') + int_or_enum) + maybe(__tokop('COMMA')) >> __make_array) | \
            __tokop('LPAREN') + __intlist + __tokop('RPAREN'))

    # case = CASE __intlist block
    case = __tokkw('case') + __intlist + block >> make_case

    # default = DEFAULT block
    default = __tokkw('default') + block >> make_case

    # cases = { case } [ default ] { case } | pass
    cases = many(case) + maybe(default) + many(case) | pass_

    # switch = SWITCH function COLON NL INDENT cases DEDENT
    switch = __tokkw('switch') + function + __tokop('COLON') + __tokop('NL') + \
            __tokop('INDENT') + cases + __tokop('DEDENT') >> make_switch

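    # Predicate precedence, tightest first: predicate0 (atoms, NOT),
    # predicate1 (in / not in / comparisons), predicate2 (and),
    # predicate3 (or).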
    predicate = forward_decl()
    predicate0 = forward_decl()
    predicate1 = forward_decl()
    predicate2 = forward_decl()
    predicate3 = forward_decl()

    # bool_function = function
    bool_function = function >> make_bool_function

    # in_predicate = function IN __intlist
    in_predicate = function + __tokkw('in') + __intlist >> make_in

    # not_in_predicate = function NOT IN __intlist
    not_in_predicate = function + __tokkw('not') + __tokkw('in') + __intlist >> make_in >> make_not

    # cmp_predicate = function CMP int_or_enum
    cmp_predicate = function + __toktype('CMP') + int_or_enum >> make_cmp

    # not_predicate = NOT predicate0
    not_predicate = __tokkw('not') + predicate0 >> make_not

    # const_predicate = TRUE | FALSE
    const_predicate = (__tokkw_keep('true') | __tokkw_keep('false')) >> __bool

    # paren_predicate = LPAREN predicate RPAREN
    paren_predicate = __tokop('LPAREN') + predicate + __tokop('RPAREN')

    # predicate0 = not_predicate | const_predicate | bool_function | paren_predicate
    predicate0.define(not_predicate | const_predicate | bool_function | paren_predicate)

    # predicate1 = custom_query_parser_op | in_predicate | not_in_predicate | cmp_predicate | predicate0
    predicate1_ = in_predicate | not_in_predicate | cmp_predicate | predicate0
    if custom_query_parser_op is not None:
        predicate1_ = (custom_query_parser_op >> make_bool_function) | predicate1_
    predicate1.define(predicate1_)

    # and_predicate = predicate1 AND predicate2
    and_predicate = predicate1 + __tokkw('and') + predicate2 >> make_and

    # predicate2 = and_predicate | predicate1
    predicate2.define(and_predicate | predicate1)

    # or_predicate = predicate2 OR predicate3
    or_predicate = predicate2 + __tokkw('or') + predicate3 >> make_or

    # predicate3 = or_predicate | predicate2
    predicate3.define(or_predicate | predicate2)

    # predicate = predicate3
    predicate.define(predicate3)

    # if = IF predicate block
    if_ = __tokkw('if') + predicate + block

    # elif = ELIF predicate block
    elif_ = __tokkw('elif') + predicate + block

    # else = ELSE block
    else_ = __tokkw('else') + block

    # ifelse = if { elif } [ else ]
    ifelse = if_ + many(elif_) + maybe(else_) >> make_ifelse

    # branches = { BRANCH block }
    # branchless case handled implicitly by lack of INDENT
    branches = many(__tokkw('branch') + block)

    # fork = FORK COLON NL INDENT branches DEDENT
    fork = __tokkw('fork') + __tokop('COLON') + __tokop('NL') + \
            __tokop('INDENT') + branches + __tokop('DEDENT') >> make_fork

    # while = WHILE predicate block
    while_ = __tokkw('while') + predicate + block >> make_while

    # do_while = DO block WHILE predicate NL
    do_while = __tokkw('do') + block + __tokkw('while') + predicate + __tokop('NL') >> make_do_while

    # flow_name = [id COLON COLON] id
    flow_name = maybe(id_ + __tokop('COLON') + __tokop('COLON')) + id_

    # subflow_param = id ASSIGN nonenum_value
    subflow_param = id_ + __tokop('ASSIGN') + __nonenum_value >> make_subflow_param

    # subflow_params = [subflow_param { COMMA subflow_param }]
    subflow_params = maybe(subflow_param + many(__tokop('COMMA') + subflow_param)) >> __make_array

    # run = RUN flow_name LPAREN subflow_params RPAREN NL
    run = (
        __tokkw('run') + flow_name + __tokop('LPAREN') + subflow_params + __tokop('RPAREN') + __tokop('NL')
    ) >> make_subflow

    # stmt = action | switch | ifelse | fork | while | do_while | run | pass | return | NL
    stmt = action | switch | ifelse | fork | while_ | do_while | run | pass_ | return_ | (__tokop('NL') >> make_none)

    # entrypoint = ENTRYPOINT id COLON NL
    entrypoint = __tokkw('entrypoint') + id_ + __tokop('COLON') + __tokop('NL')

    # stmts = stmt { stmt }
    stmts = stmt + many(stmt) >> __make_array

    # ep_block_body = [entrypoint] stmts
    ep_block_body = maybe(entrypoint) + stmts >> link_ep_block

    # block_body = ep_block_body { ep_block_body }
    block_body = ep_block_body + many(ep_block_body) >> link_block

    # block = COLON NL INDENT block_body DEDENT
    block.define(__tokop('COLON') + __tokop('NL') + __tokop('INDENT') + block_body + __tokop('DEDENT'))

    # type = INT | FLOAT | STR | BOOL | ANY
    type_atom = (__tokkw_keep('int') | __tokkw_keep('float') | __tokkw_keep('str') | __tokkw_keep('bool') | __tokkw_keep('any')) >> __type

    # flow_param = ID COLON TYPE [ASSIGN base_value]
    flow_param = id_ + __tokop('COLON') + type_atom + maybe(__tokop('ASSIGN') + __base_value) >> make_param

    # flow_params = [flow_param { COMMA flow_param }]
    flow_params = maybe(flow_param + many(__tokop('COMMA') + flow_param)) >> __make_array

    # flow = [LOCAL] FLOW ID LPAREN flow_params RPAREN block
    flow = (
        maybe(a(Token('ID', 'local'))) + __tokkw('flow') + id_ + __tokop('LPAREN') + flow_params + __tokop('RPAREN') + block
    ) >> make_flow

    # file = { flow | NL }
    evfl_file = many(flow | (__tokop('NL') >> make_none)) >> collect_flows

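    # Parse the token stream, then post-process the graph: collapse chains of
    # ConnectorNodes, resolve calls to local flows, and strip the shared
    # TerminalNode sentinel.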
    parser = evfl_file + skip(finished)
    roots: List[RootNode] = parser.parse(seq).value
    local_roots = {r.name: r for r in roots if r.local}
    exported_roots = {r.name: r for r in roots if not r.local}
    for n in roots:
        __collapse_connectors(n)
    __process_local_calls(roots, local_roots, exported_roots, exported_tco)
    for n in roots:
        __replace_node(n, TerminalNode, None)

    return list(exported_roots.values()), list(actors.values())