Example #1
def postfix_expression(tokens, symbol_table):
    """
    : primary_expression
    (       '[' expression ']'
            |   '(' ')'
            |   '(' argument_expression_list ')'
            |   '.' IDENTIFIER
            |   '->' IDENTIFIER
            |   '++'
            |   '--'        )*
    """
    type_name, expression, initializer, primary_expression = imap(
        symbol_table.__getitem__,
        ('__ type_name __', '__ expression __', '__ initializer __', '__ primary_expression __')
    )
    # if primary_exp is None:
    #     if peek_or_terminal(tokens) == TOKENS.LEFT_PARENTHESIS and consume(tokens):
    #         # Again slight ambiguity since primary_expression may start with '(' expression ')'
    #         # can't call cast_expression since it will try to call postfix_expression.
    #         if is_type_name(peek_or_terminal(tokens), symbol_table):
    #             ctype, _ = type_name(tokens, symbol_table), error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    #             primary_exp = CompoundLiteral(initializer(tokens, symbol_table), ctype, loc(ctype))
    #         else:  # if we saw a parenthesis and it wasn't a type_name then it must be primary_expr `(` expression `)`
    #             primary_exp, _ = expression(tokens, symbol_table), error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    #     else:
    #         primary_exp = primary_expression(tokens, symbol_table)
    primary_exp = primary_expression(tokens, symbol_table)
    while peek_or_terminal(tokens) in rules(postfix_expression):
        primary_exp = rules(postfix_expression)[peek(tokens)](tokens, symbol_table, primary_exp)

    return primary_exp
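
Every example on this page leans on a rules()/set_rules() pair whose definition is not shown here. As a rough mental model only, a minimal sketch of such a per-function dispatch table follows; the _rules attribute and the toy handler are assumptions for illustration, not the project's API.

# A minimal sketch, assuming rules()/set_rules() attach a dispatch table (a set
# or a dict keyed by token) to each parsing function; `_rules` and the handler
# below are illustrative guesses only.
def set_rules(func, table):
    func._rules = table
    return func

def rules(func):
    return getattr(func, '_rules', {})

def parse_increment(tokens, symbol_table, left):     # hypothetical suffix handler
    return ('post_inc', left)

def postfix_suffixes(tokens, symbol_table, left):    # hypothetical driver loop
    while tokens and tokens[0] in rules(postfix_suffixes):
        left = rules(postfix_suffixes)[tokens.pop(0)](tokens, symbol_table, left)
    return left

set_rules(postfix_suffixes, {'++': parse_increment})
print(postfix_suffixes(['++'], {}, 'x'))             # ('post_inc', 'x')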
Example #2
def compound_assignment(expr, symbol_table):
    assert all(imap(isinstance, imap(c_type, (left_exp(expr), right_exp(expr))), repeat(NumericType)))
    assert not isinstance(c_type(left_exp(expr)), ArrayType)

    if isinstance(left_exp(expr), IdentifierExpression) and \
       base_c_type(c_type(left_exp(expr))) == base_c_type(c_type(right_exp(expr))) and \
       size(c_type(left_exp(expr))) == size(c_type(right_exp(expr))):
        # check that both operands are of the same kind (integral vs numeric) and have the same size ...
        return simple_numeric_assignment_no_casting(expr, symbol_table, rules(compound_assignment)[oper(expr)])

    max_type = max(imap(c_type, (left_exp(expr), right_exp(expr))))  # cast to largest type.
    expression = symbol_table['__ expression __']
    left_instrs = cast(  # cast to max_type
        patch_comp_left_instrs(expression(left_exp(expr), symbol_table),  loc(expr), size(c_type(left_exp(expr)))),
        c_type(left_exp(expr)),
        max_type,
        loc(expr),
    )
    right_instrs = cast(expression(right_exp(expr), symbol_table), c_type(right_exp(expr)), max_type, loc(expr))
    return patch_comp_assignment(
        cast(  # Cast the result back, swap the value and the destination address, and call set to save.
            rules(compound_assignment)[oper(expr)](
                left_instrs,
                right_instrs,
                loc(expr),
                (max_type, c_type(left_exp(expr)), c_type(right_exp(expr)))
            ),
            max_type,
            c_type(expr),
            loc(expr)
        ),
        c_type(expr),
        loc(expr),
    )
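
The max(imap(c_type, ...)) call above works because the project's C types are orderable, so the wider operand type wins before both sides are cast. A minimal sketch of that idea with a rank-ordered stand-in class (not the project's NumericType hierarchy):

# A minimal sketch, assuming CType ordering by rank/size; SketchType is a
# stand-in, not the project's class hierarchy.
from functools import total_ordering

@total_ordering
class SketchType(object):
    def __init__(self, name, rank):
        self.name, self.rank = name, rank
    def __eq__(self, other):
        return self.rank == other.rank
    def __lt__(self, other):
        return self.rank < other.rank

int_t = SketchType('int', 2)
double_t = SketchType('double', 3)
max_type = max((int_t, double_t))    # e.g. the operands of `x *= 2.5` with x an int
print(max_type.name)                 # 'double': both operands are cast up to it,
                                     # then the result is cast back to the left type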
Example #3
def postfix_expression(tokens, symbol_table):
    """
    : primary_expression
    (       '[' expression ']'
            |   '(' ')'
            |   '(' argument_expression_list ')'
            |   '.' IDENTIFIER
            |   '->' IDENTIFIER
            |   '++'
            |   '--'        )*
    """
    type_name, expression, initializer, primary_expression = imap(
        symbol_table.__getitem__,
        ('__ type_name __', '__ expression __', '__ initializer __',
         '__ primary_expression __'))
    # if primary_exp is None:
    #     if peek_or_terminal(tokens) == TOKENS.LEFT_PARENTHESIS and consume(tokens):
    #         # Again slight ambiguity since primary_expression may start with '(' expression ')'
    #         # can't call cast_expression since it will try to call postfix_expression.
    #         if is_type_name(peek_or_terminal(tokens), symbol_table):
    #             ctype, _ = type_name(tokens, symbol_table), error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    #             primary_exp = CompoundLiteral(initializer(tokens, symbol_table), ctype, loc(ctype))
    #         else:  # if we saw a parenthesis and it wasn't a type_name then it must be primary_expr `(` expression `)`
    #             primary_exp, _ = expression(tokens, symbol_table), error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    #     else:
    #         primary_exp = primary_expression(tokens, symbol_table)
    primary_exp = primary_expression(tokens, symbol_table)
    while peek_or_terminal(tokens) in rules(postfix_expression):
        primary_exp = rules(postfix_expression)[peek(tokens)](tokens,
                                                              symbol_table,
                                                              primary_exp)

    return primary_exp
Example #4
def multiplicative_expression(tokens, symbol_table):
    # : cast_expression ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
    cast_expression = symbol_table['__ cast_expression __']
    exp = cast_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(multiplicative_expression):
        exp = get_binary_expression(
            tokens, symbol_table, exp, cast_expression, rules(multiplicative_expression)[peek(tokens)]
        )
    return exp
Example #5
def multiplicative_expression(tokens, symbol_table):
    # : cast_expression ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
    cast_expression = symbol_table['__ cast_expression __']
    exp = cast_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(multiplicative_expression):
        exp = get_binary_expression(
            tokens, symbol_table, exp, cast_expression,
            rules(multiplicative_expression)[peek(tokens)])
    return exp
Example #6
def unary_expression(tokens, symbol_table):
    """
        :   postfix_expression
            | '++' unary_expression
            | '--' unary_expression
            | unary_operator cast_expression
            | 'sizeof' (type_name | unary_expression)
    """
    error_if_empty(tokens)

    if peek_or_terminal(tokens) in rules(unary_expression) and not isinstance(peek(tokens), CONSTANT):
        return rules(unary_expression)[peek(tokens)](tokens, symbol_table)

    return symbol_table['__ postfix_expression __'](tokens, symbol_table)
Example #7
def unary_expression(tokens, symbol_table):
    """
        :   postfix_expression
            | '++' unary_expression
            | '--' unary_expression
            | unary_operator cast_expression
            | 'sizeof' (type_name | unary_expression)
    """
    error_if_empty(tokens)

    if peek_or_terminal(tokens) in rules(unary_expression) and not isinstance(
            peek(tokens), CONSTANT):
        return rules(unary_expression)[peek(tokens)](tokens, symbol_table)

    return symbol_table['__ postfix_expression __'](tokens, symbol_table)
Example #8
def type_name_or_unary_expression(tokens, symbol_table):
    symbol_table = push(symbol_table)
    set_rules(type_name_or_postfix_expression, rules(symbol_table['__ postfix_expression __']))
    symbol_table['__ postfix_expression __'] = type_name_or_postfix_expression
    unary_exp = symbol_table['__ unary_expression __'](tokens, symbol_table)
    _ = pop(symbol_table)
    return unary_exp
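
This example and Example #32 temporarily override an entry in a pushed symbol table and pop the scope afterwards. A minimal sketch of that scoping pattern, with collections.ChainMap standing in for the project's push/pop symbol table:

# A minimal sketch, assuming push() opens a scope whose overrides vanish on pop().
from collections import ChainMap

def push(table):
    return table.new_child()

def pop(table):
    return table.parents

table = ChainMap({'__ postfix_expression __': 'default parser'})
scope = push(table)
scope['__ postfix_expression __'] = 'type_name_or_postfix_expression'
print(scope['__ postfix_expression __'])        # overridden inside the scope
print(pop(scope)['__ postfix_expression __'])   # 'default parser' after the pop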
Example #9
def exclusive_or_expression(tokens, symbol_table):
    # : and_expression ('^' and_expression)*
    exp = and_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(exclusive_or_expression):
        exp = get_binary_expression(tokens, symbol_table, exp, and_expression,
                                    IntegralType)
    return exp
Example #10
def and_expression(tokens, symbol_table):
    # : equality_expression ('&' equality_expression)*
    exp = equality_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(and_expression):
        exp = get_binary_expression(tokens, symbol_table, exp,
                                    equality_expression, IntegralType)
    return exp
Example #11
def equality_expression(tokens, symbol_table):
    # : relational_expression (('=='|'!=') relational_expression)*
    exp = relational_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(equality_expression):
        exp = get_binary_expression(tokens, symbol_table, exp,
                                    relational_expression, NumericType)
    return exp
Example #12
def relational_expression(tokens, symbol_table):
    # : shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
    exp = shift_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(relational_expression):
        exp = get_binary_expression(tokens, symbol_table, exp,
                                    shift_expression, NumericType)
    return exp
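
Examples #9 through #12 (and the shift and multiplicative variants elsewhere on this page) share one left-associative loop: parse the tighter-binding sub-expression, then keep folding operands while the lookahead token belongs to this rule. A minimal stand-alone sketch of that loop, using plain lists and tuples rather than the project's get_binary_expression:

# A minimal sketch of the shared left-associative loop; tokens are a plain list
# and expression nodes are tuples, purely for illustration.
def parse_left_assoc(tokens, parse_operand, operators):
    exp = parse_operand(tokens)
    while tokens and tokens[0] in operators:
        op = tokens.pop(0)
        exp = (op, exp, parse_operand(tokens))   # fold the new operand to the left
    return exp

def number(tokens):
    return tokens.pop(0)

print(parse_left_assoc(['1', '^', '2', '^', '3'], number, {'^'}))
# ('^', ('^', '1', '2'), '3')  -> left associative, as in the loops above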
Example #13
def iteration_statement(tokens, symbol_table):
    """
        : 'while' '(' expression ')' statement
        | 'do' statement 'while' '(' expression ')' ';'
        | 'for' '(' expression? ';' expression? ';' expression? ')' statement
    """
    return rules(iteration_statement)[peek(tokens, '')](tokens, symbol_table)
Example #14
def iteration_statement(tokens, symbol_table):
    """
        : 'while' '(' expression ')' statement
        | 'do' statement 'while' '(' expression ')' ';'
        | 'for' '(' expression? ';' expression? ';' expression? ')' statement
    """
    return rules(iteration_statement)[peek(tokens, '')](tokens, symbol_table)
Example #15
def type_qualifiers(tokens, _, defaults=None):  # : ('const' or volatile or *args)*
    values = set(takewhile(rules(type_qualifiers).__contains__, tokens))
    const, volatile = imap(values.__contains__, (TOKENS.CONST, TOKENS.VOLATILE))
    if not values and not defaults:
        raise ValueError('{l} Expected TOKENS.CONST or TOKENS.VOLATILE got {g}'.format(
            l=loc(peek(tokens, EOFLocation)), g=peek(tokens, '')
        ))
    return const or defaults[0], volatile or defaults[1]
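
The qualifier scan above drains matching tokens with takewhile and then answers two membership tests. A minimal sketch over a plain iterator; note that the standard itertools.takewhile also swallows the first non-matching item, so the project's token stream presumably supports peeking or pushback (an assumption here):

# A minimal sketch with itertools.takewhile over a bare iterator.
from itertools import takewhile

QUALIFIERS = {'const', 'volatile'}               # stand-in for rules(type_qualifiers)
tokens = iter(['const', 'volatile', 'int', 'x'])
values = set(takewhile(QUALIFIERS.__contains__, tokens))
print(sorted(values))                            # ['const', 'volatile']
print('const' in values, 'volatile' in values)   # True True
# Caveat: 'int' was consumed by takewhile here; a peekable stream avoids that.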
Example #16
def shift_expression(
        tokens, symbol_table
):  # : additive_expression (('<<'|'>>') additive_expression)*
    exp = additive_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(shift_expression):
        exp = get_binary_expression(tokens, symbol_table, exp,
                                    additive_expression, IntegralType)
    return exp
Example #17
def unary_operator(tokens, symbol_table):
    operator = consume(tokens)
    if operator == TOKENS.LOGICAL_AND:
        return AddressOfLabelExpression(
            error_if_not_type(consume(tokens, ''), IDENTIFIER),
            void_pointer_type(loc(operator)), loc(operator))
    cast_exp = symbol_table['__ cast_expression __'](tokens, symbol_table)
    return rules(unary_operator)[operator](cast_exp, operator)
Example #18
def pre_processor(char_stream, location):  # returns pre_processing symbol or #identifier ...
    values = consume(char_stream)
    if peek_or_terminal(char_stream) == TOKENS.NUMBER_SIGN:  # token concatenation symbol ...
        values += consume(char_stream)
    else:
        _ = exhaust(takewhile({' ', '\t', '\a'}.__contains__, char_stream))
        values += ''.join(takewhile(letters.__contains__, char_stream))
    return rules(pre_processor).get(values, IDENTIFIER)(values, location)
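
The directive scan above consumes the '#', checks for a second '#' (token pasting), otherwise skips spaces and tabs and reads the directive name letter by letter, then dispatches with IDENTIFIER as the fallback. A minimal sketch on a plain string, with index arithmetic replacing the project's peekable char_stream and a hypothetical handlers dict:

# A minimal sketch only: a plain string stands in for the char_stream.
import string

def scan_directive(line):
    assert line.startswith('#')
    i = 1
    while i < len(line) and line[i] in ' \t':
        i += 1                                   # skip horizontal whitespace
    j = i
    while j < len(line) and line[j] in string.ascii_letters:
        j += 1                                   # read the directive name
    return '#' + line[i:j]

handlers = {'#include': 'include handler', '#define': 'define handler'}   # hypothetical
print(scan_directive('#  include <stdio.h>'))                       # '#include'
print(handlers.get(scan_directive('# pragma once'), 'IDENTIFIER'))  # falls back like .get above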
Example #19
def unary_operator(tokens, symbol_table):
    operator = consume(tokens)
    if operator == TOKENS.LOGICAL_AND:
        return AddressOfLabelExpression(
            error_if_not_type(consume(tokens, ''), IDENTIFIER), void_pointer_type(loc(operator)), loc(operator)
        )
    cast_exp = symbol_table['__ cast_expression __'](tokens, symbol_table)
    return rules(unary_operator)[operator](cast_exp, operator)
Example #20
def logical_operators(expr, symbol_table):
    expression = symbol_table['__ expression __']
    return rules(logical_operators)[oper(expr)](
        expression(left_exp(expr), symbol_table), 
        expression(right_exp(expr), symbol_table),
        loc(expr),
        tuple(imap(size_arrays_as_pointers, imap(c_type, (expr, left_exp(expr), right_exp(expr)))))
    )
Example #21
def exhaust_remaining_blocks(token_seq):
    exhaust(
        imap(
            apply,
            imap(
                rules(exhaust_remaining_blocks).__getitem__,
                takewhile(TOKENS.PENDIF.__ne__, imap(peek,
                                                     repeat(token_seq)))),
            repeat((token_seq, ))))
Example #22
def get_block(token_seq, terminating_with={TOKENS.PENDIF}):
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with,
                          imap(peek, repeat(token_seq)))), repeat(
                              (token_seq, ))))
Example #23
def statement(stmnt, symbol_table):
    is_expression = isinstance(stmnt, expressions.Expression)

    # Set entry point to False if it's an expression, or use the statement function if present, otherwise None.
    instrs = rules(statement)[type(stmnt)](stmnt, symbol_table)
    # Almost all Expression statements leave a value on the stack, so we must remove it.
    if stmnt and is_expression:
        instrs = chain(instrs, allocate(-size(c_type(stmnt), overrides={VoidType: 0}), loc(stmnt)))
    return instrs
Example #24
def __calc_if(expr, token_seq, macros):
    tokens = get_block(token_seq, terminating_with={TOKENS.PELIF, TOKENS.PELSE, TOKENS.PENDIF})  # get a single block
    if not expr:  # if expression is false we have to exhaust ... and search for a true elif expression, else or endif
        _ = exhaust(tokens)
        tokens = rules(__calc_if)[peek(token_seq)](token_seq, macros)

    for t in imap(consume, repeat(tokens)):  # emit tokens which will be pre-processed ...
        yield t

    exhaust_remaining_blocks(token_seq)
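
__calc_if above emits the tokens of the first arm whose controlling expression is true and exhausts every other arm up to #endif. A minimal sketch of just the selection step, using plain condition/token pairs instead of the project's token_seq machinery:

# A minimal sketch of the branch-selection idea; not the project's API.
def select_branch(arms):
    # arms: (condition, tokens) pairs for #if / #elif ...; a final #else uses True
    for condition, tokens in arms:
        if condition:
            return list(tokens)    # the chosen arm's tokens are pre-processed next
    return []                      # no arm taken: nothing is emitted

arms = [(False, ['a']), (False, ['b']), (True, ['c'])]   # if / elif / else
print(select_branch(arms))                               # ['c']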
Example #25
def statement(tokens, symbol_table):
    """
        : declaration
        | labeled_statement
        | compound_statement
        | selection_statement
        | iteration_statement
        | jump_statement
        | expression_statement
        | expression ';'
        | ;
    """
    if peek_or_terminal(tokens) in rules(
            statement):  # if current token has a rule use that one first
        return rules(statement)[peek(tokens)](tokens, symbol_table)

    if is_declaration(
            tokens,
            symbol_table):  # checking for type_name is a bit expensive ...
        return declaration(tokens, symbol_table)

    # both expressions and labels may start with an identifier
    if isinstance(peek_or_terminal(tokens), IDENTIFIER):
        label_name = consume(tokens)
        if peek_or_terminal(tokens) == TOKENS.COLON:
            return symbol_table['__ labeled_statement __'](chain(
                (label_name, ), consume_all(tokens)), symbol_table)
            # return label_stmnt(label_name, statement(tokens, symbol_table))
        # it must be an expression, TODO: figure out a way without using dangerous chain!
        # tokens = chain((label_name, consume(tokens)), tokens)
        tokens = chain((label_name, ), consume_all(tokens))
        expr, _ = symbol_table['__ expression __'](
            tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    if peek_or_terminal(tokens) is not terminal:
        expr, _ = symbol_table['__ expression __'](
            tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    raise ValueError(
        '{l} No rule could be found to create statement, got {got}'.format(
            l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')))
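
The identifier branch above has to look one token past the name to tell a label from an expression statement, and uses chain() to push the consumed name back onto the stream. A minimal sketch of that push-back trick with plain iterators; the classification labels are illustrative only:

# A minimal sketch of the one-token lookahead plus chain() push-back.
from itertools import chain

def statement_kind(tokens):
    tokens = iter(tokens)
    name = next(tokens)                          # consume the identifier
    nxt = next(tokens, None)                     # one token of lookahead
    rebuilt = chain((name,) if nxt is None else (name, nxt), tokens)
    kind = 'labeled_statement' if nxt == ':' else 'expression_statement'
    return kind, list(rebuilt)                   # the rebuilt stream is re-parsed

print(statement_kind(['done', ':', 'return', ';']))
# ('labeled_statement', ['done', ':', 'return', ';'])
print(statement_kind(['done', '=', '1', ';']))
# ('expression_statement', ['done', '=', '1', ';'])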
Example #26
def inc_dec(expr, symbol_table):
    assert not isinstance(c_type(expr), ArrayType) and isinstance(
        c_type(expr), IntegralType)
    value = rules(inc_dec)[type(expr)]
    if isinstance(c_type(expr), PointerType):
        value *= size_extended(c_type(c_type(expr)))

    return get_postfix_update(size(c_type(expr)))(all_but_last(
        symbol_table['__ expression __'](exp(expr), symbol_table), Loads,
        loc(expr)), value, loc(expr))
Example #27
def inc_dec(expr, symbol_table):
    assert not isinstance(c_type(expr), ArrayType) and isinstance(c_type(expr), IntegralType)
    value = rules(inc_dec)[type(expr)]
    if isinstance(c_type(expr), PointerType):
        value *= size_extended(c_type(c_type(expr)))

    return get_postfix_update(size(c_type(expr)))(
        all_but_last(symbol_table['__ expression __'](exp(expr), symbol_table), Loads, loc(expr)),
        value,
        loc(expr)
    )
Example #28
def jump_statement(tokens, symbol_table):
    """
        : 'goto' IDENTIFIER ';'
        | 'continue' ';'
        | 'break' ';'
        | 'return' ';'
        | 'return' expression ';'
    """
    stmnt = rules(jump_statement)[peek(tokens)](tokens, symbol_table)
    _ = error_if_not_value(tokens, TOKENS.SEMICOLON)
    yield stmnt
Example #29
def get_block(token_seq, terminating_with={TOKENS.PENDIF}):
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #30
def exhaust_remaining_blocks(token_seq):
    exhaust(
        imap(
            apply,
            imap(
                rules(exhaust_remaining_blocks).__getitem__,
                takewhile(TOKENS.PENDIF.__ne__, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #31
def type_qualifiers(tokens,
                    _,
                    defaults=None):  # : ('const' or volatile or *args)*
    values = set(takewhile(rules(type_qualifiers).__contains__, tokens))
    const, volatile = imap(values.__contains__,
                           (TOKENS.CONST, TOKENS.VOLATILE))
    if not values and not defaults:
        raise ValueError(
            '{l} Expected TOKENS.CONST or TOKENS.VOLATILE got {g}'.format(
                l=loc(peek(tokens, EOFLocation)), g=peek(tokens, '')))
    return const or defaults[0], volatile or defaults[1]
Example #32
def type_name_or_postfix_expression(tokens, symbol_table):
    symbol_table = push(symbol_table)
    symbol_table['__ compound_literal __'] = type_name_or_compound_literal
    primary_exp = symbol_table['__ primary_expression __'](tokens, symbol_table)
    _ = pop(symbol_table)
    # pop 'type_name_or_compound_literal' and type_name_or_postfix_expression ...
    postfix_expression_rules = rules(symbol_table['__ postfix_expression __'])
    if not isinstance(primary_exp, CType):  # it must have been an expression ...
        while peek_or_terminal(tokens) in postfix_expression_rules:
            primary_exp = postfix_expression_rules[peek(tokens)](tokens, symbol_table, primary_exp)
    return primary_exp  # otherwise it must have been a type_name ...
Example #33
def type_specifier(tokens, symbol_table, default=no_default):
    """
        : 'void'
        | ['signed' | 'unsigned'] 'char' | ['signed' | 'unsigned'] 'short'
        | ['signed' | 'unsigned'] 'int' | ['signed' | 'unsigned'] 'long'
        | 'float' | 'double'
        | struct_specifier
        | union_specifier
        | enum_specifier
        | TYPE_NAME
    """
    token = peek_or_terminal(tokens)
    if token in rules(type_specifier):
        return rules(type_specifier)[token](tokens, symbol_table)
    elif isinstance(symbol_table.get(token, token), CType):
        return symbol_table[token](loc(consume(tokens)))
    elif default is not no_default:
        return default
    raise ValueError(
        '{l} Expected type_specifier or TYPE_NAME got {got}'.format(
            l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')))
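
The lookup above tries, in order: a keyword rule, a typedef name recorded in the symbol table, and finally the caller-supplied default. A minimal stand-alone sketch of that three-way resolution with plain dicts and a sentinel, not the project's CType machinery:

# A minimal sketch: a plain dict stands in for the symbol table, and a sentinel
# object marks "no default supplied", as the example above assumes.
no_default = object()

def resolve_specifier(token, symbol_table, keyword_rules, default=no_default):
    if token in keyword_rules:
        return keyword_rules[token]
    if token in symbol_table:              # a typedef'd TYPE_NAME
        return symbol_table[token]
    if default is not no_default:
        return default
    raise ValueError('Expected type_specifier or TYPE_NAME, got {!r}'.format(token))

keyword_rules = {'int': 'IntegerType', 'double': 'DoubleType'}
print(resolve_specifier('int', {}, keyword_rules))                          # IntegerType
print(resolve_specifier('size_t', {'size_t': 'UnsignedType'}, keyword_rules))
print(resolve_specifier('foo', {}, keyword_rules, default='IntegerType'))   # the default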
Example #34
def pre_processor(
        char_stream,
        location):  # returns pre_processing symbol or #identifier ...
    values = consume(char_stream)
    if peek_or_terminal(
            char_stream
    ) == TOKENS.NUMBER_SIGN:  # token concatenation symbol ...
        values += consume(char_stream)
    else:
        _ = exhaust(takewhile({' ', '\t', '\a'}.__contains__, char_stream))
        values += ''.join(takewhile(letters.__contains__, char_stream))
    return rules(pre_processor).get(values, IDENTIFIER)(values, location)
Example #35
def type_specifier(tokens, symbol_table, default=no_default):
    """
        : 'void'
        | ['signed' | 'unsigned'] 'char' | ['signed' | 'unsigned'] 'short'
        | ['signed' | 'unsigned'] 'int' | ['signed' | 'unsigned'] 'long'
        | 'float' | 'double'
        | struct_specifier
        | union_specifier
        | enum_specifier
        | TYPE_NAME
    """
    token = peek_or_terminal(tokens)
    if token in rules(type_specifier):
        return rules(type_specifier)[token](tokens, symbol_table)
    elif isinstance(symbol_table.get(token, token), CType):
        return symbol_table[token](loc(consume(tokens)))
    elif default is not no_default:
        return default
    raise ValueError('{l} Expected type_specifier or TYPE_NAME got {got}'.format(
        l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')
    ))
Example #36
def direct_declarator(tokens, symbol_table):
    """
        :   (IDENTIFIER | '(' declarator ')') declarator_suffix*

        declarator_suffix
            :   '[' constant_expression ']'
            |   '[' ']'
            |   '(' parameter_type_list ')'
            |   '(' ')'
    """
    dec = get_rule(direct_declarator, peek_or_terminal(tokens), hash_funcs=(type, identity))(tokens, symbol_table)
    _ = peek_or_terminal(tokens) in rules(declarator_suffix) and set_core_type(
        dec, declarator_suffix(tokens, symbol_table))
    return dec
Example #37
def relational_operators(expr, symbol_table):
    max_type = max(imap(c_type, (left_exp(expr), right_exp(expr))))
    expression = symbol_table['__ expression __']
    if isinstance(max_type, ArrayType):
        max_type = PointerType(c_type(max_type), loc(max_type))
    left_instrs = expression(left_exp(expr), symbol_table)
    right_instrs = expression(right_exp(expr), symbol_table)

    return rules(relational_operators)[oper(expr)](
        cast(left_instrs, c_type(left_exp(expr)), max_type, loc(expr)),
        cast(right_instrs, c_type(right_exp(expr)), max_type, loc(expr)),
        loc(expr),
        (c_type(expr), c_type(left_exp(expr)), c_type(right_exp(expr))),
    )
Example #38
def statement(tokens, symbol_table):
    """
        : declaration
        | labeled_statement
        | compound_statement
        | selection_statement
        | iteration_statement
        | jump_statement
        | expression_statement
        | expression ';'
        | ;
    """
    if peek_or_terminal(tokens) in rules(statement):  # if current token has a rule use that one first
        return rules(statement)[peek(tokens)](tokens, symbol_table)

    if is_declaration(tokens, symbol_table):  # checking for type_name is a bit expensive ...
        return declaration(tokens, symbol_table)

    # both expressions and labels may start with an identifier
    if isinstance(peek_or_terminal(tokens), IDENTIFIER):
        label_name = consume(tokens)
        if peek_or_terminal(tokens) == TOKENS.COLON:
            return symbol_table['__ labeled_statement __'](chain((label_name,), consume_all(tokens)), symbol_table)
            # return label_stmnt(label_name, statement(tokens, symbol_table))
        # it must be an expression, TODO: figure out a way without using dangerous chain!
        # tokens = chain((label_name, consume(tokens)), tokens)
        tokens = chain((label_name,),  consume_all(tokens))
        expr, _ = symbol_table['__ expression __'](tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    if peek_or_terminal(tokens) is not terminal:
        expr, _ = symbol_table['__ expression __'](tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    raise ValueError('{l} No rule could be found to create statement, got {got}'.format(
        l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')
    ))
Example #39
def __calc_if(expr, token_seq, macros):
    tokens = get_block(
        token_seq,
        terminating_with={TOKENS.PELIF, TOKENS.PELSE,
                          TOKENS.PENDIF})  # get a single block
    if not expr:  # if expression is false we have to exhaust ... and search for a true elif expression, else or endif
        _ = exhaust(tokens)
        tokens = rules(__calc_if)[peek(token_seq)](token_seq, macros)

    for t in imap(
            consume,
            repeat(tokens)):  # emit tokens which will be pre-processed ...
        yield t

    exhaust_remaining_blocks(token_seq)
Example #40
def arithmetic_and_bitwise_operators(expr, symbol_table):
    expression = symbol_table['__ expression __']
    left_instrs = expression(left_exp(expr), symbol_table)
    right_instrs = expression(right_exp(expr), symbol_table)

    to_type = c_type(expr)
    if isinstance(to_type, ArrayType):
        to_type = PointerType(c_type(to_type), loc(c_type(expr)))

    return rules(arithmetic_and_bitwise_operators)[oper(expr)](
        cast(left_instrs, c_type(left_exp(expr)), to_type, loc(expr)),
        cast(right_instrs, c_type(right_exp(expr)), to_type, loc(expr)),
        loc(expr),
        (to_type, c_type(left_exp(expr)), c_type(right_exp(expr))),
    )
Example #41
def direct_declarator(tokens, symbol_table):
    """
        :   (IDENTIFIER | '(' declarator ')') declarator_suffix*

        declarator_suffix
            :   '[' constant_expression ']'
            |   '[' ']'
            |   '(' parameter_type_list ')'
            |   '(' ')'
    """
    dec = get_rule(direct_declarator,
                   peek_or_terminal(tokens),
                   hash_funcs=(type, identity))(tokens, symbol_table)
    _ = peek_or_terminal(tokens) in rules(declarator_suffix) and set_core_type(
        dec, declarator_suffix(tokens, symbol_table))
    return dec
Example #42
def labeled_statement(tokens, symbol_table):
    """
        : IDENTIFIER ':' statement
        | 'case' constant_expression ':' statement
        | 'default' ':' statement
    """

    if isinstance(peek(tokens), IDENTIFIER):
        return label(tokens, symbol_table)

    try:
        _ = symbol_table['__ SWITCH STATEMENT __']
    except KeyError as _:
        raise ValueError('{l} {g} statement outside of switch'.format(
            l=loc(peek(tokens)), g=peek(tokens)))

    return rules(labeled_statement)[peek(tokens)](tokens, symbol_table)
Example #43
def conditional_expression(tokens, symbol_table):
    # logical_or_expression ('?' expression ':' conditional_expression)?
    exp = logical_or_expression(tokens, symbol_table)
    if peek(tokens, '') in rules(conditional_expression):
        location = loc(error_if_not_value(tokens, TOKENS.QUESTION))
        _ = error_if_not_type(c_type(exp), NumericType)
        if_exp_is_true = assignment_expression(tokens, symbol_table)
        _ = error_if_not_value(tokens, TOKENS.COLON)
        if_exp_is_false = conditional_expression(tokens, symbol_table)

        ctype_1, ctype_2 = imap(c_type, (if_exp_is_true, if_exp_is_false))
        if safe_type_coercion(ctype_1, ctype_2):
            ctype = ctype_1(location)
        elif safe_type_coercion(ctype_2, ctype_1):
            ctype = ctype_2(location)
        else:
            raise ValueError('{l} Could not determine type for ternary-expr, given the types {t1} and {t2}'.format(
                l=location, t1=ctype_1, t2=ctype_2
            ))
        return TernaryExpression(exp, if_exp_is_true, if_exp_is_false, ctype, location)
    return exp
Example #44
def conditional_expression(tokens, symbol_table):
    # logical_or_expression ('?' expression ':' conditional_expression)?
    exp = logical_or_expression(tokens, symbol_table)
    if peek(tokens, '') in rules(conditional_expression):
        location = loc(error_if_not_value(tokens, TOKENS.QUESTION))
        _ = error_if_not_type(c_type(exp), NumericType)
        if_exp_is_true = assignment_expression(tokens, symbol_table)
        _ = error_if_not_value(tokens, TOKENS.COLON)
        if_exp_is_false = conditional_expression(tokens, symbol_table)

        ctype_1, ctype_2 = imap(c_type, (if_exp_is_true, if_exp_is_false))
        if safe_type_coercion(ctype_1, ctype_2):
            ctype = ctype_1(location)
        elif safe_type_coercion(ctype_2, ctype_1):
            ctype = ctype_2(location)
        else:
            raise ValueError(
                '{l} Could not determine type for ternary-expr, given the types {t1} and {t2}'
                .format(l=location, t1=ctype_1, t2=ctype_2))
        return TernaryExpression(exp, if_exp_is_true, if_exp_is_false, ctype,
                                 location)
    return exp
Example #45
def type_qualifiers(tokens, _, defaults=None):  # : ('const' or volatile or *args)*
    values = set(takewhile(rules(type_qualifiers).__contains__, tokens))
    const, volatile = imap(values.__contains__, (TOKENS.CONST, TOKENS.VOLATILE))
    if not values and not defaults:
        raise ValueError('{l} Expected TOKENS.CONST or TOKENS.VOLATILE got {g}'.format(
            l=loc(peek(tokens, EOFLocation)), g=peek(tokens, '')
        ))
    return const or defaults[0], volatile or defaults[1]
set_rules(type_qualifiers, {TOKENS.VOLATILE, TOKENS.CONST})


def specifier_qualifier_list(tokens, symbol_table):
    const, volatile = type_qualifiers(tokens, symbol_table, (False, False))
    base_type = type_specifier(tokens, symbol_table, IntegerType(loc(peek(tokens, EOFLocation))))
    base_type.const, base_type.volatile = type_qualifiers(tokens, symbol_table, (const, volatile))
    return base_type
set_rules(specifier_qualifier_list, set(rules(type_qualifiers)) | set(rules(type_specifier)))


def type_name(tokens, symbol_table):  #: type_specifier abstract_declarator?   # returns CType
    base_type = specifier_qualifier_list(tokens, symbol_table)
    abstract_declarator = symbol_table['__ abstract_declarator __']
    if peek_or_terminal(tokens) in {TOKENS.LEFT_PARENTHESIS, TOKENS.LEFT_BRACKET, TOKENS.STAR} \
       or isinstance(peek_or_terminal(tokens), IDENTIFIER):
        abs_decl = abstract_declarator(tokens, symbol_table)
        set_core_type(abs_decl, base_type)
        return c_type(abs_decl)

    return base_type
set_rules(type_name, set(rules(specifier_qualifier_list)))

Example #46
def parameter_declaration(tokens, symbol_table):
    # : specifier_qualifier_list (declarator | abstract_declarator) or `...`
    return rules(parameter_declaration)[peek_or_terminal(tokens)](tokens,
                                                                  symbol_table)
Example #47
def relational_expression(tokens, symbol_table):
    # : shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
    exp = shift_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(relational_expression):
        exp = get_binary_expression(tokens, symbol_table, exp, shift_expression, NumericType)
    return exp
Example #48
def shift_expression(tokens, symbol_table):  # : additive_expression (('<<'|'>>') additive_expression)*
    exp = additive_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(shift_expression):
        exp = get_binary_expression(tokens, symbol_table, exp, additive_expression, IntegralType)
    return exp
Example #49
def exclusive_or_expression(tokens, symbol_table):
    # : and_expression ('^' and_expression)*
    exp = and_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(exclusive_or_expression):
        exp = get_binary_expression(tokens, symbol_table, exp, and_expression, IntegralType)
    return exp
Example #50
def and_expression(tokens, symbol_table):
    # : equality_expression ('&' equality_expression)*
    exp = equality_expression(tokens, symbol_table)
    while peek(tokens, '') in rules(and_expression):
        exp = get_binary_expression(tokens, symbol_table, exp, equality_expression, IntegralType)
    return exp
Example #51
def is_type_name(token, symbol_table):
    return token in rules(type_name) or isinstance(symbol_table.get(token, token), CType)
Example #52
def assignment_expression(tokens, symbol_table):
    # : conditional_expression | conditional_expression assignment_operator assignment_expression
    left_value_exp = conditional_expression(tokens, symbol_table)
    return rules(assignment_expression)[peek(tokens)](tokens, symbol_table, left_value_exp) \
        if peek(tokens, '') in rules(assignment_expression) else left_value_exp
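
The grammar comment above is right-recursive (conditional_expression assignment_operator assignment_expression), so chained assignments group to the right. A minimal sketch of that shape with plain tokens; the operator set and tuple nodes are illustrative only:

# A minimal sketch of right-associative assignment parsing.
def parse_assignment(tokens):
    left = tokens.pop(0)                         # stand-in for conditional_expression
    if tokens and tokens[0] in {'=', '+=', '-='}:
        op = tokens.pop(0)
        return (op, left, parse_assignment(tokens))   # recurse on the rest
    return left

print(parse_assignment(['a', '=', 'b', '+=', 'c']))
# ('=', 'a', ('+=', 'b', 'c'))  -> groups to the right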