Esempio n. 1
0
def type_name_or_unary_expression(tokens, symbol_table):
    """Parse either a type-name or a unary expression (sizeof/cast contexts)
    by temporarily swapping in a postfix-expression parser that also accepts
    type names; the original parser is restored by popping the scope.
    """
    symbol_table = push(symbol_table)
    # seed the substitute parser with the currently registered postfix-expression rules
    set_rules(type_name_or_postfix_expression, rules(symbol_table['__ postfix_expression __']))
    symbol_table['__ postfix_expression __'] = type_name_or_postfix_expression
    unary_exp = symbol_table['__ unary_expression __'](tokens, symbol_table)
    _ = pop(symbol_table)  # restore the original '__ postfix_expression __' binding
    return unary_exp
Esempio n. 2
0
            chain(
                expression(e, symbol_table),
                allocate(
                    -size_arrays_as_pointers(c_type(e),
                                             overrides={VoidType: 0}), loc(e)))
            for e in exp(expr)[:-1]), expression(exp(expr)[-1], symbol_table))


# Entry point to all expression or expression statements
def expression(expr, symbol_table):
    """Entry point: dispatch *expr* to the handler registered for its AST node type."""
    handler = rules(expression)[type(expr)]
    return handler(expr, symbol_table)


# Sub-handlers that each carry their own per-node-type rule tables; their
# rules are merged into `expression`'s dispatch table below.
expression_funcs = unary_expression, postfix_expression, ternary_expression, initializer_expression
set_rules(
    expression,
    chain((
        # an empty expression just reserves space for its (possibly void) type
        (EmptyExpression, lambda expr, *_: allocate(
            size(c_type(expr), overrides={VoidType: 0}), loc(expr))),
        (ConstantExpression, constant_expression),
        (CastExpression, cast_expression),
        (IdentifierExpression, identifier_expression),
        (CommaExpression, comma_expression),
        (BinaryExpression, binary_expression),
        (AssignmentExpression, binary_expression),
        (CompoundAssignmentExpression, binary_expression),
    ),
          # merge each sub-handler's registered node types, each mapping back
          # to the sub-handler itself, as (node_type, handler) pairs
          chain.from_iterable(
              imap(izip, imap(rules, expression_funcs),
                   imap(repeat, expression_funcs)))))
Esempio n. 3
0
    if is_declaration(tokens, symbol_table):  # checking for type_name is a bit expensive ...
        return declaration(tokens, symbol_table)

    # both expressions and labels may start with an identifier
    if isinstance(peek_or_terminal(tokens), IDENTIFIER):
        label_name = consume(tokens)
        if peek_or_terminal(tokens) == TOKENS.COLON:
            return symbol_table['__ labeled_statement __'](chain((label_name,), consume_all(tokens)), symbol_table)
            # return label_stmnt(label_name, statement(tokens, symbol_table))
        # it must be an expression, TODO: figure out a way without using dangerous chain!
        # tokens = chain((label_name, consume(tokens)), tokens)
        tokens = chain((label_name,),  consume_all(tokens))
        expr, _ = symbol_table['__ expression __'](tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    if peek_or_terminal(tokens) is not terminal:
        expr, _ = symbol_table['__ expression __'](tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    raise ValueError('{l} No rule could be found to create statement, got {got}'.format(
        l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')
    ))
# Statement parsers that carry their own (token -> parser) rule tables; merge
# them into `statement`'s dispatch table, plus '{' (compound) and ';' (empty).
statement_funcs = labeled_statement, selection_statement, iteration_statement, jump_statement
set_rules(
    statement,
    chain(
        chain.from_iterable(imap(izip, imap(rules, statement_funcs), imap(repeat, statement_funcs))),
        ((TOKENS.LEFT_BRACE, _comp_stmnt), (TOKENS.SEMICOLON, _empty_statement))
    )
)
Esempio n. 4
0
    yield ElseStatement(stmnt, location)


def _if(tokens, symbol_table):
    """Parse: 'if' '(' expression ')' statement ('else' statement)?"""
    location = loc(consume(tokens))  # consume the 'if' keyword
    error_if_not_value(tokens, TOKENS.LEFT_PARENTHESIS)
    expression = symbol_table['__ expression __']
    statement = symbol_table['__ statement __']
    expr = expression(tokens, symbol_table)
    error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    yield IfStatement(expr, statement(tokens, symbol_table), else_statement(tokens, symbol_table), location)


def switch(tokens, symbol_table):
    """Parse: 'switch' '(' expression ')' statement.

    Pushes a scope carrying the switch expression and a table used to detect
    duplicate case values; the scope is popped lazily, only after the body
    generator has been fully consumed (see _pop_symbol_table).
    """
    def _pop_symbol_table(symbol_table):  # Pop symbol table once we have gone through the whole body ...
        _ = pop(symbol_table)
        yield EmptyStatement()
    expression, statement = imap(symbol_table.__getitem__, ('__ expression __', '__ statement __'))
    # consume 'switch', then require '(' expression ')'
    location, _ = loc(consume(tokens)), error_if_not_value(tokens, TOKENS.LEFT_PARENTHESIS)
    expr, _ = expression(tokens, symbol_table), error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    symbol_table = push(symbol_table)
    symbol_table['__ SWITCH STATEMENT __'] = SymbolTable()  # Add dict to track cases, emit error on duplicates.
    symbol_table['__ SWITCH EXPRESSION __'] = expr
    yield SwitchStatement(expr, chain(statement(tokens, symbol_table), _pop_symbol_table(symbol_table)), location)


def selection_statement(tokens, symbol_table):
    """
        : 'if' '(' expression ')' statement ('else' statement)?
        | 'switch' '(' expression ')' statement
    """
    parse = rules(selection_statement)[peek_or_terminal(tokens)]
    return parse(tokens, symbol_table)
set_rules(selection_statement, ((TOKENS.IF, _if), (TOKENS.SWITCH, switch)))
Esempio n. 5
0
    return IdentifierDesignatedExpression(identifier, _expr_or_designated_expr(tokens, symbol_table), loc(identifier))


def _assignment_expr(tokens, symbol_table):
    return symbol_table['__ assignment_expression __'](tokens, symbol_table)


def _initializer(tokens, symbol_table):
    return symbol_table['__ initializer __'](tokens, symbol_table)


def _assignment_expression_or_initializer(tokens, symbol_table):
    """Require '=', then parse an initializer (on '{') or, via the rules
    default, an assignment expression."""
    # `and` preserves order: '=' must be consumed before peeking for dispatch
    return error_if_not_value(tokens, TOKENS.EQUAL) and rules(
        _assignment_expression_or_initializer
    )[peek_or_terminal(tokens)](tokens, symbol_table)
set_rules(_assignment_expression_or_initializer, ((TOKENS.LEFT_BRACE, _initializer),), _assignment_expr)


def _expr_or_designated_expr(tokens, symbol_table):
    """Dispatch on the lookahead: '.'/'[' begin designators, '=' begins the value."""
    handler = rules(_expr_or_designated_expr)[peek_or_terminal(tokens)]
    return handler(tokens, symbol_table)
set_rules(
    _expr_or_designated_expr,
    (
        (TOKENS.DOT, identifier_designated_expr),
        (TOKENS.LEFT_BRACKET, offset_designated_expr),
        (TOKENS.EQUAL, _assignment_expression_or_initializer)
    )
)


def designated_expression(tokens, symbol_table):  # (offset or range or identifier)+ = assignment_expression
Esempio n. 6
0
#
# def count_identical_expressions(exprs):
#     count, previous_expr = 0, None
#     for current_exp in imap(peek, repeat(exprs)):
#         while current_exp is peek_or_terminal(exprs):
#             current_exp = consume(exprs)
#             count += 1
#         yield count, current_exp


def _initializer_expression(expr, symbol_table):
    """Emit initializer instructions, dispatching on the c-type of *expr*."""
    return rules(_initializer_expression)[type(c_type(expr))](expr, symbol_table)

# every scalar type shares one numeric initializer; aggregates get dedicated ones
set_rules(
    _initializer_expression,
    chain(
        izip(scalar_types, repeat(numeric_initializer)),
        (
            (StructType, struct_initializer),
            (UnionType, union_initializer),
            (ArrayType, array_initializer)
        ),
    )
)


def initializer_expression(expr, symbol_table):
    """Entry point for Initializer nodes; delegates to the c-type dispatcher."""
    return _initializer_expression(expr, symbol_table)
set_rules(initializer_expression, {Initializer})

Esempio n. 7
0
        max_type = PointerType(c_type(max_type), loc(max_type))
    left_instrs = expression(left_exp(expr), symbol_table)
    right_instrs = expression(right_exp(expr), symbol_table)

    return rules(relational_operators)[oper(expr)](
        cast(left_instrs, c_type(left_exp(expr)), max_type, loc(expr)),
        cast(right_instrs, c_type(right_exp(expr)), max_type, loc(expr)),
        loc(expr),
        (c_type(expr), c_type(left_exp(expr)), c_type(right_exp(expr))),
    )
# Map each equality/relational operator token to its instruction emitter.
set_rules(
    relational_operators,
    (
        (TOKENS.EQUAL_EQUAL, equal),
        (TOKENS.NOT_EQUAL, not_equal),
        (TOKENS.LESS_THAN, less_than),
        (TOKENS.GREATER_THAN, greater_than),
        (TOKENS.LESS_THAN_OR_EQUAL, less_than_or_equal),
        (TOKENS.GREATER_THAN_OR_EQUAL, greater_than_or_equal)
    )
)



def logical_operators(expr, symbol_table):
    expression = symbol_table['__ expression __']
    return rules(logical_operators)[oper(expr)](
        expression(left_exp(expr), symbol_table), 
        expression(right_exp(expr), symbol_table),
        loc(expr),
        tuple(imap(size_arrays_as_pointers, imap(c_type, (expr, left_exp(expr), right_exp(expr)))))
Esempio n. 8
0
def default_parameter_declaration(tokens, symbol_table):
    """Parse specifier_qualifier_list followed by an optional (abstract) declarator."""
    base_type = symbol_table['__ specifier_qualifier_list __'](tokens, symbol_table)
    c_decl = AbstractDeclarator(base_type, loc(base_type))
    next_token = peek_or_terminal(tokens)
    starts_declarator = (
        next_token in {TOKENS.STAR, TOKENS.LEFT_PARENTHESIS, TOKENS.LEFT_BRACKET}
        or isinstance(next_token, IDENTIFIER)
    )
    if starts_declarator:
        c_decl = abstract_declarator(tokens, symbol_table)
        set_core_type(c_decl, base_type)
    return c_decl


def parameter_declaration(tokens, symbol_table):
    """Parse: specifier_qualifier_list (declarator | abstract_declarator) or `...`."""
    parse = rules(parameter_declaration)[peek_or_terminal(tokens)]
    return parse(tokens, symbol_table)
set_rules(parameter_declaration, ((TOKENS.ELLIPSIS, ellipsis_parameter_declaration),), default_parameter_declaration)


def parameter_type_list(tokens, symbol_table):  # : parameter_declaration (',' parameter_declaration)*
    # Lazily parse comma-separated parameter declarations: the takewhile
    # predicate both detects and consumes each ',' (consume returns the
    # truthy token), stopping at the first non-comma lookahead.
    return chain(
        (parameter_declaration(tokens, symbol_table),),
        imap(
            parameter_declaration,
            takewhile(lambda tokens: peek(tokens) == TOKENS.COMMA and consume(tokens), repeat(tokens)),
            repeat(symbol_table)
        )
    )


def identifier_direct_declarator(tokens, symbol_table):
    ident = error_if_not_type(consume(tokens), IDENTIFIER)
Esempio n. 9
0
    return instrs


def label_statement(stmnt, symbol_table):
    """Emit a label: a Pass instruction as the jump target, recorded in the
    label table, patching any previously-seen gotos that reference it.
    """
    instr = Pass(loc(stmnt))
    labels, gotos, stack, statement = imap(
        symbol_table.__getitem__, ('__ LABELS __', '__ GOTOS __', '__ stack __', '__ statement __')
    )
    # record (target instruction, stack pointer at label) for gotos seen later
    labels[name(stmnt)] = (instr, symbol_table['__ stack __'].stack_pointer)

    # update all previous gotos referring to this lbl
    for alloc_instr, rel_jump_instr, goto_stack_pointer in gotos[name(stmnt)]:
        # TODO: bug! set_address uses obj.address.
        # adjust the goto's stack correction by the depth difference between goto and label
        alloc_instr[0].obj.address = alloc_instr.address + (stack.stack_pointer - goto_stack_pointer)
        rel_jump_instr[0].obj = instr

    del gotos[name(stmnt)][:]  # all pending gotos for this label are now resolved
    return chain((instr,), statement(stmnt.statement, symbol_table))
set_rules(label_statement, {LabelStatement})


def jump_statement(stmnt, symbol_table):
    """Emit instructions for a jump statement, dispatching on its node type."""
    emit = rules(jump_statement)[type(stmnt)]
    return emit(stmnt, symbol_table)
set_rules(
    jump_statement,
    (
        (BreakStatement, break_statement),
        (ContinueStatement, continue_statement),
        (ReturnStatement, return_statement),
        (GotoStatement, goto_statement),
    )
)
Esempio n. 10
0
def non_static_pointer_typed_definition_initialized_by_array_type(stmnt, symbol_table):
    """Initialize a pointer from an array-typed expression: evaluate the
    expression (pushing its values on the stack), then load the stack pointer
    as the pointer's value.
    """
    stack, expression = utils.symbol_table.get_symbols(symbol_table, '__ stack __', '__ expression __')
    expr = declarations.initialization(stmnt)
    assert not isinstance(expr, (expressions.Initializer, expressions.CompoundLiteral))
    return chain(  # evaluate stack expression, which will push values on the stack and initialized pointer with sp
        cast(expression(expr, symbol_table), c_type(expr), c_type(stmnt), loc(stmnt)), load_stack_pointer(loc(stmnt))
    )


def non_static_pointer_typed_definition(stmnt, symbol_table):
    """Dispatch on the c-type of the initializing expression (arrays are special)."""
    init_type = type(c_type(declarations.initialization(stmnt)))
    return rules(non_static_pointer_typed_definition)[init_type](stmnt, symbol_table)
set_rules(
    non_static_pointer_typed_definition,
    ((ArrayType, non_static_pointer_typed_definition_initialized_by_array_type),),
    non_static_default_typed_definition
)


def non_static_definition(stmnt, symbol_table):
    """Allocate an automatic (non-static) definition on the stack, register it
    in the symbol table, then dispatch on its c-type."""
    stmnt = stack_allocation(symbol_table['__ stack __'], stmnt)
    symbol_table[declarations.name(stmnt)] = stmnt
    return rules(non_static_definition)[type(c_type(stmnt))](stmnt, symbol_table)
set_rules(
    non_static_definition, ((PointerType, non_static_pointer_typed_definition),), non_static_default_typed_definition
)


def definition(stmnt, symbol_table):
    """Emit code for a definition, dispatching on its storage-class type."""
    emit = rules(definition)[type(stmnt.storage_class)]
    return emit(stmnt, symbol_table)
Esempio n. 11
0
__author__ = 'samyvilar'

from itertools import chain

from utils.rules import set_rules
from front_end.loader.locations import loc
from front_end.parser.ast.expressions import TernaryExpression, exp, left_exp, right_exp

from back_end.virtual_machine.instructions.architecture import Pass, Offset, relative_jump, get_jump_false

from back_end.emitter.c_types import c_type, size_arrays_as_pointers


def ternary_expression(expr, symbol_table):
    """Emit instructions for `cond ? left : right`.

    Layout: jump-if-false over the left branch to `if_false_instr`, then the
    left branch, an unconditional jump to `end_of_conditional_instr`, and
    finally the right branch.
    """
    if_false_instr, end_of_conditional_instr = Pass(loc(expr)), Pass(loc(expr))
    expression = symbol_table['__ expression __']
    return chain(
        get_jump_false(size_arrays_as_pointers(c_type(exp(expr))))(
            expression(exp(expr), symbol_table),
            Offset(if_false_instr, loc(expr)),
            loc(expr)
        ),
        expression(left_exp(expr), symbol_table),
        relative_jump(Offset(end_of_conditional_instr, loc(end_of_conditional_instr)), loc(expr)),
        (if_false_instr,),
        expression(right_exp(expr), symbol_table),
        (end_of_conditional_instr,),
    )
set_rules(ternary_expression, {TernaryExpression})
Esempio n. 12
0
            lambda o: isinstance(referenced_obj(o, None), (Operand, Reference, Instruction)), operns(instr, ())
        ))
        yield instr

    # for ref in references:
    #     new_obj = get_new_instr(ref, ref.obj)
    #     ref.obj = new_obj

    exhaust(imap(setattr, references, repeat('obj'), imap(get_new_instr, references, imap(referenced_obj, references))))
    # print 'done ...'
    # print '.\n'.join(imap(str, references))


def no_optimization(instrs):
    """Pass-through 'optimization': emit the next instruction unchanged, as a 1-tuple."""
    return (consume(instrs),)


# Optimization level 0 performs no transformation at all.
zero_level_optimization = no_optimization


def first_level_optimization(instrs):
    """Dispatch on the type of the next instruction; unmatched types fall back to no optimization."""
    optimization = get_rule(first_level_optimization, peek(instrs), hash_funcs=(type,))
    return optimization(instrs)
set_rules(first_level_optimization, ((Allocate, remove_allocation), (Pass, remove_pass)), zero_level_optimization)


def optimize(instrs, level=zero_level_optimization):
    """Apply *level* repeatedly over the instruction stream, then fix up
    references to instructions that were replaced or removed.

    NOTE(review): resets module-level bookkeeping globals, so concurrent or
    re-entrant calls are unsafe — confirm single-threaded use.
    """
    global new_instructions, old_instructions, deleted_instructions
    new_instructions = {}
    old_instructions = defaultdict(list)
    deleted_instructions = []
    # takewhile(peek, ...) keeps applying `level` until peek reports no more input
    return update_instruction_references(chain.from_iterable(imap(level, takewhile(peek, repeat(instrs)))))
Esempio n. 13
0
def _assignment_expr(tokens, symbol_table):
    return symbol_table['__ assignment_expression __'](tokens, symbol_table)


def _initializer(tokens, symbol_table):
    return symbol_table['__ initializer __'](tokens, symbol_table)


def _assignment_expression_or_initializer(tokens, symbol_table):
    """Require '=', then parse an initializer (on '{') or, via the rules
    default, an assignment expression."""
    # `and` preserves order: '=' must be consumed before peeking for dispatch
    return error_if_not_value(tokens, TOKENS.EQUAL) and rules(
        _assignment_expression_or_initializer)[peek_or_terminal(tokens)](
            tokens, symbol_table)


set_rules(_assignment_expression_or_initializer,
          ((TOKENS.LEFT_BRACE, _initializer), ), _assignment_expr)


def _expr_or_designated_expr(tokens, symbol_table):
    """Dispatch on the lookahead token: '.'/'[' start designators, '=' starts the value."""
    parse = rules(_expr_or_designated_expr)[peek_or_terminal(tokens)]
    return parse(tokens, symbol_table)


set_rules(_expr_or_designated_expr,
          ((TOKENS.DOT, identifier_designated_expr),
           (TOKENS.LEFT_BRACKET, offset_designated_expr),
           (TOKENS.EQUAL, _assignment_expression_or_initializer)))


def designated_expression(
        tokens, symbol_table
Esempio n. 14
0

from utils.errors import error_if_not_value, error_if_not_type, raise_error


def _assignment_expression(tokens, symbol_table):
    return symbol_table['__ assignment_expression __'](tokens, symbol_table)


def _initializer_expression(tokens, symbol_table):
    return symbol_table['__ initializer __'](tokens, symbol_table)


def initializer_or_assignment_expression(tokens, symbol_table):
    """'{' starts an initializer; anything else parses as an assignment expression (default rule)."""
    parse = rules(initializer_or_assignment_expression)[peek(tokens)]
    return parse(tokens, symbol_table)
set_rules(initializer_or_assignment_expression, ((TOKENS.LEFT_BRACE, _initializer_expression),), _assignment_expression)


def init_declarator(tokens, symbol_table, base_type=CType(''), storage_class=None):
    # : declarator ('=' assignment_expression or initializer)?
    decl = set_core_type(symbol_table['__ declarator __'](tokens, symbol_table), base_type)
    if peek_or_terminal(tokens) == TOKENS.EQUAL and consume(tokens):
        decl = Definition(name(decl), c_type(decl), EmptyExpression(c_type(decl)), loc(decl), storage_class)
        symbol_table[name(decl)] = decl  # we have to add it to the symbol table for things like `int a = a;`
        expr = initializer_or_assignment_expression(tokens, symbol_table)
        # if declaration is an array type and the expression is of string_type then convert to initializer for parsing
        if isinstance(c_type(decl), ArrayType) and isinstance(c_type(expr), StringType):
            expr = Initializer(
                enumerate(exp(expr)), ArrayType(c_type(c_type(expr)), len(c_type(expr)), loc(expr)), loc(expr)
            )
        decl.initialization = parse_initializer(expr, decl) if isinstance(expr, Initializer) else expr
Esempio n. 15
0
            if current_value == TOKENS.STAR and peek_or_terminal(char_stream) == TOKENS.FORWARD_SLASH:
                yield consume(char_stream)
                break
    _comment = ''.join(_values(char_stream))
    if _comment.endswith(TOKENS.END_OF_MULTI_LINE_COMMENT):
        return MULTI_LINE_COMMENT(''.join(_comment), location)
    raise_error('{l} Could no locate end of multi-line comment.'.format(l=location))


def comment(char_stream, location):  # all comments start with a FORWARD_SLASH ...
    """Lex a comment; if the next symbol does not start one, return the symbol itself."""
    _symbol = symbol(char_stream, location)
    handler = rules(comment).get(_symbol)
    if handler is None:
        return _symbol
    return handler(char_stream, location)
set_rules(
    comment,
    (
        (TOKENS.START_OF_COMMENT, single_line_comment),
        (TOKENS.START_OF_MULTI_LINE_COMMENT, multi_line_comment)
    )
)


def is_value_adjacent(values, current_location, value):
    next_value = peek_or_terminal(values)
    next_location = loc(next_value, EOFLocation)
    return not any(
        starmap(
            cmp,
            izip(
                (value,      current_location.line_number, current_location.column_number + len(value)),
                (next_value, next_location.line_number,     next_location.column_number)
            )
Esempio n. 16
0
def INCLUDE(token_seq, macros):
    """Handle an #include directive: resolve the file path (string literal,
    macro identifier/keyword, or <...> form), then tokenize and preprocess
    the included file."""
    line = get_line(token_seq)
    # consume '#include' itself, then dispatch on the path form
    file_path = consume(line) and get_rule(
        INCLUDE, peek_or_terminal(line), hash_funcs=(type, identity))(line,
                                                                      macros)
    search_paths = (os.getcwd(), )
    _ = error_if_not_empty(line)  # nothing may follow the file path on the line
    return chain(
        macros['__ preprocess __'](tokenize(
            load(file_path, chain(macros['__ include_dirs __'],
                                  search_paths))), macros), )


# The rules iterable must be a flat sequence of (key, handler) pairs: the
# izip of IDENTIFIER/KEYWORD pairs has to be chained in, not nested as an
# element (cf. the chain(izip(...), (...)) pattern used by other set_rules
# call sites); nesting it would make the izip object itself be consumed as
# one bogus key/value pair.
set_rules(INCLUDE, chain(((STRING, string_file_path),
                          (TOKENS.LESS_THAN, standard_lib_file_path)),
                         izip((IDENTIFIER, KEYWORD),
                              repeat(identifier_file_path))))


def _func_macro_arguments(line):
    symbol_table = SymbolTable()
    while peek(line, TOKENS.RIGHT_PARENTHESIS) != TOKENS.RIGHT_PARENTHESIS:
        if peek(line) == TOKENS.ELLIPSIS:
            arg = FunctionMacroVariadicArgument(
                IDENTIFIER('__VA_ARGS__', loc(consume(line))))
        else:
            arg = FunctionMacroArgument(
                error_if_not_type(consume(line, EOFLocation),
                                  (IDENTIFIER, KEYWORD)))
            if peek_or_terminal(line) == TOKENS.ELLIPSIS:
                arg = FunctionMacroVariadicArgument(
Esempio n. 17
0
        ret_exp = symbol_table['__ expression __'](tokens, symbol_table)

    if not isinstance(ret_exp, EmptyExpression) and isinstance(ret_type, VoidType):
        raise ValueError('{l} void-function returning a value ...'.format(l=loc(ret_exp)))

    if not safe_type_coercion(c_type(ret_exp), ret_type):
        raise ValueError('{l} Unable to coerce from {f} to {t}'.format(l=loc(ret_exp), f=c_type(ret_exp), t=ret_type))

    return ReturnStatement(ret_exp, location)


def _goto(tokens, symbol_table):
    """Parse 'goto' IDENTIFIER, located at the 'goto' keyword."""
    location = loc(consume(tokens))  # consume 'goto'
    target = error_if_not_type(consume(tokens, EOFLocation), IDENTIFIER)
    return GotoStatement(target, location)


def jump_statement(tokens, symbol_table):
    """
        : 'goto' IDENTIFIER ';'
        | 'continue' ';'
        | 'break' ';'
        | 'return' ';'
        | 'return' expression ';'
    """
    # dispatch on the jump keyword, then require the trailing ';'
    stmnt = rules(jump_statement)[peek(tokens)](tokens, symbol_table)
    _ = error_if_not_value(tokens, TOKENS.SEMICOLON)
    yield stmnt
set_rules(
    jump_statement,
    ((TOKENS.GOTO, _goto), (TOKENS.CONTINUE, _continue), (TOKENS.BREAK, _break), (TOKENS.RETURN, _return)),
)
Esempio n. 18
0

def tilde_operator(expr, symbol_table):
    """Emit bitwise-NOT (~) over the operand's instruction stream."""
    bitwise_not = get_not_bitwise(size_arrays_as_pointers(c_type(expr)))
    operand_instrs = symbol_table['__ expression __'](exp(expr), symbol_table)
    return bitwise_not(operand_instrs, loc(oper(expr)))


def unary_operator(expr, symbol_table):
    """Dispatch a unary-operator expression on its operator token."""
    handler = rules(unary_operator)[oper(expr)]
    return handler(expr, symbol_table)


set_rules(unary_operator, (
    (TOKENS.AMPERSAND, address_of),
    (TOKENS.STAR, dereference),
    # unary +/- funnel through numeric_operator with a sign multiplier
    (TOKENS.PLUS, lambda *args: numeric_operator(1, *args)),
    (TOKENS.MINUS, lambda *args: numeric_operator(-1, *args)),
    (TOKENS.EXCLAMATION, exclamation_operator),
    (TOKENS.TILDE, tilde_operator),
))


def unary_expression(expr, symbol_table):
    """Dispatch a unary expression to the handler registered for its node type."""
    handler = rules(unary_expression)[type(expr)]
    return handler(expr, symbol_table)


set_rules(unary_expression, (
    (SizeOfExpression, size_of),
    (PrefixIncrementExpression, lambda *args: inc_dec(1, *args)),
    (PrefixDecrementExpression, lambda *args: inc_dec(-1, *args)),
    (AddressOfExpression, unary_operator),
    (UnaryExpression, unary_operator),
Esempio n. 19
0
def _assignment_expression(tokens, symbol_table):
    return symbol_table['__ assignment_expression __'](tokens, symbol_table)


def _initializer_expression(tokens, symbol_table):
    return symbol_table['__ initializer __'](tokens, symbol_table)


def initializer_or_assignment_expression(tokens, symbol_table):
    """'{' starts an initializer; anything else is an assignment expression (default rule)."""
    parse = rules(initializer_or_assignment_expression)[peek(tokens)]
    return parse(tokens, symbol_table)


set_rules(initializer_or_assignment_expression,
          ((TOKENS.LEFT_BRACE, _initializer_expression), ),
          _assignment_expression)


def init_declarator(tokens,
                    symbol_table,
                    base_type=CType(''),
                    storage_class=None):
    # : declarator ('=' assignment_expression or initializer)?
    decl = set_core_type(
        symbol_table['__ declarator __'](tokens, symbol_table), base_type)
    if peek_or_terminal(tokens) == TOKENS.EQUAL and consume(tokens):
        decl = Definition(name(decl), c_type(decl),
                          EmptyExpression(c_type(decl)), loc(decl),
                          storage_class)
        symbol_table[name(
Esempio n. 20
0
    token = consume(tokens)
    return ConstantExpression(ord(token), CharType(loc(token)), loc(token))


def get_type(suffix_str, location):
    """Map an integer-literal suffix string ('u', 'l', 'll' combinations,
    lower-cased by the caller) to a (c-type, evaluator) pair.

    NOTE(review): _eval is rebound to `long` unconditionally by the second
    assignment regardless of suffix — harmless under Python 2 where int/long
    unify on conversion, but worth confirming that was intended.
    """
    _type, _eval = IntegerType(location, unsigned=unsigned_suffix in suffix_str), int
    _type, _eval = (long_suffix in suffix_str and LongType(_type, loc(_type), unsigned(_type))) or _type, long
    _type, _eval = (long_long_suffix in suffix_str and LongType(_type, loc(_type), unsigned(_type))) or _type, long
    return _type, _eval


def integer_literal(tokens):
    """Parse an integer literal; the rules table maps token type to numeric base."""
    token = consume(tokens)
    _type, _eval = get_type(suffix(token).lower(), loc(token))
    # _eval(token, base): int/long conversion in the token's base (10, 16, or 8)
    return ConstantExpression(_eval(token, rules(integer_literal)[type(token)]), _type, loc(token))
set_rules(integer_literal, ((INTEGER, 10), (HEXADECIMAL, 16), (OCTAL, 8)))


def float_literal(tokens):
    """Consume a floating-point token and wrap it as a double-typed constant."""
    token = consume(tokens)
    location = loc(token)
    return ConstantExpression(float(token), DoubleType(location), location)


def array_identifier(tokens, symbol_table):
    """Identifier expression for an array-typed name, typed from the symbol table."""
    ident = consume(tokens)
    return IdentifierExpression(ident, c_type(symbol_table[ident])(loc(ident)), loc(ident))


def function_identifier(tokens, symbol_table):  # function identifiers expr return address to function type ...
    """Identifier expression for a function name: its value is the function's address."""
    ident = consume(tokens)
    return IdentifierExpression(ident, AddressType(c_type(symbol_table[ident]), loc(ident)), loc(ident))
Esempio n. 21
0
    _comment = ''.join(_values(char_stream))
    if _comment.endswith(TOKENS.END_OF_MULTI_LINE_COMMENT):
        return MULTI_LINE_COMMENT(''.join(_comment), location)
    raise_error(
        '{l} Could no locate end of multi-line comment.'.format(l=location))


def comment(char_stream, location):  # all comments start with a FORWARD_SLASH ...
    """Lex a comment; when the symbol does not begin one, return the symbol unchanged."""
    _symbol = symbol(char_stream, location)
    lex = rules(comment).get(_symbol)
    if lex is None:
        return _symbol
    return lex(char_stream, location)


set_rules(comment, ((TOKENS.START_OF_COMMENT, single_line_comment),
                    (TOKENS.START_OF_MULTI_LINE_COMMENT, multi_line_comment)))


def is_value_adjacent(values, current_location, value):
    """True iff the next token in *values* equals *value*, sits on the same
    line, and starts exactly where *value* would end (no whitespace between).
    """
    next_value = peek_or_terminal(values)
    next_location = loc(next_value, EOFLocation)
    # cmp-based triple comparison: token equality, line equality, and
    # next column == current column + len(value); any nonzero cmp result
    # means "not adjacent".
    return not any(
        starmap(
            cmp,
            izip((value, current_location.line_number,
                  current_location.column_number + len(value)),
                 (next_value, next_location.line_number,
                  next_location.column_number))))


def symbol(char_stream, location):
Esempio n. 22
0
from front_end.parser.expressions.reduce import reduce_expression
from front_end.parser.expressions.cast import type_name_or_unary_expression

from utils.errors import error_if_not_type, error_if_empty


def increment_decrement(tokens, symbol_table):
    """Parse prefix '++' / '--' applied to a unary expression."""
    # tuple assignment fixes order: the operator is consumed before parsing
    operator, unary_exp = consume(
        tokens), symbol_table['__ unary_expression __'](tokens, symbol_table)
    # c_type(unary_exp)(loc(operator)): clone the operand's type at the operator's location
    return rules(increment_decrement)[operator](unary_exp, c_type(unary_exp)(
        loc(operator)), loc(operator))


set_rules(increment_decrement,
          ((TOKENS.PLUS_PLUS, PrefixIncrementExpression),
           (TOKENS.MINUS_MINUS, PrefixDecrementExpression)))


def size_of(tokens, symbol_table):
    """Parse: 'sizeof' unary_expression | 'sizeof' '(' type_name ')'."""
    location = loc(consume(tokens))  # consume 'sizeof'
    operand = type_name_or_unary_expression(tokens, symbol_table)
    return SizeOfExpression(operand, location)


def address_of(cast_exp, operator):
    """Build `&cast_exp`: an AddressOfExpression of pointer type, located at the operator."""
    pointer_type = PointerType(c_type(cast_exp)(loc(operator)), loc(cast_exp))
    return AddressOfExpression(cast_exp, pointer_type, loc(operator))
Esempio n. 23
0
    if token in {TOKENS.STAR, TOKENS.LEFT_PARENTHESIS, TOKENS.LEFT_BRACKET
                 } or isinstance(token, IDENTIFIER):
        c_decl = abstract_declarator(tokens, symbol_table)
        set_core_type(c_decl, base_type)

    return c_decl


def parameter_declaration(tokens, symbol_table):
    """Parse: specifier_qualifier_list (declarator | abstract_declarator) or `...`."""
    handler = rules(parameter_declaration)[peek_or_terminal(tokens)]
    return handler(tokens, symbol_table)


set_rules(parameter_declaration,
          ((TOKENS.ELLIPSIS, ellipsis_parameter_declaration), ),
          default_parameter_declaration)


def parameter_type_list(
        tokens,
        symbol_table):  # : parameter_declaration (',' parameter_declaration)*
    # Lazily parse comma-separated parameter declarations; the takewhile
    # predicate both detects and consumes each ',' (consume returns the
    # truthy token), stopping at the first non-comma lookahead.
    return chain((parameter_declaration(tokens, symbol_table), ),
                 imap(
                     parameter_declaration,
                     takewhile(
                         lambda tokens: peek(tokens) == TOKENS.COMMA
                         and consume(tokens), repeat(tokens)),
                     repeat(symbol_table)))

Esempio n. 24
0
def const_struct_expr(expr):
    """Emit constant-expression instructions for every member, flattened and reversed."""
    members = reverse(flatten(exp(expr)))
    return chain.from_iterable(imap(constant_expression, members))


def const_union_expr(expr):
    """Emit a constant union initializer: pad the unused remainder of the
    union's size, then emit the (at most one) initializing member."""
    assert len(exp(expr)) <= 1  # a union initializer sets at most one member
    return chain(                   # allocate rest ...
        allocate(size(c_type(expr)) - sum(imap(size, imap(c_type, exp(expr)))), loc(expr)),
        const_struct_expr(exp(expr))
    )


def constant_expression(expr, *_):
    """Emit instructions for a compile-time constant, dispatching on its c-type."""
    return rules(constant_expression)[type(c_type(expr))](expr)
# all integral types share one emitter; floats, aggregates and strings get dedicated ones
set_rules(
    constant_expression,
    chain(
        izip(integral_types, repeat(const_integral_expr)),
        (
            (FloatType, const_float_expr),
            (DoubleType, const_double_expr),

            (UnionType, const_union_expr),
            (StructType, const_struct_expr),
            (ArrayType, const_struct_expr),

            (StringType, const_string_expr),
        )
    )
)
Esempio n. 25
0
            chain(
                expression(e, symbol_table),
                allocate(-size_arrays_as_pointers(c_type(e), overrides={VoidType: 0}), loc(e))
            ) for e in exp(expr)[:-1]
        ),
        expression(exp(expr)[-1], symbol_table)
    )


# Entry point to all expression or expression statements
def expression(expr, symbol_table):
    """Dispatch *expr* to the emitter registered for its AST node type."""
    return rules(expression)[type(expr)](expr, symbol_table)
# Sub-emitters whose per-node-type rules are merged into `expression`'s table.
expression_funcs = unary_expression, postfix_expression, ternary_expression, initializer_expression
set_rules(
    expression,
    chain(
        (
            # an empty expression just reserves space for its (possibly void) type
            (EmptyExpression, lambda expr, *_: allocate(size(c_type(expr), overrides={VoidType: 0}), loc(expr))),
            (ConstantExpression, constant_expression),
            (CastExpression, cast_expression),
            (IdentifierExpression, identifier_expression),
            (CommaExpression, comma_expression),

            (BinaryExpression, binary_expression),
            (AssignmentExpression, binary_expression),
            (CompoundAssignmentExpression, binary_expression),
        ),
        # merge each sub-emitter's registered node types, each mapping back to the sub-emitter
        chain.from_iterable(imap(izip, imap(rules, expression_funcs), imap(repeat, expression_funcs)))
    )
)
Esempio n. 26
0
def tilde_operator(expr, symbol_table):
    """Emit the bitwise complement (~) of the operand's instruction stream."""
    emit = get_not_bitwise(size_arrays_as_pointers(c_type(expr)))
    expression = symbol_table['__ expression __']
    return emit(expression(exp(expr), symbol_table), loc(oper(expr)))


def unary_operator(expr, symbol_table):
    """Dispatch a unary-operator expression on its operator token."""
    return rules(unary_operator)[oper(expr)](expr, symbol_table)
set_rules(
    unary_operator,
    (
        (TOKENS.AMPERSAND, address_of),
        (TOKENS.STAR, dereference),

        # unary +/- funnel through numeric_operator with a sign multiplier
        (TOKENS.PLUS, lambda *args: numeric_operator(1, *args)),
        (TOKENS.MINUS, lambda *args: numeric_operator(-1, *args)),
        (TOKENS.EXCLAMATION, exclamation_operator),

        (TOKENS.TILDE, tilde_operator),
    )
)


def unary_expression(expr, symbol_table):
    """Emit a unary expression by dispatching on its AST node type."""
    emit = rules(unary_expression)[type(expr)]
    return emit(expr, symbol_table)
set_rules(
    unary_expression,
    (
        (SizeOfExpression, size_of),
        (PrefixIncrementExpression, lambda *args: inc_dec(1, *args)),
Esempio n. 27
0
                allocation_table.append(
                    chain(
                        (start,),
                        update_stack(stmnt.stack.stack_pointer, instr.case.stack.stack_pointer, loc(instr)),
                        relative_jump(Offset(instr, loc(instr)), loc(instr)),
                    )
                )

                cases[error_if_not_type(exp(exp(instr.case)), (int, long, str))] = Offset(start, loc(instr))
                del instr.case
            switch_body_instrs.append(instr)

        max_switch_value = 2**(8*size_arrays_as_pointers(c_type(exp(stmnt)))) - 1
        for instr in jump_table(loc(stmnt), cases, allocation_table, max_switch_value, switch_body_instrs):
            yield instr
        _ = pop(symbol_table)

    return chain(
        symbol_table['__ expression __'](exp(stmnt), symbol_table), body(stmnt, symbol_table, end_switch), (end_switch,)
    )


def selection_statement(stmnt, symbol_table):
    """Emit instructions for if/switch/case/default statements, by node type."""
    emit = rules(selection_statement)[type(stmnt)]
    return emit(stmnt, symbol_table)
set_rules(
    selection_statement,
    (
        (IfStatement, if_statement), (SwitchStatement, switch_statement),
        (CaseStatement, case_statement), (DefaultStatement, case_statement),
    )
)
Esempio n. 28
0
    yield consume(token_seq)


def get_block(token_seq, terminating_with=frozenset((TOKENS.PENDIF,))):
    """Lazily emit the tokens of a preprocessor block.

    Peeks at *token_seq* and, while the upcoming token is not in
    *terminating_with*, dispatches to the registered rule (nested '#if*'
    blocks vs. a single plain token) and chains the results together.
    """
    # frozenset default instead of a mutable set literal (safer default arg,
    # identical membership semantics).
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with,
                          imap(peek, repeat(token_seq)))), repeat(
                              (token_seq, ))))


set_rules(
    get_block,
    izip((TOKENS.PIF, TOKENS.PIFDEF, TOKENS.PIFNDEF), repeat(nested_block)),
    non_nested_block)


def expand(arguments, macros):
    """Lazily macro-expand a token stream, yielding the resulting tokens.

    Identifiers/keywords (and any token present in *macros*) are replaced by
    their expansion; all other tokens pass through unchanged.
    """
    for arg in imap(consume, repeat(arguments)):
        if isinstance(arg, (IDENTIFIER, KEYWORD)) or arg in macros:
            # NOTE(review): three-argument get -> `macros` is a project mapping
            # type, not a plain dict; presumably (key, default-expansion,
            # token-stream-for-function-macros) -- confirm against its class.
            for token in macros.get(arg, INTEGER(digit(0), loc(arg)),
                                    arguments):
                yield token
        else:
            yield arg


def evaluate_expression(arguments, macros):
    return error_if_not_type(
Esempio n. 29
0
    _ = error_if_not_type(c_type(primary_exp), PointerType), \
        error_if_not_type(c_type(c_type(primary_exp)), (StructType, UnionType))
    member_name = error_if_not_type(consume(tokens, EOFLocation), IDENTIFIER)
    return ElementSelectionThroughPointerExpression(
        primary_exp, member_name,
        c_type(member(c_type(c_type(primary_exp)), member_name))(l), l)


def inc_dec(tokens, symbol_table, primary_exp):  # Postfix unary operators ++, --
    """Build a postfix increment/decrement node for *primary_exp*."""
    token = consume(tokens)
    node_type = rules(inc_dec)[token]
    return node_type(primary_exp, c_type(primary_exp)(loc(token)), loc(token))


set_rules(inc_dec, ((TOKENS.PLUS_PLUS, PostfixIncrementExpression),
                    (TOKENS.MINUS_MINUS, PostfixDecrementExpression)))


def postfix_expression(tokens, symbol_table):
    """
    : primary_expression
    (       '[' expression ']'
            |   '(' ')'
            |   '(' argument_expression_list ')'
            |   '.' IDENTIFIER
            |   '->' IDENTIFIER
            |   '++'
            |   '--'        )*
    """
    type_name, expression, initializer, primary_expression = imap(
        symbol_table.__getitem__,
Esempio n. 30
0
                                                  references))))
    # print 'done ...'
    # print '.\n'.join(imap(str, references))


def no_optimization(instrs):
    """Pass-through pass: emit the next instruction unchanged as a 1-tuple."""
    instr = consume(instrs)
    return (instr,)


zero_level_optimization = no_optimization


def first_level_optimization(instrs):
    """Dispatch on the next instruction's type; defaults to pass-through."""
    rule = get_rule(first_level_optimization, peek(instrs), hash_funcs=(type,))
    return rule(instrs)


set_rules(first_level_optimization,
          ((Allocate, remove_allocation), (Pass, remove_pass)),
          zero_level_optimization)


def optimize(instrs, level=zero_level_optimization):
    """Optimize an instruction stream at the given optimization *level*.

    Resets the module-level bookkeeping tables used by the optimization
    passes, applies *level* to the stream until it is exhausted, then fixes
    up instruction references over the combined output.
    """
    # NOTE: module-global mutable state -- not safe for concurrent or nested
    # optimize() calls.
    global new_instructions, old_instructions, deleted_instructions
    new_instructions = {}
    old_instructions = defaultdict(list)
    deleted_instructions = []
    return update_instruction_references(
        chain.from_iterable(imap(level, takewhile(peek, repeat(instrs)))))
Esempio n. 31
0
def do_while_stmnt(tokens, symbol_table):
    """Parse: 'do' statement 'while' '(' expression ')' ';'"""
    location = loc(error_if_not_value(tokens, TOKENS.DO))

    def exp(tokens, symbol_table):
        # Generator on purpose: the condition tokens come AFTER the loop body,
        # so parsing "while ( expr ) ;" must be deferred until the statement
        # parser below has consumed the body.
        expression = symbol_table['__ expression __']
        _, _ = error_if_not_value(tokens, TOKENS.WHILE), error_if_not_value(tokens, TOKENS.LEFT_PARENTHESIS)
        expr = expression(tokens, symbol_table)
        _, _ = error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS), error_if_not_value(tokens, TOKENS.SEMICOLON)
        yield expr

    # exp(...) only creates the generator here; the body is parsed first.
    yield DoWhileStatement(exp(tokens, symbol_table), symbol_table['__ statement __'](tokens, symbol_table), location)


def while_stmnt(tokens, symbol_table):
    """Parse: 'while' '(' expression ')' statement"""
    location = loc(error_if_not_value(tokens, TOKENS.WHILE))
    _ = error_if_not_value(tokens, TOKENS.LEFT_PARENTHESIS)
    condition = symbol_table['__ expression __'](tokens, symbol_table)
    _ = error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    body = symbol_table['__ statement __'](tokens, symbol_table)
    yield WhileStatement(condition, body, location)


def iteration_statement(tokens, symbol_table):
    """
        : 'while' '(' expression ')' statement
        | 'do' statement 'while' '(' expression ')' ';'
        | 'for' '(' expression?; expression?; expression? ')' statement
    """
    parse = rules(iteration_statement)[peek(tokens, '')]
    return parse(tokens, symbol_table)
set_rules(
    iteration_statement,
    ((TOKENS.WHILE, while_stmnt), (TOKENS.DO, do_while_stmnt), (TOKENS.FOR, for_stmnt))
)
Esempio n. 32
0
        label_name = consume(tokens)
        if peek_or_terminal(tokens) == TOKENS.COLON:
            return symbol_table['__ labeled_statement __'](chain(
                (label_name, ), consume_all(tokens)), symbol_table)
            # return label_stmnt(label_name, statement(tokens, symbol_table))
        # it must be an expression, TODO: figure out a way without using dangerous chain!
        # tokens = chain((label_name, consume(tokens)), tokens)
        tokens = chain((label_name, ), consume_all(tokens))
        expr, _ = symbol_table['__ expression __'](
            tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    if peek_or_terminal(tokens) is not terminal:
        expr, _ = symbol_table['__ expression __'](
            tokens, symbol_table), error_if_not_value(tokens, TOKENS.SEMICOLON)
        return repeat(expr, 1)

    raise ValueError(
        '{l} No rule could be found to create statement, got {got}'.format(
            l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')))


# Merge the rule tables of every statement-kind parser into `statement`'s own
# dispatch table (each trigger token maps to the sub-parser that owns it), and
# register the two purely token-driven cases: '{' -> compound statement and
# ';' -> empty statement.
statement_funcs = labeled_statement, selection_statement, iteration_statement, jump_statement
set_rules(
    statement,
    chain(
        chain.from_iterable(
            imap(izip, imap(rules, statement_funcs),
                 imap(repeat, statement_funcs))),
        ((TOKENS.LEFT_BRACE, _comp_stmnt),
         (TOKENS.SEMICOLON, _empty_statement))))
Esempio n. 33
0
from front_end.parser.types import void_pointer_type
from front_end.parser.ast.expressions import SizeOfExpression, UnaryExpression, DereferenceExpression
from front_end.parser.ast.expressions import PrefixIncrementExpression, PrefixDecrementExpression, AddressOfExpression
from front_end.parser.ast.expressions import AddressOfLabelExpression

from front_end.parser.expressions.reduce import reduce_expression
from front_end.parser.expressions.cast import type_name_or_unary_expression

from utils.errors import error_if_not_type, error_if_empty


def increment_decrement(tokens, symbol_table):
    """Parse a prefix '++'/'--' expression."""
    operator = consume(tokens)
    unary_exp = symbol_table['__ unary_expression __'](tokens, symbol_table)
    node_type = rules(increment_decrement)[operator]
    return node_type(unary_exp, c_type(unary_exp)(loc(operator)), loc(operator))
set_rules(
    increment_decrement,
    (
        (TOKENS.PLUS_PLUS, PrefixIncrementExpression),
        (TOKENS.MINUS_MINUS, PrefixDecrementExpression),
    )
)


def size_of(tokens, symbol_table):  # 'sizeof' unary_expression | '(' type_name ')'
    """Parse a sizeof expression; the operand may be a type name or unary expr."""
    location = loc(consume(tokens))
    operand = type_name_or_unary_expression(tokens, symbol_table)
    return SizeOfExpression(operand, location)


def address_of(cast_exp, operator):
    """'&' operand: result is a pointer to the operand's type."""
    l = loc(operator)
    pointer_type = PointerType(c_type(cast_exp)(l), loc(cast_exp))
    return AddressOfExpression(cast_exp, pointer_type, l)


def dereference(cast_exp, operator):
    """'*' operand must be a pointer; the result has the pointed-to type."""
    _ = error_if_not_type(c_type(cast_exp), PointerType)
    result_type = c_type(c_type(cast_exp))(loc(operator))
    return DereferenceExpression(cast_exp, result_type, loc(operator))
Esempio n. 34
0
def non_nested_block(token_seq):
    """Emit a single (non-directive) token from the stream."""
    token = consume(token_seq)
    yield token


def get_block(token_seq, terminating_with=frozenset((TOKENS.PENDIF,))):
    """Lazily emit the tokens of a preprocessor block.

    Peeks at *token_seq* and, while the upcoming token is not in
    *terminating_with*, dispatches to the registered rule (nested '#if*'
    blocks vs. a single plain token) and chains the results together.
    """
    # frozenset default instead of a mutable set literal (safer default arg,
    # identical membership semantics).
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
set_rules(get_block, izip((TOKENS.PIF, TOKENS.PIFDEF, TOKENS.PIFNDEF), repeat(nested_block)), non_nested_block)


def expand(arguments, macros):
    """Lazily macro-expand a token stream, yielding the resulting tokens."""
    for arg in imap(consume, repeat(arguments)):
        if isinstance(arg, (IDENTIFIER, KEYWORD)) or arg in macros:
            # NOTE(review): three-argument get -> `macros` is a project mapping
            # type, not a plain dict; presumably (key, default-expansion,
            # token-stream-for-function-macros) -- confirm against its class.
            for token in macros.get(arg, INTEGER(digit(0), loc(arg)), arguments):
                yield token
        else:
            yield arg


def evaluate_expression(arguments, macros):
    """Macro-expand, parse as a constant expression and return its numeric value."""
    value = exp(constant_expression(expand(arguments, macros)))
    return error_if_not_type(value, (long, int, float))

Esempio n. 35
0
    _type, _eval = (long_suffix in suffix_str and LongType(
        _type, loc(_type), unsigned(_type))) or _type, long
    _type, _eval = (long_long_suffix in suffix_str and LongType(
        _type, loc(_type), unsigned(_type))) or _type, long
    return _type, _eval


def integer_literal(tokens):
    """Build a ConstantExpression from an integer token (decimal/hex/octal)."""
    token = consume(tokens)
    _type, _eval = get_type(suffix(token).lower(), loc(token))
    base = rules(integer_literal)[type(token)]
    return ConstantExpression(_eval(token, base), _type, loc(token))


set_rules(integer_literal, ((INTEGER, 10), (HEXADECIMAL, 16), (OCTAL, 8)))


def float_literal(tokens):
    """Build a DoubleType ConstantExpression from a float token."""
    token = consume(tokens)
    location = loc(token)
    return ConstantExpression(float(token), DoubleType(location), location)


def array_identifier(tokens, symbol_table):
    """Build an IdentifierExpression for a declared array name."""
    name = consume(tokens)
    location = loc(name)
    return IdentifierExpression(name, c_type(symbol_table[name])(location),
                                location)


def function_identifier(
Esempio n. 36
0

def arrow_operator(tokens, symbol_table, primary_exp):
    """Parse '->' member access through a pointer to struct/union."""
    l = loc(consume(tokens))
    _ = error_if_not_type(c_type(primary_exp), PointerType)
    _ = error_if_not_type(c_type(c_type(primary_exp)), (StructType, UnionType))
    member_name = error_if_not_type(consume(tokens, EOFLocation), IDENTIFIER)
    member_type = c_type(member(c_type(c_type(primary_exp)), member_name))(l)
    return ElementSelectionThroughPointerExpression(primary_exp, member_name, member_type, l)


def inc_dec(tokens, symbol_table, primary_exp):  # Postfix unary operators ++, --
    """Build a postfix increment/decrement node for *primary_exp*."""
    token = consume(tokens)
    node_type = rules(inc_dec)[token]
    return node_type(primary_exp, c_type(primary_exp)(loc(token)), loc(token))
set_rules(inc_dec, ((TOKENS.PLUS_PLUS, PostfixIncrementExpression), (TOKENS.MINUS_MINUS, PostfixDecrementExpression)))


def postfix_expression(tokens, symbol_table):
    """
    : primary_expression
    (       '[' expression ']'
            |   '(' ')'
            |   '(' argument_expression_list ')'
            |   '.' IDENTIFIER
            |   '->' IDENTIFIER
            |   '++'
            |   '--'        )*
    """
    type_name, expression, initializer, primary_expression = imap(
        symbol_table.__getitem__,
Esempio n. 37
0
def INCLUDE(token_seq, macros):
    """Handle an '#include' directive: resolve, load, tokenize and preprocess
    the target file, returning its preprocessed token stream."""
    line = get_line(token_seq)
    file_path = consume(line) and get_rule(INCLUDE, peek_or_terminal(line), hash_funcs=(type, identity))(line, macros)
    search_paths = (os.getcwd(),)
    _ = error_if_not_empty(line)
    return chain(
        macros['__ preprocess __'](
            tokenize(load(file_path, chain(macros['__ include_dirs __'], search_paths))),
            macros
        ),
    )
# Flatten with chain: every entry handed to set_rules must itself be a
# (token, rule) pair -- the previous form placed the raw izip iterator in the
# middle of the pair tuple, so it was not a pair like its siblings.
set_rules(
    INCLUDE,
    chain(
        (
            (STRING, string_file_path),
            (TOKENS.LESS_THAN, standard_lib_file_path),
        ),
        izip((IDENTIFIER, KEYWORD), repeat(identifier_file_path)),
    )
)


def _func_macro_arguments(line):
    symbol_table = SymbolTable()
    while peek(line, TOKENS.RIGHT_PARENTHESIS) != TOKENS.RIGHT_PARENTHESIS:
        if peek(line) == TOKENS.ELLIPSIS:
            arg = FunctionMacroVariadicArgument(IDENTIFIER('__VA_ARGS__', loc(consume(line))))
        else:
            arg = FunctionMacroArgument(error_if_not_type(consume(line, EOFLocation), (IDENTIFIER, KEYWORD)))
            if peek_or_terminal(line) == TOKENS.ELLIPSIS:
                arg = FunctionMacroVariadicArgument(IDENTIFIER(arg, loc(consume(line))))
        symbol_table[arg] = arg     # check for duplicate argument name
Esempio n. 38
0
from back_end.emitter.cpu import float_names, word_names, signed_word_names, word_type_sizes

from back_end.emitter.cpu import word_size, half_word_size, quarter_word_size, one_eighth_word_size
from back_end.emitter.cpu import word_type, half_word_type, quarter_word_type, one_eighth_word_type
from back_end.emitter.cpu import float_size, half_float_size, pack_binaries


def numeric_type_size(ctype):
    """Look up the storage size for a scalar/numeric C type."""
    return rules(numeric_type_size)[type(ctype)]
set_rules(
    numeric_type_size,
    chain(
        izip((LongType, PointerType, AddressType, DoubleType),
             repeat(word_size)),
        izip((IntegerType, EnumType, FloatType), repeat(half_word_size)),
        ((CharType, one_eighth_word_size), (ShortType, quarter_word_size)),
    )
)
# numeric_type_size.rules = {
#     CharType: word_size,  # note we have to update vm system calls when working with char_arrays ...
#     ShortType: word_size,
#
#     IntegerType: word_size,
#     EnumType: word_size,
#
#     LongType: word_size,
#     PointerType: word_size,
#
Esempio n. 39
0
from back_end.emitter.expressions.cast import cast


def inc_dec(expr, symbol_table):
    """Emit instructions for a postfix '++'/'--' expression."""
    ctype = c_type(expr)
    assert not isinstance(ctype, ArrayType) and isinstance(ctype, IntegralType)
    delta = rules(inc_dec)[type(expr)]
    if isinstance(ctype, PointerType):
        # pointer arithmetic steps by the size of the referenced type ...
        delta *= size_extended(c_type(ctype))
    address_instrs = all_but_last(
        symbol_table['__ expression __'](exp(expr), symbol_table), Loads, loc(expr)
    )
    return get_postfix_update(size(ctype))(address_instrs, delta, loc(expr))
set_rules(inc_dec, ((PostfixIncrementExpression, 1), (PostfixDecrementExpression, -1)))


def func_type(expr):
    """Return the FunctionType of *expr*, unwrapping a pointer-to-function."""
    ctype = c_type(expr)
    if isinstance(ctype, FunctionType):
        return ctype
    if isinstance(ctype, PointerType) and isinstance(c_type(ctype), FunctionType):
        return c_type(ctype)
    raise ValueError('{l} Expected FunctionType or Pointer to FunctionType got {g}'.format(
        l=loc(expr), g=c_type(expr)
    ))


def call_function(function_call_expr, symbol_table):
    l, expr = loc(function_call_expr), left_exp(function_call_expr)
Esempio n. 40
0
                                        loc(expr))


def const_struct_expr(expr):
    """Emit constant binaries for each flattened member, in reverse order."""
    members = reverse(flatten(exp(expr)))
    return chain.from_iterable(imap(constant_expression, members))


def const_union_expr(expr):
    """Emit a constant union initializer: pad the unused space, then the member."""
    assert len(exp(expr)) <= 1
    padding = size(c_type(expr)) - sum(imap(size, imap(c_type, exp(expr))))
    return chain(allocate(padding, loc(expr)), const_struct_expr(exp(expr)))


def constant_expression(expr, *_):
    """Dispatch constant-binary emission on the expression's C type."""
    emit = rules(constant_expression)[type(c_type(expr))]
    return emit(expr)


set_rules(
    constant_expression,
    chain(
        izip(integral_types, repeat(const_integral_expr)),
        (
            (FloatType, const_float_expr),
            (DoubleType, const_double_expr),
            (UnionType, const_union_expr),
            (StructType, const_struct_expr),
            (ArrayType, const_struct_expr),
            (StringType, const_string_expr),
        )
    )
)
Esempio n. 41
0
                                     loc(instr)),
                        relative_jump(Offset(instr, loc(instr)), loc(instr)),
                    ))

                cases[error_if_not_type(exp(exp(instr.case)),
                                        (int, long, str))] = Offset(
                                            start, loc(instr))
                del instr.case
            switch_body_instrs.append(instr)

        max_switch_value = 2**(8 *
                               size_arrays_as_pointers(c_type(exp(stmnt)))) - 1
        for instr in jump_table(loc(stmnt), cases, allocation_table,
                                max_switch_value, switch_body_instrs):
            yield instr
        _ = pop(symbol_table)

    return chain(symbol_table['__ expression __'](exp(stmnt), symbol_table),
                 body(stmnt, symbol_table, end_switch), (end_switch, ))


def selection_statement(stmnt, symbol_table):
    """Dispatch on the selection statement's AST node type."""
    handler = rules(selection_statement)[type(stmnt)]
    return handler(stmnt, symbol_table)


set_rules(
    selection_statement,
    (
        (IfStatement, if_statement),
        (SwitchStatement, switch_statement),
        (CaseStatement, case_statement),
        (DefaultStatement, case_statement),
    )
)
Esempio n. 42
0
from back_end.emitter.expressions.cast import cast


def inc_dec(expr, symbol_table):
    """Emit instructions for a postfix '++'/'--' expression."""
    ctype = c_type(expr)
    assert not isinstance(ctype, ArrayType) and isinstance(ctype, IntegralType)
    delta = rules(inc_dec)[type(expr)]
    if isinstance(ctype, PointerType):
        # pointer arithmetic steps by the size of the referenced type ...
        delta *= size_extended(c_type(ctype))
    address_instrs = all_but_last(
        symbol_table['__ expression __'](exp(expr), symbol_table), Loads, loc(expr)
    )
    return get_postfix_update(size(ctype))(address_instrs, delta, loc(expr))


set_rules(inc_dec,
          ((PostfixIncrementExpression, 1), (PostfixDecrementExpression, -1)))


def func_type(expr):
    """Return the FunctionType of *expr*, unwrapping a pointer-to-function."""
    ctype = c_type(expr)
    if isinstance(ctype, FunctionType):
        return ctype
    if isinstance(ctype, PointerType) and isinstance(c_type(ctype), FunctionType):
        return c_type(ctype)
    raise ValueError(
        '{l} Expected FunctionType or Pointer to FunctionType got {g}'.
        format(l=loc(expr), g=c_type(expr)))


def call_function(function_call_expr, symbol_table):
Esempio n. 43
0
# Map each type-specifier keyword token to the parser that consumes it and
# builds the corresponding CType; `no_type_specifier` is the fallback rule
# for tokens with no entry (e.g. a typedef'd TYPE_NAME).
set_rules(
    type_specifier, {
        TOKENS.VOID:
        lambda tokens, symbol_table: VoidType(loc(consume(tokens))),
        TOKENS.CHAR:
        lambda tokens, symbol_table: CharType(loc(consume(tokens))),
        TOKENS.INT:
        lambda tokens, symbol_table: IntegerType(loc(consume(tokens))),
        TOKENS.FLOAT:
        lambda tokens, symbol_table: FloatType(loc(consume(tokens))),
        TOKENS.DOUBLE:
        lambda tokens, symbol_table: DoubleType(loc(consume(tokens))),
        TOKENS.LONG:
        _long,
        TOKENS.SHORT:
        _short,
        TOKENS.SIGNED:
        parse_sign_token,
        TOKENS.UNSIGNED:
        parse_sign_token,
        TOKENS.STRUCT:
        composite_specifier,
        TOKENS.UNION:
        lambda tokens, symbol_table: composite_specifier(
            tokens, symbol_table, obj_type=UnionType),
        TOKENS.ENUM:
        lambda tokens, symbol_table: composite_specifier(tokens,
                                                         symbol_table,
                                                         obj_type=EnumType,
                                                         member_parse_func=
                                                         parse_enum_members),
    }, no_type_specifier)
Esempio n. 44
0
def static_def_binaries(definition, default_bins=()):
    """Return the static binaries for a definition's initializer."""
    init = initialization(definition)
    instrs = static_binaries(init, default_bins)
    # TODO: deal with static pointer initialized by address types ...
    assert isinstance(init, (EmptyExpression, Initializer, ConstantExpression))
    return instrs


def static_ctype_binaries(ctype):
    """Dispatch static-binary emission on the object's C type."""
    emit = rules(static_ctype_binaries)[type(ctype)]
    return emit(ctype)
set_rules(
    static_ctype_binaries,
    chain(
        izip(integral_types, repeat(static_integral_ctype)),
        izip(real_types, repeat(static_real_ctype)),
        (
            (ArrayType, static_array_ctype),
            (StructType, static_struct_ctype),
            (UnionType, static_union_ctype),
            (StringType, static_array_ctype),
        )
    )
)


def static_exp_binaries(const_exp):
    """Dispatch static-binary emission on the constant expression's C type."""
    emit = rules(static_exp_binaries)[type(c_type(const_exp))]
    return emit(const_exp)
set_rules(
    static_exp_binaries,
    chain(
        izip(integral_types, repeat(static_integral_exp)), izip(real_types, repeat(static_real_exp)),
        (
Esempio n. 45
0
from itertools import chain

from utils.rules import set_rules
from front_end.loader.locations import loc
from front_end.parser.ast.expressions import TernaryExpression, exp, left_exp, right_exp

from back_end.virtual_machine.instructions.architecture import Pass, Offset, relative_jump, get_jump_false

from back_end.emitter.c_types import c_type, size_arrays_as_pointers


def ternary_expression(expr, symbol_table):
    """Emit instructions for 'cond ? left : right'.

    Layout: <cond + jump-if-false to else-label> <left> <jump to end-label>
    <else-label> <right> <end-label>, where Pass instructions serve as the
    jump-target labels.
    """
    if_false_instr, end_of_conditional_instr = Pass(loc(expr)), Pass(loc(expr))
    expression = symbol_table['__ expression __']
    return chain(
        get_jump_false(size_arrays_as_pointers(c_type(exp(expr))))(expression(
            exp(expr), symbol_table), Offset(if_false_instr, loc(expr)),
                                                                   loc(expr)),
        expression(left_exp(expr), symbol_table),
        relative_jump(
            Offset(end_of_conditional_instr, loc(end_of_conditional_instr)),
            loc(expr)),
        (if_false_instr, ),
        expression(right_exp(expr), symbol_table),
        (end_of_conditional_instr, ),
    )


# NOTE(review): a set, not (key, value) pairs -- presumably set_rules maps
# each listed node type to this function itself; confirm against utils.rules.
set_rules(ternary_expression, {TernaryExpression})
Esempio n. 46
0
                tokens, TOKENS.SEMICOLON)
        yield expr

    yield DoWhileStatement(
        exp(tokens, symbol_table),
        symbol_table['__ statement __'](tokens, symbol_table), location)


def while_stmnt(tokens, symbol_table):
    """Parse: 'while' '(' expression ')' statement"""
    location = loc(error_if_not_value(tokens, TOKENS.WHILE))
    _ = error_if_not_value(tokens, TOKENS.LEFT_PARENTHESIS)
    condition = symbol_table['__ expression __'](tokens, symbol_table)
    _ = error_if_not_value(tokens, TOKENS.RIGHT_PARENTHESIS)
    body = symbol_table['__ statement __'](tokens, symbol_table)
    yield WhileStatement(condition, body, location)


def iteration_statement(tokens, symbol_table):
    """
        : 'while' '(' expression ')' statement
        | 'do' statement 'while' '(' expression ')' ';'
        | 'for' '(' expression?; expression?; expression? ')' statement
    """
    parse = rules(iteration_statement)[peek(tokens, '')]
    return parse(tokens, symbol_table)


set_rules(
    iteration_statement,
    ((TOKENS.WHILE, while_stmnt), (TOKENS.DO, do_while_stmnt),
     (TOKENS.FOR, for_stmnt)))
Esempio n. 47
0
    raise ValueError('{l} Expected type_specifier or TYPE_NAME got {got}'.format(
        l=loc(peek(tokens, EOFLocation)), got=peek(tokens, '')
    ))
# Map each type-specifier keyword token to the parser that consumes it and
# builds the corresponding CType; `no_type_specifier` is the fallback rule
# for tokens with no entry (e.g. a typedef'd TYPE_NAME).
set_rules(
    type_specifier,
    {
        TOKENS.VOID: lambda tokens, symbol_table: VoidType(loc(consume(tokens))),
        TOKENS.CHAR: lambda tokens, symbol_table: CharType(loc(consume(tokens))),
        TOKENS.INT: lambda tokens, symbol_table: IntegerType(loc(consume(tokens))),
        TOKENS.FLOAT: lambda tokens, symbol_table: FloatType(loc(consume(tokens))),
        TOKENS.DOUBLE: lambda tokens, symbol_table: DoubleType(loc(consume(tokens))),

        TOKENS.LONG: _long,
        TOKENS.SHORT: _short,

        TOKENS.SIGNED: parse_sign_token,
        TOKENS.UNSIGNED: parse_sign_token,

        TOKENS.STRUCT: composite_specifier,
        TOKENS.UNION: lambda tokens, symbol_table: composite_specifier(
            tokens, symbol_table, obj_type=UnionType),
        TOKENS.ENUM: lambda tokens, symbol_table: composite_specifier(
            tokens, symbol_table, obj_type=EnumType, member_parse_func=parse_enum_members
        ),
    },
    no_type_specifier

)


def type_qualifiers(tokens, _, defaults=None):  # : ('const' or volatile or *args)*
Esempio n. 48
0
        (start_of_loop,),
        get_jump_false(size_arrays_as_pointers(c_type(exp(stmnt))))(
            symbol_table['__ expression __'](exp(stmnt), symbol_table), Offset(end_of_loop, loc(end_of_loop)), loc(end_of_loop)
        ),
        loop_body(stmnt.statement, symbol_table, start_of_loop, end_of_loop),
        relative_jump(Offset(start_of_loop, loc(start_of_loop)), loc(end_of_loop)),
        (end_of_loop,)
    )


def do_while_statement(stmnt, symbol_table):
    """Emit instructions for a do-while loop.

    Layout: <start-label> <body> <cond + jump-if-true back to start>
    <end-label>, where Pass instructions act as the jump-target labels.
    """
    start_of_loop, end_of_loop = Pass(loc(stmnt)), Pass(loc(stmnt))
    yield start_of_loop  # do while loops contain the update expression after their body ...
    for instr in loop_body(stmnt.statement, symbol_table, start_of_loop, end_of_loop):
        yield instr
    # condition is evaluated after the body; jump back while it holds true
    for instr in get_jump_true(size_arrays_as_pointers(c_type(exp(stmnt))))(
            symbol_table['__ expression __'](exp(stmnt), symbol_table),
            Offset(start_of_loop, loc(start_of_loop)),
            loc(start_of_loop)
    ):
        yield instr
    yield end_of_loop


def iteration_statement(stmnt, symbol_table):
    """Dispatch loop emission on the statement's AST node type."""
    emit = rules(iteration_statement)[type(stmnt)]
    return emit(stmnt, symbol_table)
set_rules(
    iteration_statement,
    (
        (ForStatement, for_statement),
        (WhileStatement, while_statement),
        (DoWhileStatement, do_while_statement),
    )
)