Example #1
def pre_processor(char_stream, location):  # returns pre_processing symbol or #identifier ...
    values = consume(char_stream)
    if peek_or_terminal(char_stream) == TOKENS.NUMBER_SIGN:  # token concatenation symbol ...
        values += consume(char_stream)
    else:
        _ = exhaust(takewhile({' ', '\t', '\a'}.__contains__, char_stream))
        values += ''.join(takewhile(letters.__contains__, char_stream))
    return rules(pre_processor).get(values, IDENTIFIER)(values, location)
Example #2
File: parser.py  Project: mzsk/c_compiler
def pre_processor(char_stream, location):  # returns pre_processing symbol or #identifier ...
    values = consume(char_stream)
    if peek_or_terminal(char_stream) == TOKENS.NUMBER_SIGN:  # token concatenation symbol ...
        values += consume(char_stream)
    else:
        _ = exhaust(takewhile({' ', '\t', '\a'}.__contains__, char_stream))
        values += ''.join(takewhile(letters.__contains__, char_stream))
    return rules(pre_processor).get(values, IDENTIFIER)(values, location)
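
All of these snippets lean on a small set of stream utilities from the compiler project: peek, peek_or_terminal, consume, the terminal sentinel, and a takewhile that, judging from how it is used (e.g. Example #29, where the closing TOKENS.GREATER_THAN is still expected in the stream afterwards), stops before the first failing item instead of discarding it the way itertools.takewhile does. The project's real implementations are not shown on this page; the code below is only a minimal stand-in sketch (Python 3, one item of lookahead via a hypothetical CharStream wrapper) so the character- and token-level examples can be tried out in isolation.

# Minimal stand-in sketch (assumed, not the project's real code) of the stream
# helpers the examples on this page rely on.
terminal = object()  # sentinel meaning "the stream is exhausted"

class CharStream(object):
    """Hypothetical wrapper giving any iterable one item of lookahead."""
    def __init__(self, iterable):
        self._iterator = iter(iterable)
        self._lookahead = []

    def __iter__(self):
        return self

    def __next__(self):
        return self._lookahead.pop() if self._lookahead else next(self._iterator)

def peek(stream):
    if not stream._lookahead:
        stream._lookahead.append(next(stream._iterator))
    return stream._lookahead[-1]

def peek_or_terminal(stream):
    try:
        return peek(stream)
    except StopIteration:
        return terminal

def consume(stream):
    return next(stream)

def takewhile(predicate, stream):
    # unlike itertools.takewhile, the first item failing the predicate stays in the stream
    while peek_or_terminal(stream) is not terminal and predicate(peek(stream)):
        yield consume(stream)
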
Example #3
def remove_allocation(instrs):
    """
        optimize a sequence of one or more allocations ...
        take their sum; if it is zero, replace with the next instruction in case this one is referenced,
        otherwise do a single allocation and remove the rest.
        replace allocate 1 with POP, which only requires a single address translation vs 2 (instr, oprn) for allocate.
    """
    alloc_instrs = tuple(takewhile(lambda i: isinstance(i, Allocate) and isinstance(opern(i), (int, long)), instrs))

    if not alloc_instrs:  # Operand must be non-primitive type (Address) ... must wait for its value.
        yield consume(instrs)
    else:
        total = sum(imap(opern, alloc_instrs))

        if total:  # a non-zero allocation changes the state of the stack.
            if total in pop_instrs:
                new_instr = next(pop_instrs[total](loc(alloc_instrs[0])))
            elif len(alloc_instrs) == 1:  # a single allocation already carries the total, reuse it
                new_instr = alloc_instrs[0]
            else:  # collapse the run into a single Allocate of the summed total
                new_instr = Allocate(loc(alloc_instrs[-1]), total)
            yield replace_instrs(new_instr, alloc_instrs)
        else:  # stack remains unchanged, get next instruction for referencing, if one exists ...
            if peek_or_terminal(instrs) is terminal:
                yield replace_instr(Pass(loc(alloc_instrs[-1])), alloc_instrs)
            else:
                replace_instrs(peek(instrs), alloc_instrs)
Example #4
def remove_allocation(instrs):
    """
        optimize a sequence of one or more allocations ...
        take their sum; if it is zero, replace with the next instruction in case this one is referenced,
        otherwise do a single allocation and remove the rest.
        replace allocate 1 with POP, which only requires a single address translation vs 2 (instr, oprn) for allocate.
    """
    alloc_instrs = tuple(takewhile(lambda i: isinstance(i, Allocate) and isinstance(opern(i), (int, long)), instrs))

    if not alloc_instrs:  # Operand must be non-primitive type (Address) ... must wait for its value.
        yield consume(instrs)
    else:
        total = sum(imap(opern, alloc_instrs))

        if total:  # a non-zero allocation changes the state of the stack.
            if total in pop_instrs:
                new_instr = next(pop_instrs[total](loc(alloc_instrs[0])))
            elif len(alloc_instrs) == 1:  # a single allocation already carries the total, reuse it
                new_instr = alloc_instrs[0]
            else:  # collapse the run into a single Allocate of the summed total
                new_instr = Allocate(loc(alloc_instrs[-1]), total)
            yield replace_instrs(new_instr, alloc_instrs)
        else:  # stack remains unchanged, get next instruction for referencing, if one exists ...
            if peek_or_terminal(instrs) is terminal:
                yield replace_instr(Pass(loc(alloc_instrs[-1])), alloc_instrs)
            else:
                replace_instrs(peek(instrs), alloc_instrs)
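
Passes like remove_allocation and remove_pass are generators over a peekable instruction stream: each call consumes one run of instructions and yields its replacement, and Example #7's optimize drives them to exhaustion with chain.from_iterable(imap(level, takewhile(peek, repeat(instrs)))). The toy driver below expresses the same "apply the rule until the stream runs dry" loop imperatively, using the stand-in helpers sketched after Example #2; collapse_runs and run_pass are made-up names for illustration, not part of the project.

def collapse_runs(stream):
    """Hypothetical peephole rule: collapse a run of equal items into one item."""
    first = consume(stream)
    while peek_or_terminal(stream) == first:
        consume(stream)
    yield first

def run_pass(stream, rule=collapse_runs):
    stream = CharStream(stream)  # stand-in lookahead wrapper from the earlier sketch
    while peek_or_terminal(stream) is not terminal:
        for item in rule(stream):
            yield item

print(list(run_pass([1, 1, 1, 2, 3, 3])))  # -> [1, 2, 3]
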
Example #5
File: parser.py  Project: mzsk/c_compiler
def get_line(values):  # get all the tokens on the current line, being that preprocessor works on a line-by-line basis
    return takewhile(
        lambda token, initial_line_number=line_number(peek(values)): initial_line_number == line_number(token),
        values
    ) if peek_or_terminal(values) is not terminal else iter(())
Example #6
def remove_pass(instrs):
    """ replace 1 or more sequences of Pass by the next non-Pass instruction or Pass instruction  """
    pass_instrs = tuple(takewhile(lambda instr: isinstance(instr, Pass), instrs))
    if peek_or_terminal(instrs) is terminal:
        yield replace_instrs(pass_instrs[-1], pass_instrs[:-1])
    else:
        replace_instrs(peek(instrs), pass_instrs)
Example #7
def optimize(instrs, level=zero_level_optimization):
    global new_instructions, old_instructions, deleted_instructions
    new_instructions = {}
    old_instructions = defaultdict(list)
    deleted_instructions = []
    return update_instruction_references(
        chain.from_iterable(imap(level, takewhile(peek, repeat(instrs)))))
Example #8
def string_literal(tokens):
    location = loc(peek(tokens))    # join adjacent strings into a single string ...
    token = ''.join(takewhile(lambda t: type(t) is STRING, tokens)) + '\0'
    return ConstantExpression(
        imap(char_literal, imap(iter, token)),
        StringType(len(token), location),
        location
    )
Example #9
def type_qualifiers(tokens, _, defaults=None):  # : ('const' or volatile or *args)*
    values = set(takewhile(rules(type_qualifiers).__contains__, tokens))
    const, volatile = imap(values.__contains__, (TOKENS.CONST, TOKENS.VOLATILE))
    if not values and not defaults:
        raise ValueError('{l} Expected TOKENS.CONST or TOKEN.VOLATILE got {g}'.format(
            l=loc(peek(tokens, EOFLocation)), g=peek(tokens, '')
        ))
    return const or defaults[0], volatile or defaults[1]
Example #10
File: parser.py  Project: mzsk/c_compiler
def number(char_stream, hexadecimal_chars={'x', 'X'}):
    initial_char, _digits = '', digits
    if peek_or_terminal(char_stream) == digit(0):
        initial_char = consume(char_stream)
        if peek_or_terminal(char_stream) in hexadecimal_chars:
            initial_char += consume(char_stream)
            _digits = hexadecimal_digits
    return initial_char + ''.join(takewhile(_digits.__contains__, char_stream))
Example #11
def remove_pass(instrs):
    """ replace 1 or more sequences of Pass by the next non-Pass instruction or Pass instruction  """
    pass_instrs = tuple(
        takewhile(lambda instr: isinstance(instr, Pass), instrs))
    if peek_or_terminal(instrs) is terminal:
        yield replace_instrs(pass_instrs[-1], pass_instrs[:-1])
    else:
        replace_instrs(peek(instrs), pass_instrs)
Example #12
def number(char_stream, hexadecimal_chars={'x', 'X'}):
    initial_char, _digits = '', digits
    if peek_or_terminal(char_stream) == digit(0):
        initial_char = consume(char_stream)
        if peek_or_terminal(char_stream) in hexadecimal_chars:
            initial_char += consume(char_stream)
            _digits = hexadecimal_digits
    return initial_char + ''.join(takewhile(_digits.__contains__, char_stream))
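
For a quick feel of how number behaves, the lines below run it over a few character streams, using the stand-in helpers sketched after Example #2 and filling the module-level names it expects from the standard library; digit(0) is assumed to simply be the character '0'.

from string import digits, hexdigits as hexadecimal_digits

def digit(n):
    return str(n)  # assumption: digit(0) is just the character '0'

print(number(CharStream('0x1f + 2')))  # -> '0x1f'  (the hex prefix switches the digit set)
print(number(CharStream('42;')))       # -> '42'    (stops before the ';', which stays in the stream)
print(number(CharStream('0777)')))     # -> '0777'  (leading 0 without x/X keeps decimal digits)
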
Example #13
def exhaust_remaining_blocks(token_seq):
    exhaust(
        imap(
            apply,
            imap(
                rules(exhaust_remaining_blocks).__getitem__,
                takewhile(TOKENS.PENDIF.__ne__, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #14
File: load.py  Project: qs9816/c_compiler
def get_repositioned_line(char_seq, location):  # get next line ...
    while not isinstance(peek(char_seq), NewLineStr):
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):
            _ = exhaust(takewhile(lambda token: isinstance(token, NewLineStr), char_seq))
            for char in get_repositioned_line(char_seq, location):
                yield char
        else:
            yield Str(char, location)
Example #15
File: load.py  Project: qs9816/c_compiler
def merge_lines(char_seq):
    while True:
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):  # if current char is \ followed by end of line seq
            _ = exhaust(takewhile(lambda token: isinstance(token, NewLineStr), char_seq))
            for char in get_repositioned_line(char_seq, loc(char)):
                yield char
        else:
            yield char
Example #16
def get_block(token_seq, terminating_with={TOKENS.PENDIF}):
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #17
def type_qualifiers(tokens, _, defaults=None):  # : ('const' or volatile or *args)*
    values = set(takewhile(rules(type_qualifiers).__contains__, tokens))
    const, volatile = imap(values.__contains__, (TOKENS.CONST, TOKENS.VOLATILE))
    if not values and not defaults:
        raise ValueError('{l} Expected TOKENS.CONST or TOKEN.VOLATILE got {g}'.format(
            l=loc(peek(tokens, EOFLocation)), g=peek(tokens, '')
        ))
    return const or defaults[0], volatile or defaults[1]
Example #18
def exhaust_remaining_blocks(token_seq):
    exhaust(
        imap(
            apply,
            imap(
                rules(exhaust_remaining_blocks).__getitem__,
                takewhile(TOKENS.PENDIF.__ne__, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #19
def get_block(token_seq, terminating_with={TOKENS.PENDIF}):
    return chain.from_iterable(
        imap(
            apply,
            imap(
                rules(get_block).__getitem__,
                takewhile(lambda token: token not in terminating_with, imap(peek, repeat(token_seq)))
            ),
            repeat((token_seq,))
        )
    )
Example #20
def get_repositioned_line(char_seq, location):  # get next line ...
    while not isinstance(peek(char_seq), NewLineStr):
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):
            _ = exhaust(
                takewhile(lambda token: isinstance(token, NewLineStr),
                          char_seq))
            for char in get_repositioned_line(char_seq, location):
                yield char
        else:
            yield Str(char, location)
Example #21
def merge_lines(char_seq):
    while True:
        char = consume(char_seq)
        if char == '\\' and isinstance(peek(char_seq), NewLineStr):  # if current char is \ followed by end of line seq
            _ = exhaust(takewhile(lambda token: isinstance(token, NewLineStr), char_seq))
            for char in get_repositioned_line(char_seq, loc(char)):
                yield char
        else:
            yield char
Example #22
File: load.py  Project: qs9816/c_compiler
def get_newline_if_possible(char_seq):
    return ''.join(takewhile(
        lambda c, new_line=iter(linesep), _seq=char_seq: c == next(new_line) and (consume(_seq) or 1),
        imap(peek, repeat(char_seq))
    ))
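
The predicate in Example #22 packs two tricks into one lambda: the default arguments (new_line=iter(linesep), _seq=char_seq) are evaluated once when the lambda is built, giving it private state across calls, and consume(_seq) or 1 consumes the peeked character while keeping the whole expression truthy. The toy below isolates the stateful-default trick with plain strings and the standard itertools.takewhile; matches_separator is a made-up name, and it uses next(sep, None) instead of relying on StopIteration to end the scan the way the original does.

from itertools import takewhile as std_takewhile

# the default argument is created once, so the iterator keeps its position between calls
matches_separator = lambda c, sep=iter('\r\n'): c == next(sep, None)

print(list(std_takewhile(matches_separator, '\r\nrest')))  # ['\r', '\n']
# a second scan matches nothing, since sep is already exhausted; this is why the
# original builds the lambda (and its iter(linesep) default) fresh on every call
print(list(std_takewhile(matches_separator, '\r\nmore')))  # []
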
Example #23
def get_line(tokens):
    return iter(()) if peek_or_terminal(tokens) is terminal else takewhile(
        lambda t, current_line_number=line_number(peek(tokens)): line_number(t) == current_line_number,
        tokens
    )
Example #24
def keyword_or_identifier(char_stream, location):
    values = ''.join(takewhile(alpha_numeric.__contains__, char_stream))
    return rules(keyword_or_identifier).get(values, IDENTIFIER)(values, location)
Example #25
def suffix(char_stream):
    return ''.join(takewhile(letters.__contains__, char_stream))
Example #26
File: parser.py  Project: mzsk/c_compiler
def keyword_or_identifier(char_stream, location):
    values = ''.join(takewhile(alpha_numeric.__contains__, char_stream))
    return rules(keyword_or_identifier).get(values, IDENTIFIER)(values,
                                                                location)
Example #27
def get_line(values):  # get all the tokens on the current line, being that preprocessor works on a line-by-line basis
    return takewhile(
        lambda token, initial_line_number=line_number(peek(values)): initial_line_number == line_number(token),
        values
    ) if peek_or_terminal(values) is not terminal else iter(())
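
To see the line-by-line splitting in action, the snippet below runs Example #27's get_line over a toy token stream, again using the stand-in helpers sketched after Example #2; the Tok namedtuple and this line_number are invented stand-ins for whatever the project's tokens and line_number actually look like.

from collections import namedtuple

Tok = namedtuple('Tok', 'value line')  # hypothetical token carrying a line attribute

def line_number(token):  # stand-in for the project's line_number accessor
    return token.line

stream = CharStream([Tok('int', 1), Tok('x', 1), Tok(';', 1), Tok('return', 2)])
print([t.value for t in get_line(stream)])  # ['int', 'x', ';']  (stops at the line-2 token)
print(peek(stream).value)                   # 'return'  (still in the stream for the next call)
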
Example #28
File: cc.py  Project: qs9816/c_compiler
def format_token_seq(token_seq):
    return chain.from_iterable(imap(format_line, imap(get_line, takewhile(peek, repeat(token_seq)))))
Example #29
def standard_lib_file_path(token_seq, _):
    file_path = consume(token_seq) and ''.join(
        takewhile(TOKENS.GREATER_THAN.__ne__, token_seq))
    _ = error_if_not_value(token_seq, TOKENS.GREATER_THAN)
    return file_path
Example #30
File: parser.py  Project: mzsk/c_compiler
def suffix(char_stream):
    return ''.join(takewhile(letters.__contains__, char_stream))
Example #31
File: cc.py  Project: qs9816/c_compiler
def get_line(tokens):
    return iter(()) if peek_or_terminal(tokens) is terminal else takewhile(
        lambda t, current_line_number=line_number(peek(tokens)): line_number(t) == current_line_number,
        tokens
    )
Example #32
def standard_lib_file_path(token_seq, _):
    file_path = consume(token_seq) and ''.join(takewhile(TOKENS.GREATER_THAN.__ne__, token_seq))
    _ = error_if_not_value(token_seq, TOKENS.GREATER_THAN)
    return file_path
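
One detail worth spelling out in standard_lib_file_path: because the project's takewhile stops with the failing token still in the stream, the closing TOKENS.GREATER_THAN survives the join and is then checked and consumed by error_if_not_value. The snippet below replays that flow with the stand-in helpers from after Example #2 and a hypothetical error_if_not_value that consumes one token and raises unless it matches; the real project function may well differ.

def error_if_not_value(stream, expected):
    # hypothetical stand-in: consume one token and insist that it is the expected one
    value = consume(stream)
    if value != expected:
        raise ValueError('expected {!r} got {!r}'.format(expected, value))
    return value

tokens = CharStream(['<', 'stdio.h', '>', 'rest'])
_ = consume(tokens)                                  # drop the opening '<'
file_path = ''.join(takewhile('>'.__ne__, tokens))   # '>' is left in the stream
_ = error_if_not_value(tokens, '>')                  # ... and consumed here
print(file_path)  # stdio.h
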
Example #33
def string_literal(tokens):
    location = loc(peek(tokens))  # join adjacent strings into a single string ...
    token = ''.join(takewhile(lambda t: type(t) is STRING, tokens)) + '\0'
    return ConstantExpression(imap(char_literal, imap(iter, token)),
                              StringType(len(token), location), location)
Example #34
def format_token_seq(token_seq):
    return chain.from_iterable(
        imap(format_line, imap(get_line, takewhile(peek, repeat(token_seq)))))
Example #35
def optimize(instrs, level=zero_level_optimization):
    global new_instructions, old_instructions, deleted_instructions
    new_instructions = {}
    old_instructions = defaultdict(list)
    deleted_instructions = []
    return update_instruction_references(chain.from_iterable(imap(level, takewhile(peek, repeat(instrs)))))
Example #36
def get_newline_if_possible(char_seq):
    return ''.join(takewhile(
        lambda c, new_line=iter(linesep), _seq=char_seq: c == next(new_line) and (consume(_seq) or 1),
        imap(peek, repeat(char_seq))
    ))