Пример #1
0
def inst_to_signals(instructions):
    """Translate a list of Instruction objects into per-instruction
    combinational signal dicts.

    Also emits a one-time warning when PUSH/POP is used before the
    stack pointer (SP) has been written by a LOAD or ALU instruction.
    """
    signals_per_inst = []

    # Track whether SP has been written, so PUSH/POP before stack
    # pointer initialization can be flagged.
    sp_initialized = False
    ordered = list(opcodes)  # opcodes is ordered dict
    alu_opcodes = ordered[ordered.index("ALU"):ordered.index("XOR")]

    for inst in instructions:
        assert isinstance(inst, Instruction)
        opcode = inst.opcode.text
        mnemonic = opcode.upper()
        if mnemonic not in opcodes:
            show_syntax_error("Unknown opcode {}".format(opcode), inst.opcode)

        if mnemonic == "LOAD" or mnemonic in alu_opcodes:
            if inst.operands[0].text == "SP":
                sp_initialized = True
        elif mnemonic in ("PUSH", "POP") and not sp_initialized:
            show_warning_one_line(
                mnemonic +
                " used before stack pointer initialized. Undefined behavior.",
                inst.opcode)
            sp_initialized = True  # Only show one warning

        control_signals, encoding = opcodes[mnemonic]()
        signals_per_inst.append(
            iterate_operands(inst, control_signals, encoding))

    return signals_per_inst
Пример #2
0
def immediate_from_operand(operand):
    """Parse *operand* as an immediate value and return it as an int.

    Reports a syntax error if the operand text is not a valid
    number/literal or if the parsed value falls outside the signed
    32-bit range.
    """
    val = operand.text
    if not int_l.is_number_or_literal(val):
        show_syntax_error(
            "invalid number {}, must be on format [-][0x|0b]nnnn".format(val),
            operand)
    num = int_l.to_number_or_literal(val)
    if not int_l.verify_number_range(num):
        # Bug fix: the original concatenated the int `num` onto a str,
        # which raised TypeError and masked the real error message.
        show_syntax_error(
            "Immediate number outside signed 32-bit range. Must be within "
            "-2^31..2^31 -1. Was: {}".format(num), operand)
    return num
Пример #3
0
def iterate_operands(inst: Instruction, sig_dict, encoding):
    """Build the full signal dict for *inst*.

    Starts from a copy of *sig_dict* and merges in the signals derived
    from each operand according to *encoding*. Reports a syntax error
    when the operand count does not match the encoding length.
    """
    signals = dict(sig_dict)
    operands = extract_operands(inst.operands)
    if len(operands) != len(encoding):
        show_syntax_error(
            "Incorrect number of operands, was {} but expected {}".format(
                len(operands), len(encoding)), inst.opcode)
    for idx, operand in enumerate(operands):
        signals.update(get_signals_by_operand(operand, encoding[idx]))
    return signals
Пример #4
0
def extract_operands(operand_tokens):
    """
    Takes in a list of tokens, and extracts the operands.
    Brackets are grouped as a list; commas are discarded.

    Reports a syntax error on nested, empty, unmatched, or unclosed
    brackets.
    """
    result = list()
    # Token list accumulated while inside "[...]"; None when outside.
    active_bracket_group = None
    for t in operand_tokens:
        if t.text == "[":
            if active_bracket_group is not None:
                show_syntax_error(
                    "Can't open a bracket within another bracket.", t)
            active_bracket_group = list()
        elif t.text == "]":
            # Bug fix: a "]" with no matching "[" used to crash with
            # TypeError (len(None)); report it as a syntax error instead.
            if active_bracket_group is None:
                show_syntax_error(
                    "Closing bracket without opening bracket", t)
            if len(active_bracket_group) == 0:
                show_syntax_error("Invalid content in bracket group", t)
            result.append(active_bracket_group)
            active_bracket_group = None
        elif t.text == ",":
            continue  # commas only separate operands, never emitted
        else:
            if active_bracket_group is not None:
                # append to bracket group
                active_bracket_group.append(t)
            else:
                result.append(t)
    if active_bracket_group is not None:
        t = operand_tokens[-1]
        show_syntax_error("Did not close bracket", t)
    return result
Пример #5
0
def register_from_operand(operand):
    """Map a register-name operand to its register encoding.

    Lookup is case-insensitive; an unknown name is a syntax error.
    """
    name = operand.text.upper()
    if name not in registers.register_dict:
        show_syntax_error("Unknown register " + operand.text, operand)
    return registers.register_dict[name]
Пример #6
0
def tokenize_file(filename):
    """Tokenize an assembly source file.

    Returns a list of lines, each a list of Token objects (opcode,
    operands, delimiters, labels). Everything after ';' on a line is a
    comment and is dropped; lines that are blank after comment removal
    are skipped. Emitted tokens carry a 1-based line number.
    """
    tokenized_lines = list()
    with open(filename, "r") as f:
        for i, line in enumerate(f):
            line_of_tokens = list()
            delimiters = "[],"

            # Strip the trailing comment; skip lines empty after that.
            no_comment = line.split(";", 1)[0]
            if len(no_comment.split()) == 0:
                continue

            # The first word token on a line becomes the OPCODE; later
            # words are OPERANDs. A ':' retroactively reclassifies the
            # previous token as a label and resets this flag.
            found_opcode = False
            start_token = -1  # sentinel value -1: no start token index
            for index, c in enumerate(no_comment):
                if c.isspace() or c in delimiters or c == ":":
                    # The current char terminates any word token in
                    # progress; flush it before handling the char itself.
                    if start_token != -1:
                        token_txt = no_comment[start_token:index]
                        token_type = TokenType.OPERAND
                        if not found_opcode:
                            token_type = TokenType.OPCODE
                            found_opcode = True
                        token_i = start_token
                        start_token = -1

                        token = Token(token_txt, token_type, i + 1, line,
                                      token_i, line_of_tokens)
                        line_of_tokens.append(token)
                    if c in delimiters:
                        # '[', ']' and ',' each become a one-char
                        # DELIMITER token.
                        token_txt = c
                        token_type = TokenType.DELIMITER
                        token = Token(token_txt, token_type, i + 1, line,
                                      index, line_of_tokens)
                        line_of_tokens.append(token)
                    elif c == ":":
                        found_opcode = False
                        if len(line_of_tokens) == 0:
                            # Missing label error
                            # NOTE(review): the code below indexes
                            # line_of_tokens[-1]; this only works if
                            # show_syntax_error never returns — confirm.
                            error_token = Token(":", TokenType.DELIMITER,
                                                i + 1, line, 0, line_of_tokens)
                            show_syntax_error("Missing label", error_token)
                        if line_of_tokens[-1].t_type not in [
                                TokenType.OPCODE, TokenType.LABEL_SYMBOLIC,
                                TokenType.LABEL_NUMERIC
                        ]:
                            # Invalid label error
                            prev_token = line_of_tokens[-1]
                            show_syntax_error("Invalid label", prev_token)
                        # Reclassify the preceding token as a numeric or
                        # symbolic label.
                        if lb.is_numeric_label(line_of_tokens[-1].text):
                            line_of_tokens[-1].t_type = TokenType.LABEL_NUMERIC
                        else:
                            line_of_tokens[
                                -1].t_type = TokenType.LABEL_SYMBOLIC
                        token_type = TokenType.LABEL_DELIMITER

                        token = Token(c, token_type, i + 1, line, index,
                                      line_of_tokens)
                        line_of_tokens.append(token)
                else:
                    # in a token, or beginning a new token
                    if start_token == -1:
                        start_token = index
            # NOTE(review): a word running to end-of-line is flushed only
            # by a terminating character; lines read from a file normally
            # end with '\n', but a final line without a trailing newline
            # would leave its last token unflushed — confirm.
            tokenized_lines.append(line_of_tokens)

    return tokenized_lines