Example 1
    def capture(self, m: Module, core: Core, past: int):
        comb = m.d.comb
        if past > 0:
            prefix = f"past{past}"
        else:
            prefix = "now"
        self.r = RegisterFile(core.xlen, prefix=prefix)
        for i in range(self.r.main_gpr_count()):
            comb += self.r[i].eq(Past(core.register_file.r[i], past))
        comb += self.r.pc.eq(Past(core.pc, past))

        # TODO: move to additional structure
        self.itype = IType(prefix=f"{prefix}_i")
        self.itype.elaborate(comb, Past(core.current_instruction, past))

        self.jtype = JType(prefix=f"{prefix}_j")
        self.jtype.elaborate(comb, Past(core.current_instruction, past))

        self.utype = UType(prefix=f"{prefix}_u")
        self.utype.elaborate(comb, Past(core.current_instruction, past))

        self.btype = BType(prefix=f"{prefix}_b")
        self.btype.elaborate(comb, Past(core.current_instruction, past))

        # TODO: membus
        self.input_ready = Signal.like(core.mem2core.ready,
                                       name=f"{prefix}_input_ready")
        self.input_data = Array([
            Signal(core.xlen, name=f"{prefix}_input_{i}")
            for i in range(core.look_ahead)
        ])

        self.cycle = Signal.like(core.cycle, name=f"{prefix}_cycle")
        comb += self.cycle.eq(Past(core.cycle, past))

        # TODO: move to structure
        self.mem2core_addr = Signal.like(core.mem2core.addr,
                                         name=f"{prefix}_mem2core_addr")
        self.mem2core_en = Signal.like(core.mem2core.en,
                                       name=f"{prefix}_mem2core_en")
        self.mem2core_seq = Signal.like(core.mem2core.seq,
                                        name=f"{prefix}_mem2core_seq")
        comb += self.mem2core_addr.eq(Past(core.mem2core.addr, past))
        comb += self.mem2core_en.eq(Past(core.mem2core.en, past))
        comb += self.mem2core_seq.eq(Past(core.mem2core.seq, past))
        comb += self.input_ready.eq(Past(core.mem2core.ready, past))
        comb += self.input_data[0].eq(Past(core.mem2core.value, past))
Example 2
def main():
    my_basedir = os.path.dirname(os.path.realpath(__file__)).replace(
        '/bin', '')
    my_util = RegisterUtil("check_kafka", "{}/log".format(my_basedir))
    my_region = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | grep region | awk -F\\\" '{print $4}'"
    )
    my_file = RegisterFile(my_util, my_region, my_basedir)
    params = my_file.process_conf(
        "{}/etc/register_kafka.conf".format(my_basedir))

    http_ip = '0.0.0.0'
    if 'check_broker_port' in params:
        http_port = int(params['check_broker_port'])
    else:
        http_port = 2180
    http_handler = BrokerRequestHandler
    my_util.log.info("Starting server at {}:{}".format(http_ip, http_port))
    # httpd = SocketServer.TCPServer((http_ip, http_port), http_handler)
    httpd = SimpleThreadingServer((http_ip, http_port), http_handler)
    httpd.serve_forever()
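
For reference, a minimal sketch of what SimpleThreadingServer and BrokerRequestHandler might look like, assuming (as the commented-out SocketServer.TCPServer line suggests) a threading TCP server fronting a simple HTTP health-check handler; the real classes live elsewhere in this project and may differ (Python 3 module names shown):

import socketserver
from http.server import BaseHTTPRequestHandler


class SimpleThreadingServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    # Handle each health-check request on its own thread so one slow probe
    # cannot block the listener.
    daemon_threads = True
    allow_reuse_address = True


class BrokerRequestHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        # Answer 200 OK so a load-balancer health check sees the broker as alive.
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b"OK\n")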
Example 3
    def __init__(self):
        self._instruction_memory = Memory(1024, byte_size=BYTE_SIZE)
        self._data_memory = Memory(32, byte_size=BYTE_SIZE)
        self._register_file = RegisterFile(32)
        self._alu = ALU()
        self._pc = ALU.int_to_n_bit_binary(0)

        self._end = False

        # pipeline registers
        self._if_id = IF_ID()
        self._if_id_tmp = IF_ID()

        self._id_ex = ID_EX()
        self._id_ex_tmp = ID_EX()

        self._ex_mem = EX_MEM()
        self._ex_mem_tmp = EX_MEM()

        self._mem_wb = MEM_WB()
        self._mem_wb_tmp = MEM_WB()
Example 4
def main():
    my_basedir = os.path.dirname(os.path.realpath(__file__)).replace('/bin', '')
    my_util = RegisterUtil("register_zookeeper", "{}/log".format(my_basedir))
    my_util.log.info("service_register_zookeeper: starting registration service")

    my_region = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | grep region | awk -F\\\" '{print $4}'")
    my_file = RegisterFile(my_util, my_region, my_basedir)
    params = my_file.process_conf("{}/etc/register_zookeeper.conf".format(my_basedir))
    my_file.set_bucket_name(params['s3_bucket'])

    while 1:
        state = my_file.get_zookeeper_state()
        my_util.log.info("service_register_zookeeper: state {}".format(state))
        if state == "1":
            command_shell = "python {}/bin/register_zookeeper.py".format(my_basedir)
            my_util.exec_shell_command(command_shell)
        my_util.loop_sleep(120)
Example 5
def main():
    my_basedir = os.path.dirname(os.path.realpath(__file__)).replace(
        '/bin', '')
    my_util = RegisterUtil("register_zookeeper", "{}/log".format(my_basedir))
    my_util.log.info("register_zookeeper: beginning registration process")
    my_util.log.info("register_zookeeper: using basedir={}".format(my_basedir))

    # my_hostname = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/meta-data/hostname")
    my_hostname = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/meta-data/local-ipv4")
    my_region = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | grep region | awk -F\\\" '{{print $4}}'"
    )
    my_util.log.info("register_zookeeper: hostname {}".format(my_hostname))
    my_util.log.info("register_zookeeper: region {}".format(my_region))
    my_asg = RegisterASG(my_util, my_region)
    asg_name = my_asg.get_asg_name()
    asg_desired_cap = my_asg.get_desired_capacity(asg_name)
    asg_elb = my_asg.get_asg_elbs(asg_name)
    my_util.log.info(
        "register_zookeeper: instance is part of asg {}".format(asg_name))
    my_util.log.info(
        "register_zookeeper: asg desired capacity is currently {}".format(
            asg_desired_cap))
    my_util.log.info(
        "register_zookeeper: asg is behind elb {}".format(asg_elb))

    my_file = RegisterFile(my_util, my_region, my_basedir)
    params = my_file.process_conf(
        "{}/etc/register_zookeeper.conf".format(my_basedir))
    my_file.set_bucket_name(params['s3_bucket'])
    my_file.set_config_zook(params['cfg_zookeeper'])

    my_queue_zook = RegisterQueue(my_util, my_region, params['sqs_url_zook'])
    my_queue_replace = RegisterQueue(my_util, my_region,
                                     params['sqs_url_replace'])

    firstrun = my_file.get_zookeeper_firstrun()
    my_util.log.info("register_zookeeper: firstrun: {}".format(firstrun))

    # get register_zookeeper.connection file
    hosts_s3 = my_file.get_connection_file()

    # if connection file exists, ensemble has already been stood up, thus this launch is a replacement
    # therefore, this instance must initialize but also tell other instances to perform replacement
    # achieved by setting state flag to 1, service_register_zookeeper.py listens for this state change
    # state = 0, brand new cluster build
    # state = 1, existing cluster, failure replacement
    my_util.log.info(
        "register_zookeeper: remote execution check: {}, {}".format(
            firstrun, len(hosts_s3)))
    if firstrun == '1' and len(hosts_s3) > 0:
        my_file.write_zookeeper_state('1')

    # send my hostname to the queue
    my_queue_zook.send_hostname(my_hostname)

    # get online instances that registered to the SQS queue; blocks until asg_desired_cap hosts are in the queue
    hosts_queue = my_queue_zook.get_hostnames(asg_desired_cap)

    # determine proper server list identities
    hosts_final, hosts_replaced, hosts_existing, hosts_new = my_queue_zook.process_hosts(
        hosts_s3, hosts_queue)
    my_util.log.info("register_zookeeper: master list: {}".format(hosts_final))
    my_util.log.info(
        "register_zookeeper: existing hosts: {}".format(hosts_existing))
    my_util.log.info(
        "register_zookeeper: replaced hosts: {}".format(hosts_replaced))
    my_util.log.info("register_zookeeper: new hosts: {}".format(hosts_new))

    if firstrun == '1':
        # scenario: brand new instantiation
        # set firstrun to '0'
        my_file.write_zookeeper_firstrun('0')

        # write new entries to zookeeper properties
        my_file.write_zookeeper_properties(hosts_final)

        # get myid from the final list and write myid file
        my_file.write_zookeeper_id(hosts_final.index(my_hostname) + 1)

        # need to wait for other replacement hosts to complete their replace process
        # the following statement sets a condition to wait for the _replace_zookeeper_property() function to complete
        if len(hosts_replaced) > 0:
            hosts_completed = my_queue_replace.get_hostnames(
                len(hosts_s3) - len(hosts_replaced))
            my_util.log.info(
                "register_zookeeper: hosts that completed replacement: {}".
                format(len(hosts_completed)))

        # write a connection string to a temp file and upload to S3
        my_file.write_connection_file(hosts_final)

        # set state to 0 now that all hosts are configured
        # state = 0, cluster is healthy
        # state = 1, cluster requires replacement
        my_file.write_zookeeper_state('0')
    else:
        # replace failed entries in zookeeper properties
        for host in hosts_replaced:
            my_file.replace_zookeeper_property(
                host, hosts_new[hosts_replaced.index(host)])
            my_queue_replace.send_hostname(my_hostname)

        my_util.set_cmd_restart_zook(params['cmd_restart_zook'])
        my_util.restart_zookeeper()

    my_iid = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/meta-data/instance-id")
    my_zid = my_file.get_zookeeper_id()
    my_ec2 = RegisterEC2(my_util, my_region)
    result = my_ec2.add_tag(my_iid, 'KafkaId', my_zid)
    if result == 200:
        for tag in my_ec2.describe_tags(my_iid):
            if tag['Key'] == 'KafkaId':
                my_util.log.info("register_zookeeper: tag {}".format(tag))
Example 6
class CPU:

    def __init__(self):
        self._instruction_memory = Memory(1024, byte_size=BYTE_SIZE)
        self._data_memory = Memory(32, byte_size=BYTE_SIZE)
        self._register_file = RegisterFile(32)
        self._alu = ALU()
        self._pc = ALU.int_to_n_bit_binary(0)

        self._end = False

        # pipeline registers
        self._if_id = IF_ID()
        self._if_id_tmp = IF_ID()

        self._id_ex = ID_EX()
        self._id_ex_tmp = ID_EX()

        self._ex_mem = EX_MEM()
        self._ex_mem_tmp = EX_MEM()

        self._mem_wb = MEM_WB()
        self._mem_wb_tmp = MEM_WB()

    def load_instructions(self, instructions):
        """
        load instructions from an array read from a file
        this is a helper function to initialize the instruction memory
        :param instructions:
        :return:
        """

        memory_cell_size = self._instruction_memory.byte_size

        # number of memory accesses needed to fetch one word
        n = 4

        # start placing instructions into memory at address 0
        base_addr = ALU.int_to_n_bit_binary(0)

        self._instruction_memory.set_mem_write(True)

        for j in range(len(instructions)):
            instruction = instructions[j]
            inst = [int(c) for c in instruction.replace('\n', '').replace(' ', '').split(';')[0]]

            for i in range(n):
                # calculate write addr
                self._alu.set_input_1(base_addr)
                self._alu.set_input_2(ALU.int_to_n_bit_binary(i + j * n))
                self._alu.set_op('00')
                addr = self._alu.result

                # write data
                unit_data = tuple(inst[i * memory_cell_size: (i + 1) * memory_cell_size])

                self._instruction_memory.set_write_addr(addr)
                self._instruction_memory.set_write_data(unit_data)

        self._instruction_memory.set_mem_write(False)

    def _load_w(self, mem, base_addr, memRead=True):

        if not memRead:
            return ALU.int_to_n_bit_binary(0)

        n = 4
        self._alu.set_input_1(base_addr)
        self._alu.set_op('00')

        word = []

        for i in range(n):
            self._alu.set_input_2(ALU.int_to_n_bit_binary(i))
            addr = self._alu.result
            word += list(mem.at(addr))

        return word

    def _store_w(self, base_addr, word, memWrite=True):

        if not memWrite:
            return
        self._alu.set_input_1(base_addr)
        self._alu.set_op('00')

        for i in range(4):
            self._alu.set_input_2(ALU.int_to_n_bit_binary(i))
            addr = self._alu.result

            byte = tuple(word[i * 8: 8 * (i + 1)])
            self._data_memory.put(addr, byte)

    def cycle(self):
        # pipeline registers
        self._if_id = copy.deepcopy(self._if_id_tmp)
        self._id_ex = copy.deepcopy(self._id_ex_tmp)
        self._ex_mem = copy.deepcopy(self._ex_mem_tmp)
        self._mem_wb = copy.deepcopy(self._mem_wb_tmp)

        self.fetch()
        self.decode()
        self.execute()
        self.memory()
        self.write_back()

    def master_control_unit(self, op_code_decimal, stall=False):

        if stall:
            # memory signals

            self._id_ex_tmp.mem_control.MemRead = False
            self._id_ex_tmp.mem_control.MemWrite = False
            self._id_ex_tmp.wb_control.RegWrite = False

            return

        # memory signals
        self._id_ex_tmp.mem_control.Branch = False
        self._id_ex_tmp.mem_control.MemRead = False
        self._id_ex_tmp.mem_control.MemWrite = False

        # wb signals
        self._id_ex_tmp.wb_control.MemToReg = False
        self._id_ex_tmp.wb_control.RegWrite = True

        if op_code_decimal == 35:    # lw
            self._id_ex_tmp.mem_control.MemRead = True
            self._id_ex_tmp.wb_control.MemToReg = True

        elif op_code_decimal == 43:  # sw
            self._id_ex_tmp.mem_control.MemWrite = True
            self._id_ex_tmp.wb_control.RegWrite = False

        elif op_code_decimal == 4:   # beq
            self._id_ex_tmp.mem_control.Branch = True
            self._id_ex_tmp.wb_control.RegWrite = False

    def forwarding_unit(self):

        result = {
            'ForwardA': '00',
            'ForwardB': '00',
        }

        # EX Hazard
        if (self._ex_mem.wb_control.RegWrite
                and (self._ex_mem.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                and (self._ex_mem.reg_dest == self._id_ex.rs)):
            result['ForwardA'] = '10'

        if (self._ex_mem.wb_control.RegWrite
                and (self._ex_mem.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                and (self._ex_mem.reg_dest == self._id_ex.rt)):
            result['ForwardB'] = '10'

        # memory hazard
        if (self._mem_wb.wb_control.RegWrite
                and (self._mem_wb.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                and not (self._ex_mem.wb_control.RegWrite
                         and (self._ex_mem.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                         and (self._ex_mem.reg_dest == self._id_ex.rs))
                and (self._mem_wb.reg_dest == self._id_ex.rs)):
            result['ForwardA'] = '01'

        if (self._mem_wb.wb_control.RegWrite
                and (self._mem_wb.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                and not (self._ex_mem.wb_control.RegWrite
                         and (self._ex_mem.reg_dest != ALU.int_to_n_bit_binary(0, 5))
                         and (self._ex_mem.reg_dest == self._id_ex.rt))
                and (self._mem_wb.reg_dest == self._id_ex.rt)):
            result['ForwardB'] = '01'

        return result

    def fetch(self):
        instruction = self._load_w(self._instruction_memory, self._pc)
        self._pc = ALU.n_bit_binary_to_decimal(self._pc) + 4
        self._pc = ALU.int_to_n_bit_binary(self._pc)

        # end of program
        if instruction == list(ALU.int_to_n_bit_binary(-1)):
            self._end = True

        if self._end:
            instruction = NOOP
            self._if_id_tmp.set_inst(tuple(instruction))
        else:
            self._if_id_tmp.set_pc(self._pc)
            self._if_id_tmp.set_inst(tuple(instruction))

    def decode(self):
        pipeline_register = self._if_id

        I_TYPE = [8, 12, 13, 4, 35, 43]  # addi, andi, ori, beq, lw, sw

        if list(pipeline_register.inst) == NOOP:
            self.master_control_unit(op_code_decimal=0, stall=True)
            return

        inst = list(pipeline_register.inst)[::-1]
        rs = inst[25:20:-1]
        rt = inst[20:15:-1]

        # hazard detection
        if self._id_ex.mem_control.MemRead and (self._id_ex.rt == tuple(rs) or self._id_ex.rt == tuple(rt)):
            self._pc = ALU.int_to_n_bit_binary(
                ALU.n_bit_binary_to_decimal(self._pc) - 4,
            )

            self._if_id_tmp.set_inst(tuple(inst[::-1]))
            self._if_id_tmp.set_pc(self._pc)
            self.master_control_unit(op_code_decimal=0, stall=True)

            return

        op_code = inst[31:25:-1]
        op_code_decimal = ALU.n_bit_binary_to_decimal(tuple(op_code), signed=False)

        self.master_control_unit(op_code_decimal)

        # common R type and I type stuff
        if op_code_decimal == 0 or op_code_decimal in I_TYPE:
            # read rt rs

            self._id_ex_tmp.set_rd1(self._register_file.at(tuple(rs)))
            self._id_ex_tmp.set_rd2(self._register_file.at(tuple(rt)))

            self._id_ex_tmp.set_rs(tuple(rs))
            self._id_ex_tmp.set_rt(tuple(rt))

        # if R type
        if op_code_decimal == 0:
            rd = inst[15:10:-1]
            func = inst[5::-1]

            # set registers

            self._id_ex_tmp.set_inst_rd(tuple(rd))

            # ex signals
            self._id_ex_tmp.ex_control.ALUOp = tuple(func)

            self._id_ex_tmp.ex_control.ALUSource = 'rt'
            self._id_ex_tmp.ex_control.RegDst = 'rd'

        # I type
        elif op_code_decimal in I_TYPE:
            imm = inst[15::-1]

            extended_imm = ALU.sign_extend_to(tuple(imm), WORD)
            self._id_ex_tmp.set_inst_imm(extended_imm)

            # ex signals
            self._id_ex_tmp.ex_control.ALUOp = ALU.alu_i_type_op_code_table()[op_code_decimal]

            # beq compares rs with rt; the other I-types use the immediate
            if op_code_decimal == 4:
                s = 'rt'
            else:
                s = 'imm'

            self._id_ex_tmp.ex_control.ALUSource = s
            self._id_ex_tmp.ex_control.RegDst = 'rt'

        # J type: the jump target is extracted but not otherwise used in this model
        elif op_code_decimal == 2:
            addr = inst[25::-1]

        self._id_ex_tmp.set_pc(pipeline_register.pc)

    def execute(self):

        pipeline_register = copy.deepcopy(self._id_ex)

        self._alu.set_op(pipeline_register.ex_control.ALUOp)

        fu_data = self.forwarding_unit()

        if fu_data['ForwardA'] == '00':
            input1 = pipeline_register.rd1
        elif fu_data['ForwardA'] == '10':
            input1 = self._ex_mem.alu_result
        elif fu_data['ForwardA'] == '01':
            input1 = self.write_back(ex=False)
        else:
            raise Exception("Invalid input 1 ")

        self._alu.set_input_1(input1)

        alu_source = pipeline_register.ex_control.ALUSource

        # memory data forward
        if fu_data['ForwardB'] == '00':
            memory_data = pipeline_register.rd2
        elif fu_data['ForwardB'] == '10':
            memory_data = self._ex_mem.alu_result
        elif fu_data['ForwardB'] == '01':
            memory_data = self.write_back(ex=False)
        else:
            Exception("Error")

        # set the correct alu source
        if alu_source == 'imm':
            input2 = pipeline_register.inst_imm

        elif fu_data['ForwardB'] == '00':
            if alu_source == 'rt':
                input2 = pipeline_register.rd2
            elif alu_source == 'rs':
                input2 = pipeline_register.rd1
        elif fu_data['ForwardB'] == '10':
            input2 = self._ex_mem.alu_result
        elif fu_data['ForwardB'] == '01':
            input2 = self.write_back(ex=False)
        else:
            raise Exception("Invalid ALU source")

        self._alu.set_input_2(input2)
        alu_result = self._alu.result
        alu_is_zero = self._alu.zero

        # calculate possible beq addr
        pc = ALU.n_bit_binary_to_decimal(pipeline_register.pc)
        offset_4 = ALU.n_bit_binary_to_decimal(
            pipeline_register.inst_imm
        ) * 4

        jump_target = ALU.int_to_n_bit_binary(pc + offset_4)

        if pipeline_register.mem_control.Branch and alu_is_zero:
            self._id_ex_tmp = ID_EX()
            self._if_id_tmp = IF_ID()
            self._pc = jump_target

        if pipeline_register.ex_control.RegDst == 'rt':
            reg_dest = pipeline_register.rt
        elif pipeline_register.ex_control.RegDst == 'rd':
            reg_dest = pipeline_register.inst_rd

        # propagate wb m control signals
        self._ex_mem_tmp.mem_control = copy.deepcopy(pipeline_register.mem_control)
        self._ex_mem_tmp.wb_control = copy.deepcopy(pipeline_register.wb_control)

        self._ex_mem_tmp.set_alu_result(alu_result)
        self._ex_mem_tmp.set_alu_zero_flag(alu_is_zero)
        self._ex_mem_tmp.set_jump_target(jump_target)
        self._ex_mem_tmp.set_rd2(memory_data)
        self._ex_mem_tmp.set_reg_dest(reg_dest)

        self._ex_mem_tmp.pc = pipeline_register.pc

    def memory(self):
        pipeline_data = copy.deepcopy(self._ex_mem)

        if pipeline_data.mem_control.MemWrite:
            self._store_w(pipeline_data.alu_result, pipeline_data.rd2, pipeline_data.mem_control.MemWrite)

        if pipeline_data.mem_control.MemRead:
            read_result = self._load_w(self._data_memory, pipeline_data.alu_result, pipeline_data.mem_control.MemRead)
            self._mem_wb_tmp.set_read_data(tuple(read_result))

        self._mem_wb_tmp.set_alu_result(pipeline_data.alu_result)
        self._mem_wb_tmp.set_reg_dest(pipeline_data.reg_dest)
        self._mem_wb_tmp.wb_control = copy.deepcopy(pipeline_data.wb_control)
        self._mem_wb_tmp.pc = pipeline_data.pc

    def write_back(self, ex=True):
        pipeline_data = self._mem_wb

        if pipeline_data.wb_control.MemToReg:
            write_data = pipeline_data.read_data
        else:
            write_data = pipeline_data.alu_result

        # to be forwarded
        if not ex:
            return write_data

        # set register signals

        if pipeline_data.wb_control.RegWrite:
            # debug trace: print the PC when this particular destination register is written
            if pipeline_data.reg_dest == (0, 0, 0, 1, 0):
                print("the: ", ALU.n_bit_binary_to_decimal(pipeline_data.pc))
            self._register_file.put(pipeline_data.reg_dest, write_data)
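
As a usage illustration, a minimal driver sketch for the CPU model above; the instruction file format (one binary word per line, ';' starting a comment) is inferred from load_instructions(), run_program/max_cycles are hypothetical names, and the sketch peeks at the model's private _end flag to know when the end marker has been fetched:

def run_program(path, max_cycles=10000):
    cpu = CPU()
    with open(path) as f:
        cpu.load_instructions(f.readlines())

    cycles = 0
    # clock the pipeline until fetch() sees the all-ones end marker
    while not cpu._end and cycles < max_cycles:
        cpu.cycle()
        cycles += 1

    # let the instructions still in flight drain through the remaining stages
    for _ in range(4):
        cpu.cycle()
    return cycles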
Example 7
import os

from register_ec2 import RegisterEC2
from register_file import RegisterFile
from register_util import RegisterUtil

my_basedir = os.path.dirname(os.path.realpath(__file__)).replace('/bin', '')
my_util = RegisterUtil("register_kafka", "{}/log".format(my_basedir))
my_hostname = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/meta-data/local-ipv4")
my_region = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | grep region | awk -F\\\" '{{print $4}}'")
my_iid = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/meta-data/instance-id")
my_file = RegisterFile(my_util, my_region, my_basedir)
params = my_file.process_conf("{}/etc/register_kafka.conf".format(my_basedir))
my_file.set_config_brok(params['cfg_broker'])
my_bid = my_file.get_broker_id()
my_ec2 = RegisterEC2(my_util, my_region)
result = my_ec2.add_tag(my_iid, 'ApacheId', my_bid)
if result == 200:
    for tag in my_ec2.describe_tags(my_iid):
        if tag['Key'] == 'ApacheId':
            my_util.log.info("register_kafka_tag: tag {}".format(tag))
Example 8
class VerificationRegisterFile:
    def capture(self, m: Module, core: Core, past: int):
        comb = m.d.comb
        if past > 0:
            prefix = f"past{past}"
        else:
            prefix = "now"
        self.r = RegisterFile(core.xlen, prefix=prefix)
        for i in range(self.r.main_gpr_count()):
            comb += self.r[i].eq(Past(core.register_file.r[i], past))
        comb += self.r.pc.eq(Past(core.pc, past))

        # TODO: move to additional structure
        self.itype = IType(prefix=f"{prefix}_i")
        self.itype.elaborate(comb, Past(core.current_instruction, past))

        self.jtype = JType(prefix=f"{prefix}_j")
        self.jtype.elaborate(comb, Past(core.current_instruction, past))

        self.utype = UType(prefix=f"{prefix}_u")
        self.utype.elaborate(comb, Past(core.current_instruction, past))

        self.btype = BType(prefix=f"{prefix}_b")
        self.btype.elaborate(comb, Past(core.current_instruction, past))

        # TODO: membus
        self.input_ready = Signal.like(core.mem2core.ready,
                                       name=f"{prefix}_input_ready")
        self.input_data = Array([
            Signal(core.xlen, name=f"{prefix}_input_{i}")
            for i in range(core.look_ahead)
        ])

        self.cycle = Signal.like(core.cycle, name=f"{prefix}_cycle")
        comb += self.cycle.eq(Past(core.cycle, past))

        # TODO: move to structure
        self.mem2core_addr = Signal.like(core.mem2core.addr,
                                         name=f"{prefix}_mem2core_addr")
        self.mem2core_en = Signal.like(core.mem2core.en,
                                       name=f"{prefix}_mem2core_en")
        self.mem2core_seq = Signal.like(core.mem2core.seq,
                                        name=f"{prefix}_mem2core_seq")
        comb += self.mem2core_addr.eq(Past(core.mem2core.addr, past))
        comb += self.mem2core_en.eq(Past(core.mem2core.en, past))
        comb += self.mem2core_seq.eq(Past(core.mem2core.seq, past))
        comb += self.input_ready.eq(Past(core.mem2core.ready, past))
        comb += self.input_data[0].eq(Past(core.mem2core.value, past))

    def at_instruction_start(self):
        return (self.cycle == 0) & (self.input_ready[0])

    def assert_loading_from(self, m: Module, addr, src_loc_at=1):
        comb = m.d.comb
        comb += Assert(self.mem2core_en, src_loc_at=src_loc_at)
        comb += Assert(self.mem2core_addr == addr, src_loc_at=src_loc_at)

    def assert_same_gpr(self, m: Module, other: RegisterFile, src_loc_at=1):
        comb = m.d.comb

        for i in range(self.r.main_gpr_count()):
            comb += Assert(self.r[i] == other[i], src_loc_at=src_loc_at)

    def assert_same_gpr_but_one(self,
                                m: Module,
                                other: RegisterFile,
                                skip: Value,
                                src_loc_at=1):
        comb = m.d.comb

        for i in range(self.r.main_gpr_count()):
            with m.If(skip != i):
                comb += Assert(self.r[i] == other[i], src_loc_at=src_loc_at)

    def assert_gpr_value(self,
                         m: Module,
                         idx: Value,
                         expected_value: Value,
                         src_loc_at=1):
        """ Assert GPR value (ignored for idx = 0 and zeri is checked instead) """
        comb = m.d.comb
        with m.If(idx == 0):
            comb += Assert(self.r[0] == 0, src_loc_at=src_loc_at)
        with m.Else():
            comb += Assert(self.r[idx] == expected_value,
                           src_loc_at=src_loc_at)

    def assert_pc_advanced(self,
                           m: Module,
                           previous: RegisterFile,
                           src_loc_at=1):
        comb = m.d.comb
        comb += Assert(self.r.pc == (previous.pc + 4)[:self.r.pc.width],
                       src_loc_at=src_loc_at)

    def assert_same_pc(self, m: Module, previous: RegisterFile, src_loc_at=1):
        comb = m.d.comb
        comb += Assert(self.r.pc == previous.pc, src_loc_at=src_loc_at)
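
A sketch of how these capture/assert helpers might be wired into a formal harness, assuming m is the verification Module and core is the Core under test; the property itself is only illustrative (an instruction that must leave the GPRs untouched and step the PC by 4), verify_nop_step is a hypothetical name, and only methods defined above are used:

def verify_nop_step(m, core):
    # Snapshot the core state now and one cycle in the past.
    now = VerificationRegisterFile()
    now.capture(m, core, past=0)
    prev = VerificationRegisterFile()
    prev.capture(m, core, past=1)

    # At an instruction boundary, the GPRs must be unchanged and the PC
    # must have advanced by one instruction relative to the past snapshot.
    with m.If(now.at_instruction_start()):
        now.assert_same_gpr(m, prev.r)
        now.assert_pc_advanced(m, prev.r)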
Example 9
def register_file_test():
    # Write a distinct value to every register; after the first 10 writes the
    # write enable is dropped, so later writes must leave registers unchanged.
    rg = RegisterFile()
    rg.set_register_write(True)
    for i, k in enumerate(rg.data.keys()):

        if i >= 10:
            rg.set_register_write(False)

        rg.set_read_r1(k)
        rg.set_read_r2(k)

        old_d1 = rg.read_d1
        old_d2 = rg.read_d2

        rg.set_write_r(k)
        rg.set_write_data(ALU.int_to_n_bit_binary(i))

        assert rg.read_d1 == rg.read_d2

        if i < 10:
            assert rg.read_d1 == ALU.int_to_n_bit_binary(i)
        else:
            assert rg.read_d1 == old_d1

Example 10
def main():
    my_basedir = os.path.dirname(os.path.realpath(__file__)).replace(
        '/bin', '')
    my_util = RegisterUtil("register_kafka", "{}/log".format(my_basedir))
    my_util.log.info("register_kafka: beginning registration process")
    my_util.log.info("register_kafka: using basedir={}".format(my_basedir))

    # my_hostname = my_util.exec_shell_command("curl -s http://169.254.169.254/latest/meta-data/hostname")
    my_hostname = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/meta-data/local-ipv4")
    my_region = my_util.exec_shell_command(
        "curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | grep region | awk -F\\\" '{{print $4}}'"
    )
    my_util.log.info("register_zookeeper: hostname {}".format(my_hostname))
    my_util.log.info("register_zookeeper: region {}".format(my_region))
    my_asg = RegisterASG(my_util, my_region)
    asg_name = my_asg.get_asg_name()
    asg_desired_cap = my_asg.get_desired_capacity(asg_name)
    asg_elb = my_asg.get_asg_elbs(asg_name)
    my_util.log.info(
        "register_kafka: instance is part of asg {}".format(asg_name))
    my_util.log.info(
        "register_kafka: asg desired capacity is currently {}".format(
            asg_desired_cap))
    my_util.log.info("register_kafka: asg is behind elb {}".format(asg_elb))

    my_file = RegisterFile(my_util, my_region, my_basedir)
    params = my_file.process_conf(
        "{}/etc/register_kafka.conf".format(my_basedir))
    my_file.set_bucket_name(params['s3_bucket'])
    my_file.set_config_brok(params['cfg_broker'])

    my_queue_brok = RegisterQueue(my_util, my_region, params['sqs_url_brok'])

    my_util.set_cmd_restart_brok(params['cmd_restart_brok'])
    cmd_brokids = '{} {}:2181 <<< "ls /brokers/ids" | grep "\["'.format(
        params['zookshell'], params['zookeeper'])

    # state = 0, brand new cluster build
    # state = 1, existing cluster or failure replacement
    state = my_file.get_broker_state()
    firstrun = my_file.get_broker_firstrun()
    my_util.log.info("register_kafka: firstrun: {}".format(firstrun))
    my_util.log.info("register_kafka: state: {}".format(state))
    if firstrun == '1' and state == '0':
        # scenario: brand new instantiation
        # need to send to broker queue
        my_queue_brok.send_hostname(my_hostname)

        # need to wait on broker queue
        hosts_queue = my_queue_brok.get_hostnames(asg_desired_cap)
        my_util.log.info("register_kafka: got {} hosts from queue".format(
            len(hosts_queue)))

        # write a ids string to a temp file and upload to S3
        # note: setting first = 1001 is the default value for reserved.broker.id.max+1
        first = int(params['broker_start'])
        ids_init = range(first, first + int(asg_desired_cap))
        my_file.write_ids_file(ids_init)

        # write not first run locally
        my_file.write_broker_firstrun('0')

        # write not first run s3/globally
        my_file.write_broker_state('1')
    elif firstrun == '1' and state == '1':
        # scenario: replacement
        # need to first get active broker.ids
        # strip the brackets and spaces from the "[1001, 1002, ...]" shell output
        result = my_util.exec_shell_command(cmd_brokids).replace(
            '[', '').replace(']', '').replace(' ', '')
        my_util.log.info("register_kafka: result: {}".format(result))
        ids_active = sorted(result.split(","))
        ids_active = [int(ident) for ident in ids_active]
        my_util.log.info("register_kafka: active ids: {}".format(ids_active))

        # need to discover bad.broker.id
        # no longer programmatically generating ids_all but getting it from s3 now
        # ids_all = range(ids_active[0], ids_active[0] + int(asg_desired_cap))
        ids_all = my_file.get_ids_file()
        ids_all = [int(ident) for ident in ids_all]
        # need to handle failure + change in asg_desired_cap
        if len(ids_all) != int(asg_desired_cap):
            for add_id in range(1, int(asg_desired_cap) - len(ids_all)):
                ids_all.append(ids_active[-1] + add_id)
        my_util.log.info("register_kafka: all ids: {}".format(ids_all))
        ids_missing = sorted(list(set(ids_all).difference(set(ids_active))))
        my_util.log.info("register_kafka: missing ids: {}".format(ids_missing))

        # setup puppet managed config file
        my_file.init_broker_config()

        # need to append the appropriate broker.id
        # need to disable auto gen id
        if len(ids_missing) > 0:
            my_file.fix_broker_id(ids_missing[0])