Example #1
    def __init__(self, name: str, channel_manager, peer_id, channel_name,
                 level_db_identity):
        self.__channel_service: ChannelService = channel_manager
        self.__channel_name = channel_name
        self.__pre_validate_strategy = self.__pre_validate
        self.__peer_id = peer_id
        self.__level_db = None
        self.__level_db_path = ""
        self.__level_db, self.__level_db_path = util.init_level_db(
            level_db_identity=f"{level_db_identity}_{channel_name}",
            allow_rename_path=False)
        self.__txQueue = AgingCache(
            max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
            default_item_status=TransactionStatusInQueue.normal)
        self.__unconfirmedBlockQueue = queue.Queue()
        self.__blockchain = BlockChain(self.__level_db, channel_name)
        self.__peer_type = None
        self.__consensus = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks()
        self.__block_height_sync_lock = threading.Lock()
        self.__block_height_thread_pool = ThreadPoolExecutor(
            1, 'BlockHeightSyncThread')
        self.__block_height_future: Future = None
        self.__subscribe_target_peer_stub = None
        self.__block_generation_scheduler = BlockGenerationScheduler(
            self.__channel_name)
        self.__precommit_block: Block = None
        self.set_peer_type(loopchain_pb2.PEER)
        self.name = name
        self.__service_status = status_code.Service.online

        self.epoch: Epoch = None
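
The constructor above bounds how long a queued transaction may live by building the tx queue on an AgingCache with max_age_seconds. As a rough standalone illustration of an age-limited cache (a toy sketch only; this is not loopchain's AgingCache API, and the class and keys below are made up):

import time

class ToyAgingCache:
    """Illustrative stand-in for an age-limited tx queue."""

    def __init__(self, max_age_seconds: float):
        self._max_age = max_age_seconds
        self._items = {}  # key -> (inserted_at, value)

    def __setitem__(self, key, value):
        self._items[key] = (time.monotonic(), value)

    def __len__(self):
        self._evict_expired()
        return len(self._items)

    def _evict_expired(self):
        now = time.monotonic()
        for key in [k for k, (t, _) in self._items.items() if now - t > self._max_age]:
            del self._items[key]

tx_queue = ToyAgingCache(max_age_seconds=0.1)
tx_queue["tx_hash_1"] = {"from": "hx00", "to": "hx01"}
time.sleep(0.2)
assert len(tx_queue) == 0  # the stale entry was evicted on access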
Example #2
    def __init__(self, channel_service: 'ChannelService', peer_id: str,
                 channel_name: str, store_id: str):
        self.__channel_service: ChannelService = channel_service
        self.__channel_name = channel_name
        self.__peer_id = peer_id

        self.__tx_queue = AgingCache(
            max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
            default_item_status=TransactionStatusInQueue.normal)
        self.blockchain = BlockChain(channel_name, store_id, self)
        self.__peer_type = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks(self.blockchain)
        self.__block_height_sync_bad_targets = {}
        self.__block_height_sync_lock = threading.Lock()
        self.__block_height_thread_pool: ThreadPoolExecutor = ThreadPoolExecutor(
            1, 'BlockHeightSyncThread')
        self.__block_height_future: Future = None
        self.set_peer_type(loopchain_pb2.PEER)
        self.__service_status = status_code.Service.online

        # old_block_hashes[height][new_block_hash] = old_block_hash
        self.__old_block_hashes: DefaultDict[int,
                                             Dict[Hash32,
                                                  Hash32]] = defaultdict(dict)
        self.epoch: Epoch = None
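
The comment above documents a two-level mapping, old_block_hashes[height][new_block_hash] = old_block_hash, built on defaultdict(dict). A minimal standalone sketch of that structure, mirroring the set_old_block_hash / get_old_block_hash / pop_old_block_hashes helpers shown in Example #13 (the hash strings here are placeholders):

from collections import defaultdict
from typing import DefaultDict, Dict

# height -> {new_block_hash: old_block_hash}
old_block_hashes: DefaultDict[int, Dict[str, str]] = defaultdict(dict)

def set_old_block_hash(height: int, new_hash: str, old_hash: str) -> None:
    old_block_hashes[height][new_hash] = old_hash

def get_old_block_hash(height: int, new_hash: str) -> str:
    return old_block_hashes[height][new_hash]

def pop_old_block_hashes(height: int) -> None:
    old_block_hashes.pop(height)  # drop every mapping recorded for that height

set_old_block_hash(10, "0xnew", "0xold")
assert get_old_block_hash(10, "0xnew") == "0xold"
pop_old_block_hashes(10)
assert 10 not in old_block_hashes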
Example #3
    def setUp(self):
        test_util.print_testname(self._testMethodName)
        # Create the BlockChain
        test_db = test_util.make_level_db(self.db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        self.chain = BlockChain(test_db)

Example #4
    def setUp(self):
        test_util.print_testname(self._testMethodName)
        self.peer_auth = test_util.create_default_peer_auth()

        set_mock(self)
        # Create the BlockChain
        test_db = test_util.make_level_db(self.db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        self.chain = BlockChain(test_db)
Example #5
    def test_block_genesis(self):
        """
        제네시스 블럭 생성 테스트
        """
        db_name = 'genesis_db'
        test_db = test_util.make_level_db(db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        chain = BlockChain(test_db)
        block = test_util.add_genesis_block()
        chain.add_block(block)

        self.assertIsNotNone(chain.last_block.block_hash, "Failed to create genesis block")
        # Remove the test DB
        leveldb.DestroyDB(db_name)
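
Note that the example above destroys the test DB only after the assertions pass; if an assertion fails, leveldb.DestroyDB is never reached. One common way to make the cleanup unconditional is unittest's addCleanup. A minimal sketch with a stand-in resource (the dictionary below merely imitates a DB; it is not loopchain's test_util helper):

import unittest

class GenesisBlockCleanupSketch(unittest.TestCase):
    def test_cleanup_always_runs(self):
        db = {}                      # stand-in for test_util.make_level_db(db_name)
        self.addCleanup(db.clear)    # runs even if an assertion below fails
        db["genesis"] = "block"
        self.assertIn("genesis", db)

if __name__ == "__main__":
    unittest.main()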
Example #6
    def setUpClass(cls):
        """
        Create the blockchain and populate the DB
        """
        # Create the BlockChain
        test_db = leveldb.LevelDB('./' + cls.test_block_db, create_if_missing=True)
        cls.assertIsNotNone(test_db, "Failed to create DB")
        cls.chain = BlockChain(test_db)
        cls.score = cls.SampleScore()
Example #7
    def test_add_remove_block_to_candidate_blocks(self):
        # GIVEN
        block0 = self.__get_test_block()
        block0.header.__dict__['height'] = -1
        block = self.__get_test_block()
        blockchain = BlockChain('icon_dex', '', self)
        blockchain.__dict__['_BlockChain__last_block'] = block0
        candidate_blocks = CandidateBlocks(blockchain)

        # WHEN add
        candidate_blocks.add_block(block, [ExternalAddress.empty()])

        # THEN
        self.assertTrue(block.header.hash in candidate_blocks.blocks)

        # WHEN remove
        candidate_blocks.remove_block(block.header.hash)

        # THEN
        self.assertFalse(block.header.hash in candidate_blocks.blocks)
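
The test above pokes private attributes through Python name mangling: a double-underscore attribute defined inside BlockChain is stored under the key '_BlockChain__last_block', which is why writing to blockchain.__dict__ with that mangled name works. A minimal standalone sketch of the mechanism (the class and values are illustrative):

class Example:
    def __init__(self):
        self.__last_block = None  # stored as '_Example__last_block' because of name mangling

    @property
    def last_block(self):
        return self.__last_block

obj = Example()
obj.__dict__['_Example__last_block'] = "stub block"  # bypass the mangled private name, as the test does
assert obj.last_block == "stub block"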
Example #8
    def test_block_confirm(self):
        db_name = 'block_confirm_db'
        test_db = test_util.make_level_db(db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        chain = BlockChain(test_db)
        # Create a block and add it in the unconfirmed state
        unconfirm_block = self.generate_test_block()
        unconfirm_block.generate_block(chain.last_block)

        self.assertTrue(chain.add_unconfirm_block(unconfirm_block),
                        "Failed to add the unconfirmed block.")

        # After verification, mark the block as confirmed and register it in the blockchain.
        chain.confirm_block(unconfirm_block.block_hash)

        # Block verification complete
        self.assertEqual(chain.last_block.block_hash,
                         unconfirm_block.block_hash, "The block was not added.")

        leveldb.DestroyDB(db_name)
Example #9
    def test_block_confirm(self):
        store_identity = 'block_confirm_db'
        chain = BlockChain(self.channel_name, store_id=store_identity)

        self.assertIsNotNone(chain.get_blockchain_store(), "failed to create DB")
        block = test_util.add_genesis_block()
        chain.add_block(block)
        self.last_block = block
        # Create a block and add it in the unconfirmed state
        unconfirm_block = self.generate_test_block()
        unconfirm_block.generate_block(chain.last_block)

        self.assertTrue(chain.add_unconfirm_block(unconfirm_block), "Failed to add the unconfirmed block.")

        # After verification, mark the block as confirmed and register it in the blockchain.
        chain.confirm_prev_block(unconfirm_block)

        # Block verification complete
        self.assertEqual(chain.last_block.block_hash, unconfirm_block.block_hash, "The block was not added.")

        chain.close_blockchain_store()
Example #10
    def setUpClass(cls):
        """
        블럭체인 생성 및 DB입력
        """
        cls.__peer_auth = test_util.create_default_peer_auth()
        cls.__peer_auth = test_util.create_default_peer_auth()

        set_mock(cls)
        # BlockChain 을 만듬
        test_db = leveldb.LevelDB('./' + cls.test_block_db,
                                  create_if_missing=True)
        cls.assertIsNotNone(test_db, "DB생성 불가")
        cls.chain = BlockChain(test_db)
        cls.score = cls.SampleScore()
Example #11
    def test_block_genesis(self):
        """
        create genesis block
        """
        store_identity = 'genesis_db'
        chain = BlockChain(self.channel_name, store_id=store_identity)
        self.assertIsNotNone(chain.get_blockchain_store(), "failed to create DB")
        block = test_util.add_genesis_block()
        chain.add_block(block)

        self.assertIsNotNone(chain.last_block.block_hash, "failed to create genesis block")
        # remove test DB
        chain.close_blockchain_store()
Example #12
    def test_block_add(self):
        """
        블럭 추가 테스트
        제네시스 블럭을 만든후 10개의 트랜잭션을 가진 10개의 블럭을 생성하여
        블럭체인에 추가
        """
        db_name = 'add_test_db'
        test_db = test_util.make_level_db(db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        chain = BlockChain(test_db)

        block = test_util.add_genesis_block()
        chain.add_block(block)
        genesis_hash = chain.last_block.block_hash

        for x in range(0, 10):
            # Create and add a new block, x 10
            tmp_block = self.generate_test_block()
            tmp_block.block_status = BlockStatus.confirmed
            tmp_block.generate_block(chain.last_block)
            chain.add_block(tmp_block)
            logging.debug("New block created, block HASH: %s",
                          chain.last_block.block_hash)

        self.assertNotEqual(genesis_hash, chain.last_block.block_hash,
                            "Failed to add blocks")

        # Add an unconfirmed block
        tmp_block = self.generate_test_block()
        tmp_block.block_status = BlockStatus.unconfirmed
        self.assertRaises(TypeError, chain.add_block, tmp_block)

        # Add the store_data function
        tmp_block.block_status = BlockStatus.confirmed
        tmp_block.generate_block(chain.last_block)
        # Block storage function
        last_block_hash = chain.last_block.block_hash

        chain.add_block(tmp_block)

        self.assertNotEqual(last_block_hash, chain.last_block.block_hash)
        self.assertIsNotNone(chain.last_block)

        # Remove the test DB
        leveldb.DestroyDB(db_name)
Example #13
class BlockManager:
    """Manage the blockchain of a channel. It has objects for consensus and db object.
    """

    MAINNET = "cf43b3fd45981431a0e64f79d07bfcf703e064b73b802c5f32834eec72142190"
    TESTNET = "885b8021826f7e741be7f53bb95b48221e9ab263f377e997b2e47a7b8f4a2a8b"

    def __init__(self, channel_service: 'ChannelService', peer_id: str,
                 channel_name: str, store_id: str):
        self.__channel_service: ChannelService = channel_service
        self.__channel_name = channel_name
        self.__peer_id = peer_id

        self.__tx_queue = AgingCache(
            max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
            default_item_status=TransactionStatusInQueue.normal)
        self.blockchain = BlockChain(channel_name, store_id, self)
        self.__peer_type = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks(self.blockchain)
        self.__block_height_sync_bad_targets = {}
        self.__block_height_sync_lock = threading.Lock()
        self.__block_height_thread_pool: ThreadPoolExecutor = ThreadPoolExecutor(
            1, 'BlockHeightSyncThread')
        self.__block_height_future: Future = None
        self.set_peer_type(loopchain_pb2.PEER)
        self.__service_status = status_code.Service.online

        # old_block_hashes[height][new_block_hash] = old_block_hash
        self.__old_block_hashes: DefaultDict[int,
                                             Dict[Hash32,
                                                  Hash32]] = defaultdict(dict)
        self.epoch: Epoch = None

    @property
    def channel_name(self):
        return self.__channel_name

    @property
    def service_status(self):
        # Return string for compatibility.
        if self.__service_status >= 0:
            return "Service is online: " + \
                   str(1 if self.__channel_service.state_machine.state == "BlockGenerate" else 0)
        else:
            return "Service is offline: " + status_code.get_status_reason(
                self.__service_status)

    def update_service_status(self, status):
        self.__service_status = status

    @property
    def peer_type(self):
        return self.__peer_type

    @property
    def consensus_algorithm(self):
        return self.__consensus_algorithm

    def set_peer_type(self, peer_type):
        self.__peer_type = peer_type

    def set_old_block_hash(self, block_height: int, new_block_hash: Hash32,
                           old_block_hash: Hash32):
        self.__old_block_hashes[block_height][new_block_hash] = old_block_hash

    def get_old_block_hash(self, block_height: int, new_block_hash: Hash32):
        return self.__old_block_hashes[block_height][new_block_hash]

    def pop_old_block_hashes(self, block_height: int):
        self.__old_block_hashes.pop(block_height)

    def get_total_tx(self):
        """
        블럭체인의 Transaction total 리턴합니다.

        :return: 블럭체인안의 transaction total count
        """
        return self.blockchain.total_tx

    def broadcast_send_unconfirmed_block(self, block_: Block, round_: int):
        """broadcast unconfirmed block for getting votes form reps
        """
        last_block: Block = self.blockchain.last_block
        if (self.__channel_service.state_machine.state != "BlockGenerate"
                and last_block.header.height > block_.header.height):
            util.logger.debug(
                f"Last block has reached a sufficient height. Broadcast will stop! ({block_.header.hash.hex()})"
            )
            ConsensusSiever.stop_broadcast_send_unconfirmed_block_timer()
            return

        if last_block.header.revealed_next_reps_hash:
            if block_.header.is_unrecorded:
                self._send_unconfirmed_block(block_,
                                             last_block.header.reps_hash,
                                             round_)
            else:
                self._send_unconfirmed_block(block_, block_.header.reps_hash,
                                             round_)
        else:
            self._send_unconfirmed_block(block_,
                                         ChannelProperty().crep_root_hash,
                                         round_)

    def _send_unconfirmed_block(self, block_: Block, target_reps_hash,
                                round_: int):
        util.logger.debug(
            f"BroadCast AnnounceUnconfirmedBlock "
            f"height({block_.header.height}) round({round_}) block({block_.header.hash}) peers: "
            f"target_reps_hash({target_reps_hash})")

        block_dumped = self.blockchain.block_dumps(block_)
        ObjectManager().channel_service.broadcast_scheduler.schedule_broadcast(
            "AnnounceUnconfirmedBlock",
            loopchain_pb2.BlockSend(block=block_dumped,
                                    round_=round_,
                                    channel=self.__channel_name),
            reps_hash=target_reps_hash)

    def add_tx_obj(self, tx):
        """전송 받은 tx 를 Block 생성을 위해서 큐에 입력한다. load 하지 않은 채 입력한다.

        :param tx: transaction object
        """
        self.__tx_queue[tx.hash.hex()] = tx

    def get_tx(self, tx_hash) -> Transaction:
        """Get transaction from block_db by tx_hash

        :param tx_hash: tx hash
        :return: tx object or None
        """
        return self.blockchain.find_tx_by_key(tx_hash)

    def get_tx_info(self, tx_hash) -> dict:
        """Get transaction info from block_db by tx_hash

        :param tx_hash: tx hash
        :return: {'block_hash': "", 'block_height': "", "transaction": "", "result": {"code": ""}}
        """
        return self.blockchain.find_tx_info(tx_hash)

    def get_invoke_result(self, tx_hash):
        """ get invoke result by tx

        :param tx_hash:
        :return:
        """
        return self.blockchain.find_invoke_result_by_tx_hash(tx_hash)

    def get_tx_queue(self):
        return self.__tx_queue

    def get_count_of_unconfirmed_tx(self):
        """BlockManager 의 상태를 확인하기 위하여 현재 입력된 unconfirmed_tx 의 카운트를 구한다.

        :return: 현재 입력된 unconfirmed tx 의 갯수
        """
        return len(self.__tx_queue)

    async def relay_all_txs(self):
        rs_client = ObjectManager().channel_service.rs_client
        if not rs_client:
            return

        items = list(self.__tx_queue.d.values())
        self.__tx_queue.d.clear()

        for item in items:
            tx = item.value
            if not util.is_in_time_boundary(tx.timestamp,
                                            conf.TIMESTAMP_BOUNDARY_SECOND,
                                            util.get_now_time_stamp()):
                continue

            ts = TransactionSerializer.new(tx.version, tx.type(),
                                           self.blockchain.tx_versioner)
            if tx.version == v2.version:
                rest_method = RestMethod.SendTransaction2
            elif tx.version == v3.version:
                rest_method = RestMethod.SendTransaction3
            else:
                continue

            raw_data = ts.to_raw_data(tx)
            raw_data["from_"] = raw_data.pop("from")
            for i in range(conf.RELAY_RETRY_TIMES):
                try:
                    await rs_client.call_async(
                        rest_method, rest_method.value.params(**raw_data))
                except Exception as e:
                    util.logger.warning(f"Relay failed. Tx({tx}), {e}")
                else:
                    break

    def restore_tx_status(self, tx: Transaction):
        util.logger.debug(f"restore_tx_status() tx : {tx}")
        self.__tx_queue.set_item_status(tx.hash.hex(),
                                        TransactionStatusInQueue.normal)

    def __validate_duplication_of_unconfirmed_block(self,
                                                    unconfirmed_block: Block):
        if self.blockchain.last_block.header.height >= unconfirmed_block.header.height:
            raise InvalidUnconfirmedBlock(
                "The unconfirmed block has a height that was already added.")

        try:
            candidate_block = self.candidate_blocks.blocks[
                unconfirmed_block.header.hash].block
        except KeyError:
            # When an unconfirmed block confirms the previous block, it becomes the last unconfirmed block.
            # But if the block fails verification, it is not added to the candidate blocks.
            candidate_block: Block = self.blockchain.last_unconfirmed_block

        if candidate_block is None or unconfirmed_block.header.hash != candidate_block.header.hash:
            return

        raise DuplicationUnconfirmedBlock(
            "Unconfirmed block has already been added.")

    def __validate_epoch_of_unconfirmed_block(self, unconfirmed_block: Block,
                                              round_: int):
        current_state = self.__channel_service.state_machine.state
        block_header = unconfirmed_block.header
        last_u_block = self.blockchain.last_unconfirmed_block

        if self.epoch.height == block_header.height and self.epoch.round < round_:
            raise InvalidUnconfirmedBlock(
                f"The unconfirmed block has invalid round. Expected({self.epoch.round}), Unconfirmed_block({round_})"
            )

        if not self.epoch.complained_result:
            if last_u_block and (last_u_block.header.hash == block_header.hash
                                 or last_u_block.header.prep_changed):
                # TODO do not validate epoch in this case.
                expected_leader = block_header.peer_id.hex_hx()
            else:
                expected_leader = self.epoch.leader_id

            if expected_leader != block_header.peer_id.hex_hx():
                raise UnexpectedLeader(
                    f"The unconfirmed block({block_header.hash}) is made by an unexpected leader. "
                    f"Expected({expected_leader}), Unconfirmed_block({block_header.peer_id.hex_hx()})"
                )

        if current_state == 'LeaderComplain' and self.epoch.leader_id == block_header.peer_id.hex_hx(
        ):
            raise InvalidUnconfirmedBlock(
                f"The unconfirmed block was made by a complained leader.\n{block_header}"
            )

    def add_unconfirmed_block(self, unconfirmed_block: Block, round_: int):
        """

        :param unconfirmed_block:
        :param round_:
        :return:
        """
        self.__validate_epoch_of_unconfirmed_block(unconfirmed_block, round_)
        self.__validate_duplication_of_unconfirmed_block(unconfirmed_block)

        last_unconfirmed_block: Block = self.blockchain.last_unconfirmed_block

        # TODO After the v0.4 update, remove this version parsing.
        if parse_version(
                unconfirmed_block.header.version) >= parse_version("0.4"):
            ratio = conf.VOTING_RATIO
        else:
            ratio = conf.LEADER_COMPLAIN_RATIO

        if unconfirmed_block.header.reps_hash:
            reps = self.blockchain.find_preps_addresses_by_roothash(
                unconfirmed_block.header.reps_hash)
            version = self.blockchain.block_versioner.get_version(
                unconfirmed_block.header.height)
            leader_votes = Votes.get_leader_votes_class(version)(
                reps, ratio, unconfirmed_block.header.height, None,
                unconfirmed_block.body.leader_votes)
            need_to_confirm = leader_votes.get_result() is None
        elif unconfirmed_block.body.confirm_prev_block:
            need_to_confirm = True
        else:
            need_to_confirm = False

        try:
            if need_to_confirm:
                self.blockchain.confirm_prev_block(unconfirmed_block)
                if unconfirmed_block.header.is_unrecorded:
                    self.blockchain.last_unconfirmed_block = None
                    raise UnrecordedBlock("It's an unnecessary block to vote.")
            elif last_unconfirmed_block is None:
                if self.blockchain.last_block.header.hash != unconfirmed_block.header.prev_hash:
                    raise BlockchainError(
                        f"last block is not previous block. block={unconfirmed_block}"
                    )

                self.blockchain.last_unconfirmed_block = unconfirmed_block
        except BlockchainError as e:
            util.logger.warning(
                f"BlockchainError while confirm_block({e}), retry block_height_sync"
            )
            self.__channel_service.state_machine.block_sync()
            raise InvalidUnconfirmedBlock(e)

    def add_confirmed_block(self, confirmed_block: Block, confirm_info=None):
        if self.__channel_service.state_machine.state != "Watch":
            util.logger.info(
                f"Can't add confirmed block if state is not Watch. {confirmed_block.header.hash.hex()}"
            )
            return

        self.blockchain.add_block(confirmed_block, confirm_info=confirm_info)

    def rebuild_block(self):
        self.blockchain.rebuild_transaction_count()
        self.blockchain.rebuild_made_block_count()
        self.new_epoch()

        nid = self.blockchain.find_nid()
        if nid is None:
            genesis_block = self.blockchain.find_block_by_height(0)
            self.__rebuild_nid(genesis_block)
        else:
            ChannelProperty().nid = nid

    def __rebuild_nid(self, block: Block):
        nid = NID.unknown.value
        if block.header.hash.hex() == BlockManager.MAINNET:
            nid = NID.mainnet.value
        elif block.header.hash.hex() == BlockManager.TESTNET:
            nid = NID.testnet.value
        elif len(block.body.transactions) > 0:
            tx = next(iter(block.body.transactions.values()))
            nid = tx.nid
            if nid is None:
                nid = NID.unknown.value

        if isinstance(nid, int):
            nid = hex(nid)

        self.blockchain.put_nid(nid)
        ChannelProperty().nid = nid

    def block_height_sync(self):
        def _print_exception(fut):
            exc = fut.exception()
            if exc:
                traceback.print_exception(type(exc), exc, exc.__traceback__)

        with self.__block_height_sync_lock:
            need_to_sync = (self.__block_height_future is None
                            or self.__block_height_future.done())

            if need_to_sync:
                self.__channel_service.stop_leader_complain_timer()
                self.__block_height_future = self.__block_height_thread_pool.submit(
                    self.__block_height_sync)
                self.__block_height_future.add_done_callback(_print_exception)
            else:
                util.logger.warning(
                    'Tried block_height_sync. But failed. The thread is already running'
                )

            return need_to_sync, self.__block_height_future

    def __block_request(self, peer_stub, block_height):
        """request block by gRPC or REST

        :param peer_stub:
        :param block_height:
        :return block, max_block_height, confirm_info, response_code
        """
        if ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            return self.__block_request_by_voter(block_height, peer_stub)
        else:
            # request REST(json-rpc) way to RS peer
            return self.__block_request_by_citizen(block_height)

    def __block_request_by_voter(self, block_height, peer_stub):
        response = peer_stub.BlockSync(
            loopchain_pb2.BlockSyncRequest(block_height=block_height,
                                           channel=self.__channel_name),
            conf.GRPC_TIMEOUT)

        if response.response_code == message_code.Response.fail_no_confirm_info:
            raise NoConfirmInfo(
                f"The peer has not confirm_info of the block by height({block_height})."
            )
        else:
            try:
                block = self.blockchain.block_loads(response.block)
            except Exception as e:
                traceback.print_exc()
                raise exception.BlockError(
                    f"Received block is invalid: original exception={e}")

            votes_dumped: bytes = response.confirm_info
            try:
                votes_serialized = json.loads(votes_dumped)
                version = self.blockchain.block_versioner.get_version(
                    block_height)
                votes = Votes.get_block_votes_class(version).deserialize_votes(
                    votes_serialized)
            except json.JSONDecodeError:
                votes = votes_dumped

        return block, response.max_block_height, response.unconfirmed_block_height, votes, response.response_code

    def __block_request_by_citizen(self, block_height):
        rs_client = ObjectManager().channel_service.rs_client
        get_block_result = rs_client.call(
            RestMethod.GetBlockByHeight,
            RestMethod.GetBlockByHeight.value.params(height=str(block_height)))
        last_block = rs_client.call(RestMethod.GetLastBlock)
        if not last_block:
            raise exception.InvalidBlockSyncTarget(
                "The Radiostation may not be ready. It will retry after a while."
            )

        max_height = self.blockchain.block_versioner.get_height(last_block)
        block_version = self.blockchain.block_versioner.get_version(
            block_height)
        block_serializer = BlockSerializer.new(block_version,
                                               self.blockchain.tx_versioner)
        block = block_serializer.deserialize(get_block_result['block'])
        votes_dumped: str = get_block_result.get('confirm_info', '')
        try:
            votes_serialized = json.loads(votes_dumped)
            version = self.blockchain.block_versioner.get_version(block_height)
            votes = Votes.get_block_votes_class(version).deserialize_votes(
                votes_serialized)
        except json.JSONDecodeError:
            votes = votes_dumped
        return block, max_height, -1, votes, message_code.Response.success

    def __start_block_height_sync_timer(self, is_run_at_start=False):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            util.logger.spam(
                f"add timer for block_request_call to radiostation...")
            timer_service.add_timer(
                timer_key,
                Timer(target=timer_key,
                      duration=conf.GET_LAST_BLOCK_TIMER,
                      callback=self.block_height_sync,
                      is_repeat=True,
                      is_run_at_start=is_run_at_start))

    def stop_block_height_sync_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service
        if timer_key in timer_service.timer_list:
            timer_service.stop_timer(timer_key)

    def start_block_generate_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_GENERATE
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            if self.__consensus_algorithm:
                self.__consensus_algorithm.stop()

        self.__consensus_algorithm = ConsensusSiever(self)
        self.__consensus_algorithm.start_timer(timer_service)

    def stop_block_generate_timer(self):
        if self.__consensus_algorithm:
            self.__consensus_algorithm.stop()

    def __add_block_by_sync(self, block_, confirm_info=None):
        util.logger.debug(
            f"__add_block_by_sync :: height({block_.header.height}) hash({block_.header.hash})"
        )

        block_version = self.blockchain.block_versioner.get_version(
            block_.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.blockchain.tx_versioner,
                                           raise_exceptions=False)
        block_verifier.invoke_func = self.blockchain.get_invoke_func(
            block_.header.height)

        reps_getter = self.blockchain.find_preps_addresses_by_roothash
        block_verifier.verify_loosely(block_,
                                      self.blockchain.last_block,
                                      self.blockchain,
                                      reps_getter=reps_getter)
        need_to_write_tx_info, need_to_score_invoke = True, True
        for exc in block_verifier.exceptions:
            if isinstance(exc, TransactionDuplicatedHashError):
                need_to_write_tx_info = False
            if isinstance(exc, ScoreInvokeError) and not need_to_write_tx_info:
                need_to_score_invoke = False

        exc = next((exc for exc in block_verifier.exceptions
                    if not isinstance(exc, TransactionDuplicatedHashError)),
                   None)
        if exc:
            if isinstance(exc, ScoreInvokeError) and not need_to_score_invoke:
                pass
            else:
                raise exc

        if parse_version(block_.header.version) >= parse_version("0.3"):
            reps = reps_getter(block_.header.reps_hash)
            round_ = next(vote for vote in confirm_info if vote).round
            votes = Votes.get_block_votes_class(block_.header.version)(
                reps, conf.VOTING_RATIO, block_.header.height, round_,
                block_.header.hash, confirm_info)
            votes.verify()
        return self.blockchain.add_block(block_, confirm_info,
                                         need_to_write_tx_info,
                                         need_to_score_invoke)

    def __confirm_prev_block_by_sync(self, block_):
        prev_block = self.blockchain.last_unconfirmed_block
        confirm_info = block_.body.confirm_prev_block

        util.logger.debug(
            f"confirm_prev_block_by_sync :: height({prev_block.header.height})"
        )

        block_version = self.blockchain.block_versioner.get_version(
            prev_block.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.blockchain.tx_versioner)
        block_verifier.invoke_func = self.blockchain.get_invoke_func(
            prev_block.header.height)

        reps_getter = self.blockchain.find_preps_addresses_by_roothash
        block_verifier.verify_loosely(prev_block,
                                      self.blockchain.last_block,
                                      self.blockchain,
                                      reps_getter=reps_getter)
        return self.blockchain.add_block(prev_block, confirm_info)

    def __block_request_to_peers_in_sync(self, peer_stubs, my_height,
                                         unconfirmed_block_height, max_height):
        """Extracted func from __block_height_sync.
        It has block request loop with peer_stubs for block height sync.

        :param peer_stubs:
        :param my_height:
        :param unconfirmed_block_height:
        :param max_height:
        :return: my_height, max_height
        """
        peer_index = 0

        while max_height > my_height:
            if self.__channel_service.state_machine.state != 'BlockSync':
                break

            peer_target, peer_stub = peer_stubs[peer_index]
            util.logger.info(
                f"Block Height Sync Target : {peer_target} / request height({my_height + 1})"
            )
            try:
                block, max_block_height, current_unconfirmed_block_height, confirm_info, response_code = \
                    self.__block_request(peer_stub, my_height + 1)
            except NoConfirmInfo as e:
                util.logger.warning(f"{e}")
                response_code = message_code.Response.fail_no_confirm_info
            except Exception as e:
                util.logger.warning(
                    f"There is a bad peer, I hate you: {type(e), e}")
                traceback.print_exc()
                response_code = message_code.Response.fail

            if response_code == message_code.Response.success:
                util.logger.debug(
                    f"try add block height: {block.header.height}")

                max_block_height = max(max_block_height,
                                       current_unconfirmed_block_height)
                if max_block_height > max_height:
                    util.logger.spam(
                        f"set max_height :{max_height} -> {max_block_height}")
                    max_height = max_block_height
                    if current_unconfirmed_block_height == max_block_height:
                        unconfirmed_block_height = current_unconfirmed_block_height

                try:
                    if (max_height == unconfirmed_block_height ==
                            block.header.height and max_height > 0
                            and not confirm_info):
                        self.candidate_blocks.add_block(
                            block,
                            self.blockchain.find_preps_addresses_by_header(
                                block.header))
                        self.blockchain.last_unconfirmed_block = block
                    else:
                        self.__add_block_by_sync(block, confirm_info)

                    if block.header.height == 0:
                        self.__rebuild_nid(block)
                    elif self.blockchain.find_nid() is None:
                        genesis_block = self.blockchain.find_block_by_height(0)
                        self.__rebuild_nid(genesis_block)

                except KeyError as e:
                    util.logger.error(
                        f"{type(e)} during block height sync: {e, e.__traceback__}"
                    )
                    raise
                except exception.BlockError:
                    util.exit_and_msg(
                        "Block Error. Clear all blocks and restart the peer.")
                    raise
                except Exception as e:
                    util.logger.warning(
                        f"fail block height sync: {type(e), e}")

                    if self.blockchain.last_block.header.hash != block.header.prev_hash:
                        raise exception.PreviousBlockMismatch
                    else:
                        self.__block_height_sync_bad_targets[
                            peer_target] = max_block_height
                        raise
                else:
                    peer_index = (peer_index + 1) % len(peer_stubs)
                    my_height += 1
            else:
                if len(peer_stubs) == 1:
                    raise ConnectionError

                peer_index = (peer_index + 1) % len(peer_stubs)

        return my_height, max_height

    def request_rollback(self) -> bool:
        """Request block data rollback behind to 1 block

        :return: if rollback success return True, else return False
        """
        target_block = self.blockchain.find_block_by_hash32(
            self.blockchain.last_block.header.prev_hash)
        if not self.blockchain.check_rollback_possible(target_block):
            util.logger.warning(
                f"request_rollback() The request cannot be "
                f"rolled back to the target block({target_block}).")
            return False

        request_origin = {
            'blockHeight': target_block.header.height,
            'blockHash': target_block.header.hash.hex_0x()
        }

        request = convert_params(request_origin, ParamType.roll_back)
        stub = StubCollection().icon_score_stubs[ChannelProperty().name]

        util.logger.debug(f"request_roll_back() Rollback request({request})")
        response: dict = cast(dict, stub.sync_task().rollback(request))
        try:
            response_to_json_query(response)
        except GenericJsonRpcServerError as e:
            util.logger.warning(f"request_rollback() response error = {e}")
        else:
            result_height = response.get("blockHeight")
            if hex(target_block.header.height) == result_height:
                util.logger.info(
                    f"request_rollback() Rollback Success. result height = {result_height}"
                )
                self.blockchain.rollback(target_block)
                self.rebuild_block()
                return True

        util.logger.warning(
            f"request_rollback() Rollback Fail. response = {response}")
        return False

    def __block_height_sync(self):
        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        try:
            max_height, unconfirmed_block_height, peer_stubs = self.__get_peer_stub_list(
            )

            if self.blockchain.last_unconfirmed_block is not None:
                self.candidate_blocks.remove_block(
                    self.blockchain.last_unconfirmed_block.header.hash)
            self.blockchain.last_unconfirmed_block = None

            my_height = self.blockchain.block_height
            util.logger.debug(
                f"in __block_height_sync max_height({max_height}), my_height({my_height})"
            )

            # prevent_next_block_mismatch until last_block_height in block DB.
            # (excludes last_unconfirmed_block_height)
            self.blockchain.prevent_next_block_mismatch(
                self.blockchain.block_height + 1)
            self.__block_request_to_peers_in_sync(peer_stubs, my_height,
                                                  unconfirmed_block_height,
                                                  max_height)
        except exception.PreviousBlockMismatch as e:
            util.logger.warning(
                f"There is a previous block hash mismatch! :: {type(e)}, {e}")
            self.request_rollback()
            self.__start_block_height_sync_timer(is_run_at_start=True)
        except Exception as e:
            util.logger.warning(
                f"exception during block_height_sync :: {type(e)}, {e}")
            traceback.print_exc()
            self.__start_block_height_sync_timer()
        else:
            util.logger.debug(f"block_height_sync is complete.")
            self.__channel_service.state_machine.complete_sync()

    def get_next_leader(self) -> Optional[str]:
        """get next leader from last_block of BlockChain. for new_epoch and set_peer_type_in_channel

        :return:
        """

        block = self.blockchain.last_block

        if block.header.prep_changed_reason is NextRepsChangeReason.TermEnd:
            next_leader = self.blockchain.get_first_leader_of_next_reps(block)
        elif self.blockchain.made_block_count_reached_max(block):
            reps_hash = block.header.revealed_next_reps_hash or ChannelProperty(
            ).crep_root_hash
            reps = self.blockchain.find_preps_addresses_by_roothash(reps_hash)
            next_leader = self.blockchain.get_next_rep_string_in_reps(
                block.header.peer_id, reps)

            if next_leader is None:
                next_leader = self.__get_next_leader_by_block(block)
        else:
            next_leader = self.__get_next_leader_by_block(block)

        util.logger.spam(
            f"next_leader({next_leader}) from block({block.header.height})")
        return next_leader

    def __get_next_leader_by_block(self, block: Block) -> str:
        if block.header.next_leader is None:
            if block.header.peer_id:
                return block.header.peer_id.hex_hx()
            else:
                return ExternalAddress.empty().hex_hx()
        else:
            return block.header.next_leader.hex_hx()

    def __get_peer_stub_list(self) -> Tuple[int, int, List[Tuple]]:
        """It updates peer list for block manager refer to peer list on the loopchain network.
        This peer list is not same to the peer list of the loopchain network.

        :return max_height: a height of current blockchain
        :return unconfirmed_block_height: unconfirmed_block_height on the network
        :return peer_stubs: current peer list on the network (target, peer_stub)
        """
        max_height = -1  # current max height
        unconfirmed_block_height = -1
        peer_stubs = []  # peer stub list for block height synchronization

        if not ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            rs_client = ObjectManager().channel_service.rs_client
            status_response = rs_client.call(RestMethod.Status)
            max_height = status_response['block_height']
            peer_stubs.append((rs_client.target, rs_client))
            return max_height, unconfirmed_block_height, peer_stubs

        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        self.__block_height_sync_bad_targets = {
            k: v
            for k, v in self.__block_height_sync_bad_targets.items()
            if v > self.blockchain.block_height
        }
        util.logger.info(
            f"Bad Block Sync Peer : {self.__block_height_sync_bad_targets}")
        peer_target = ChannelProperty().peer_target
        my_height = self.blockchain.block_height

        if self.blockchain.last_block:
            reps_hash = self.blockchain.get_reps_hash_by_header(
                self.blockchain.last_block.header)
        else:
            reps_hash = ChannelProperty().crep_root_hash
        rep_targets = self.blockchain.find_preps_targets_by_roothash(reps_hash)
        target_list = list(rep_targets.values())
        for target in target_list:
            if target == peer_target:
                continue
            if target in self.__block_height_sync_bad_targets:
                continue
            util.logger.debug(f"try to target({target})")
            channel = GRPCHelper().create_client_channel(target)
            stub = loopchain_pb2_grpc.PeerServiceStub(channel)
            try:
                response = stub.GetStatus(
                    loopchain_pb2.StatusRequest(
                        request='block_sync',
                        channel=self.__channel_name,
                    ), conf.GRPC_TIMEOUT_SHORT)
                target_block_height = max(response.block_height,
                                          response.unconfirmed_block_height)

                if target_block_height > my_height:
                    peer_stubs.append((target, stub))
                    max_height = max(max_height, target_block_height)
                    unconfirmed_block_height = max(
                        unconfirmed_block_height,
                        response.unconfirmed_block_height)

            except Exception as e:
                util.logger.warning(
                    f"This peer has already been removed from the block height target node. {e}"
                )

        return max_height, unconfirmed_block_height, peer_stubs

    def new_epoch(self):
        new_leader_id = self.get_next_leader()
        self.epoch = Epoch(self, new_leader_id)
        util.logger.info(
            f"Epoch height({self.epoch.height}), leader ({self.epoch.leader_id})"
        )

    def stop(self):
        self.__block_height_thread_pool.shutdown()

        if self.consensus_algorithm:
            self.consensus_algorithm.stop()

        # Close the store (aka leveldb) only after all threads are cleaned up,
        # because hard crashes may occur otherwise.
        # https://plyvel.readthedocs.io/en/latest/api.html#DB.close
        self.blockchain.close_blockchain_store()

    def add_complain(self, vote: LeaderVote):
        util.logger.spam(f"add_complain vote({vote})")

        if not self.epoch:
            util.logger.debug(f"Epoch is not initialized.")
            return

        if self.epoch.height == vote.block_height:
            if self.epoch.round == vote.round:
                self.epoch.add_complain(vote)
                elected_leader = self.epoch.complain_result()
                if elected_leader:
                    self.__channel_service.reset_leader(elected_leader,
                                                        complained=True)
            elif self.epoch.round > vote.round:
                if vote.new_leader != ExternalAddress.empty():
                    self.__send_fail_leader_vote(vote)
                else:
                    return
            else:
                # TODO: do round sync
                return
        elif self.epoch.height < vote.block_height:
            self.__channel_service.state_machine.block_sync()

    def __send_fail_leader_vote(self, leader_vote: LeaderVote):
        version = self.blockchain.block_versioner.get_version(
            leader_vote.block_height)
        fail_vote = Vote.get_leader_vote_class(version).new(
            signer=ChannelProperty().peer_auth,
            block_height=leader_vote.block_height,
            round_=leader_vote.round,
            old_leader=leader_vote.old_leader,
            new_leader=ExternalAddress.empty(),
            timestamp=util.get_time_stamp())

        fail_vote_dumped = json.dumps(fail_vote.serialize())
        request = loopchain_pb2.ComplainLeaderRequest(
            complain_vote=fail_vote_dumped, channel=self.channel_name)

        reps_hash = self.blockchain.last_block.header.revealed_next_reps_hash or ChannelProperty(
        ).crep_root_hash
        rep_id = leader_vote.rep.hex_hx()
        target = self.blockchain.find_preps_targets_by_roothash(
            reps_hash)[rep_id]

        util.logger.debug(f"fail leader complain "
                          f"complained_leader_id({leader_vote.old_leader}), "
                          f"new_leader_id({ExternalAddress.empty()}),"
                          f"round({leader_vote.round}),"
                          f"target({target})")

        self.__channel_service.broadcast_scheduler.schedule_send_failed_leader_complain(
            "ComplainLeader", request, target=target)

    def get_leader_ids_for_complaint(self) -> Tuple[str, str]:
        """
        :return: Return complained_leader_id and new_leader_id for the Leader Complaint.
        """
        complained_leader_id = self.epoch.leader_id

        new_leader = self.blockchain.get_next_rep_in_reps(
            ExternalAddress.fromhex(complained_leader_id), self.epoch.reps)
        new_leader_id = new_leader.hex_hx() if new_leader else None

        if not isinstance(new_leader_id, str):
            new_leader_id = ""

        if not isinstance(complained_leader_id, str):
            complained_leader_id = ""

        return complained_leader_id, new_leader_id

    def leader_complain(self):
        complained_leader_id, new_leader_id = self.get_leader_ids_for_complaint(
        )
        version = self.blockchain.block_versioner.get_version(
            self.epoch.height)
        leader_vote = Vote.get_leader_vote_class(version).new(
            signer=ChannelProperty().peer_auth,
            block_height=self.epoch.height,
            round_=self.epoch.round,
            old_leader=ExternalAddress.fromhex_address(complained_leader_id),
            new_leader=ExternalAddress.fromhex_address(new_leader_id),
            timestamp=util.get_time_stamp())
        util.logger.info(
            f"LeaderVote : old_leader({complained_leader_id}), new_leader({new_leader_id}), round({self.epoch.round})"
        )
        self.add_complain(leader_vote)

        leader_vote_serialized = leader_vote.serialize()
        leader_vote_dumped = json.dumps(leader_vote_serialized)
        request = loopchain_pb2.ComplainLeaderRequest(
            complain_vote=leader_vote_dumped, channel=self.channel_name)

        util.logger.debug(f"leader complain "
                          f"complained_leader_id({complained_leader_id}), "
                          f"new_leader_id({new_leader_id})")

        reps_hash = self.blockchain.get_next_reps_hash_by_header(
            self.blockchain.last_block.header)
        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "ComplainLeader", request, reps_hash=reps_hash)

    def vote_unconfirmed_block(self, block: Block, round_: int, is_validated):
        util.logger.debug(
            f"vote_unconfirmed_block() ({block.header.height}/{block.header.hash}/{is_validated})"
        )
        vote = Vote.get_block_vote_class(block.header.version).new(
            signer=ChannelProperty().peer_auth,
            block_height=block.header.height,
            round_=round_,
            block_hash=block.header.hash if is_validated else Hash32.empty(),
            timestamp=util.get_time_stamp())
        self.candidate_blocks.add_vote(vote)

        vote_serialized = vote.serialize()
        vote_dumped = json.dumps(vote_serialized)
        block_vote = loopchain_pb2.BlockVote(vote=vote_dumped,
                                             channel=ChannelProperty().name)

        target_reps_hash = block.header.reps_hash or ChannelProperty(
        ).crep_root_hash

        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "VoteUnconfirmedBlock", block_vote, reps_hash=target_reps_hash)

        return vote

    def verify_confirm_info(self, unconfirmed_block: Block):
        unconfirmed_header = unconfirmed_block.header
        my_height = self.blockchain.block_height
        if my_height < (unconfirmed_header.height - 2):
            raise ConfirmInfoInvalidNeedBlockSync(
                f"trigger block sync: my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height})"
            )

        is_rep = ObjectManager().channel_service.is_support_node_function(
            conf.NodeFunction.Vote)
        if is_rep and my_height == unconfirmed_header.height - 2 and not self.blockchain.last_unconfirmed_block:
            raise ConfirmInfoInvalidNeedBlockSync(
                f"trigger block sync: my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height})"
            )

        # a block with the same height as the unconfirmed block has already been added
        if my_height >= unconfirmed_header.height:
            raise ConfirmInfoInvalidAddedBlock(
                f"block is already added my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height})"
            )

        block_verifier = BlockVerifier.new(unconfirmed_header.version,
                                           self.blockchain.tx_versioner)
        prev_block = self.blockchain.get_prev_block(unconfirmed_block)
        reps_getter = self.blockchain.find_preps_addresses_by_roothash

        util.logger.spam(
            f"prev_block: {prev_block.header.hash if prev_block else None}")
        if not prev_block:
            raise NotReadyToConfirmInfo(
                "There is no prev block, or the node is not ready to confirm the block (maybe the node is starting)"
            )

        try:
            if prev_block and prev_block.header.reps_hash and unconfirmed_header.height > 1:
                prev_reps = reps_getter(prev_block.header.reps_hash)
                block_verifier.verify_prev_votes(unconfirmed_block, prev_reps)
        except Exception as e:
            util.logger.warning(e)
            traceback.print_exc()
            raise ConfirmInfoInvalid(
                "Unconfirmed block has no valid confirm info for previous block"
            )

    async def _vote(self, unconfirmed_block: Block, round_: int):
        exc = None
        try:
            block_version = self.blockchain.block_versioner.get_version(
                unconfirmed_block.header.height)
            block_verifier = BlockVerifier.new(block_version,
                                               self.blockchain.tx_versioner)
            block_verifier.invoke_func = self.blockchain.score_invoke
            reps_getter = self.blockchain.find_preps_addresses_by_roothash

            util.logger.debug(
                f"unconfirmed_block.header({unconfirmed_block.header})")

            block_verifier.verify(
                unconfirmed_block,
                self.blockchain.last_block,
                self.blockchain,
                generator=self.blockchain.get_expected_generator(
                    unconfirmed_block),
                reps_getter=reps_getter)
        except NotInReps as e:
            util.logger.debug(
                f"in _vote Not In Reps({e}) state({self.__channel_service.state_machine.state})"
            )
        except BlockHeightMismatch as e:
            exc = e
            util.logger.warning(
                f"Don't vote to the block of unexpected height.\n{e}")
        except Exception as e:
            exc = e
            util.logger.error(e)
            traceback.print_exc()
        else:
            self.candidate_blocks.add_block(
                unconfirmed_block,
                self.blockchain.find_preps_addresses_by_header(
                    unconfirmed_block.header))
        finally:
            if isinstance(exc, BlockHeightMismatch):
                return

            is_validated = exc is None
            vote = self.vote_unconfirmed_block(unconfirmed_block, round_,
                                               is_validated)
            if self.__channel_service.state_machine.state == "BlockGenerate" and self.consensus_algorithm:
                self.consensus_algorithm.vote(vote)

    async def vote_as_peer(self, unconfirmed_block: Block, round_: int):
        """Vote to AnnounceUnconfirmedBlock
        """
        util.logger.debug(
            f"in vote_as_peer "
            f"height({unconfirmed_block.header.height}) "
            f"round({round_}) "
            f"unconfirmed_block({unconfirmed_block.header.hash.hex()})")

        try:
            self.add_unconfirmed_block(unconfirmed_block, round_)
        except InvalidUnconfirmedBlock as e:
            self.candidate_blocks.remove_block(unconfirmed_block.header.hash)
            util.logger.warning(e)
        except RoundMismatch as e:
            self.candidate_blocks.remove_block(
                unconfirmed_block.header.prev_hash)
            util.logger.warning(e)
        except UnrecordedBlock as e:
            util.logger.info(e)
        except DuplicationUnconfirmedBlock as e:
            util.logger.debug(e)
            await self._vote(unconfirmed_block, round_)
        else:
            await self._vote(unconfirmed_block, round_)
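
In add_unconfirmed_block above, the leader-vote ratio is chosen from the block's version string with parse_version. A minimal sketch of that selection, assuming the packaging library is available (the ratio constants are stand-ins for conf.VOTING_RATIO and conf.LEADER_COMPLAIN_RATIO, which are not shown in this listing):

from packaging.version import parse as parse_version  # assumed import; loopchain may obtain parse_version differently

VOTING_RATIO = 0.67           # stand-in for conf.VOTING_RATIO
LEADER_COMPLAIN_RATIO = 0.51  # stand-in for conf.LEADER_COMPLAIN_RATIO

def select_vote_ratio(block_version: str) -> float:
    # Blocks at version 0.4 or later use the voting ratio; older blocks use the leader-complain ratio.
    if parse_version(block_version) >= parse_version("0.4"):
        return VOTING_RATIO
    return LEADER_COMPLAIN_RATIO

assert select_vote_ratio("0.5") == VOTING_RATIO
assert select_vote_ratio("0.3") == LEADER_COMPLAIN_RATIO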
Example #14
class BlockManager:
    """Manage the blockchain of a channel. It has objects for consensus and db object.
    """

    MAINNET = "cf43b3fd45981431a0e64f79d07bfcf703e064b73b802c5f32834eec72142190"
    TESTNET = "885b8021826f7e741be7f53bb95b48221e9ab263f377e997b2e47a7b8f4a2a8b"

    def __init__(self, name: str, channel_manager, peer_id, channel_name,
                 level_db_identity):
        self.__channel_service: ChannelService = channel_manager
        self.__channel_name = channel_name
        self.__pre_validate_strategy = self.__pre_validate
        self.__peer_id = peer_id
        self.__level_db = None
        self.__level_db_path = ""
        self.__level_db, self.__level_db_path = util.init_level_db(
            level_db_identity=f"{level_db_identity}_{channel_name}",
            allow_rename_path=False)
        self.__txQueue = AgingCache(
            max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
            default_item_status=TransactionStatusInQueue.normal)
        self.__unconfirmedBlockQueue = queue.Queue()
        self.__blockchain = BlockChain(self.__level_db, channel_name)
        self.__peer_type = None
        self.__consensus = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks()
        self.__block_height_sync_lock = threading.Lock()
        self.__block_height_thread_pool = ThreadPoolExecutor(
            1, 'BlockHeightSyncThread')
        self.__block_height_future: Future = None
        self.__subscribe_target_peer_stub = None
        self.__block_generation_scheduler = BlockGenerationScheduler(
            self.__channel_name)
        self.__precommit_block: Block = None
        self.set_peer_type(loopchain_pb2.PEER)
        self.name = name
        self.__service_status = status_code.Service.online

        self.epoch: Epoch = None

    @property
    def channel_name(self):
        return self.__channel_name

    @property
    def service_status(self):
        # Return string for compatibility.
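        # e.g. "Service is online: 1" while this peer is in the BlockGenerate state,
        # "Service is online: 0" otherwise, or "Service is offline: <reason>" when offline.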
        if self.__service_status >= 0:
            return "Service is online: " + \
                   str(1 if self.__channel_service.state_machine.state == "BlockGenerate" else 0)
        else:
            return "Service is offline: " + status_code.get_status_reason(
                self.__service_status)

    def init_epoch(self):
        """Call this after peer list update

        :return:
        """
        self.epoch = Epoch(self.__blockchain.last_block.header.height +
                           1 if self.__blockchain.last_block else 1)

    def update_service_status(self, status):
        self.__service_status = status
        StubCollection().peer_stub.sync_task().update_status(
            self.__channel_name, {"status": self.service_status})

    @property
    def peer_type(self):
        return self.__peer_type

    @property
    def made_block_count(self):
        if self.__consensus_algorithm:
            return self.__consensus_algorithm.made_block_count
        return 0

    @property
    def consensus(self):
        return self.__consensus

    @consensus.setter
    def consensus(self, consensus):
        self.__consensus = consensus

    @property
    def consensus_algorithm(self):
        return self.__consensus_algorithm

    @property
    def precommit_block(self):
        return self.__precommit_block

    @precommit_block.setter
    def precommit_block(self, block):
        self.__precommit_block = block

    @property
    def block_generation_scheduler(self):
        return self.__block_generation_scheduler

    @property
    def subscribe_target_peer_stub(self):
        return self.__subscribe_target_peer_stub

    def get_level_db(self):
        return self.__level_db

    def clear_all_blocks(self):
        logging.debug(f"clear level db({self.__level_db_path})")
        shutil.rmtree(self.__level_db_path)

    def set_peer_type(self, peer_type):
        self.__peer_type = peer_type

    async def __create_block_generation_schedule(self):
        # util.logger.spam(f"__create_block_generation_schedule:: CREATE BLOCK GENERATION SCHEDULE")
        if conf.CONSENSUS_ALGORITHM == conf.ConsensusAlgorithm.lft:
            Schedule = namedtuple("Schedule", "callback kwargs")
            schedule = Schedule(self.__consensus_algorithm.consensus, {})
            self.__block_generation_scheduler.add_schedule(schedule)
        else:
            await self.__consensus_algorithm.consensus()

    def set_invoke_results(self, block_hash, invoke_results):
        self.__blockchain.set_invoke_results(block_hash, invoke_results)

    def get_total_tx(self):
        """
        Return the total number of transactions in the blockchain.

        :return: total transaction count in the blockchain
        """
        return self.__blockchain.total_tx

    def get_blockchain(self):
        return self.__blockchain

    def pre_validate(self, tx: Transaction):
        return self.__pre_validate_strategy(tx)

    def __pre_validate(self, tx: Transaction):
        if tx.hash.hex() in self.__txQueue:
            raise TransactionInvalidDuplicatedHash(tx.hash.hex())

        if not util.is_in_time_boundary(tx.timestamp,
                                        conf.ALLOW_TIMESTAMP_BOUNDARY_SECOND):
            raise TransactionInvalidOutOfTimeBound(tx.hash.hex(), tx.timestamp,
                                                   util.get_now_time_stamp())

    def __pre_validate_pass(self, tx: Transaction):
        pass

    def broadcast_send_unconfirmed_block(self, block_: Block):
        """생성된 unconfirmed block 을 피어들에게 broadcast 하여 검증을 요청한다.
        """
        if self.__channel_service.state_machine.state == "BlockGenerate":
            logging.debug(
                f"BroadCast AnnounceUnconfirmedBlock "
                f"height({block_.header.height}) block({block_.header.hash}) peers: "
                f"{ObjectManager().channel_service.peer_manager.get_peer_count()}"
            )

            # util.logger.spam(f'block_manager:zip_test num of tx is {block_.confirmed_tx_len}')
            block_dump = util.block_dumps(block_)

            ObjectManager(
            ).channel_service.broadcast_scheduler.schedule_broadcast(
                "AnnounceUnconfirmedBlock",
                loopchain_pb2.BlockSend(block=block_dump,
                                        channel=self.__channel_name))

    def add_tx_obj(self, tx):
        """전송 받은 tx 를 Block 생성을 위해서 큐에 입력한다. load 하지 않은 채 입력한다.

        :param tx: transaction object
        """
        self.__txQueue[tx.hash.hex()] = tx

    def get_tx(self, tx_hash) -> Transaction:
        """Get transaction from block_db by tx_hash

        :param tx_hash: tx hash
        :return: tx object or None
        """
        return self.__blockchain.find_tx_by_key(tx_hash)

    def get_tx_info(self, tx_hash) -> dict:
        """Get transaction info from block_db by tx_hash

        :param tx_hash: tx hash
        :return: {'block_hash': "", 'block_height': "", "transaction": "", "result": {"code": ""}}
        """
        return self.__blockchain.find_tx_info(tx_hash)

    def get_invoke_result(self, tx_hash):
        """ get invoke result by tx

        :param tx_hash:
        :return:
        """
        return self.__blockchain.find_invoke_result_by_tx_hash(tx_hash)

    def get_tx_queue(self):
        if conf.CONSENSUS_ALGORITHM == conf.ConsensusAlgorithm.lft:
            return self.__consensus.get_tx_queue()

        return self.__txQueue

    def get_count_of_unconfirmed_tx(self):
        """BlockManager 의 상태를 확인하기 위하여 현재 입력된 unconfirmed_tx 의 카운트를 구한다.

        :return: 현재 입력된 unconfirmed tx 의 갯수
        """
        return len(self.__txQueue)

    def confirm_prev_block(self, current_block: Block):
        try:
            self.__blockchain.confirm_prev_block(current_block)
        except BlockchainError as e:
            logging.warning(
                f"BlockchainError while confirm_block({e}), retry block_height_sync"
            )
            self.block_height_sync()

    def add_unconfirmed_block(self, unconfirmed_block):
        logging.info(
            f"unconfirmed_block {unconfirmed_block.header.height}, {unconfirmed_block.body.confirm_prev_block}"
        )
        # util.logger.debug(f"-------------------add_unconfirmed_block---before confirm_prev_block, "
        #                    f"tx count({len(unconfirmed_block.body.transactions)}), "
        #                    f"height({unconfirmed_block.header.height})")
        if unconfirmed_block.body.confirm_prev_block:
            self.confirm_prev_block(unconfirmed_block)

        self.epoch.set_epoch_leader(
            unconfirmed_block.header.next_leader.hex_hx())

        self.__unconfirmedBlockQueue.put(unconfirmed_block)

    def add_confirmed_block(self, confirmed_block: Block):
        result = self.__blockchain.add_block(confirmed_block)
        if not result:
            self.block_height_sync(target_peer_stub=ObjectManager().
                                   channel_service.radio_station_stub)

    # TODO The current block height sync message does not include voting.
    #  You need to change it and remove the default None parameter here.
    def add_block(self, block_: Block, vote_: Vote = None) -> bool:
        """

        :param block_: block to add
        :param vote_: additional info for this block; it comes from the next block
        :return:
        """
        result = self.__blockchain.add_block(block_, vote_)

        last_block = self.__blockchain.last_block

        peer_id = ChannelProperty().peer_id
        util.apm_event(
            peer_id, {
                'event_type': 'TotalTx',
                'peer_id': peer_id,
                'peer_name': conf.PEER_NAME,
                'channel_name': self.__channel_name,
                'data': {
                    'block_hash': block_.header.hash.hex(),
                    'total_tx': self.__blockchain.total_tx
                }
            })

        return result

    def rebuild_block(self):
        self.__blockchain.rebuild_transaction_count()

        nid = self.get_blockchain().find_nid()
        if nid is None:
            genesis_block = self.get_blockchain().find_block_by_height(0)
            self.__rebuild_nid(genesis_block)
        else:
            ChannelProperty().nid = nid

    def __rebuild_nid(self, block: Block):
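        # Derive the network id (NID) from the genesis block: the known MAINNET/TESTNET
        # genesis hashes map to fixed NIDs; otherwise fall back to the nid of the first genesis tx.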
        nid = NID.unknown.value
        if block.header.hash.hex() == BlockManager.MAINNET:
            nid = NID.mainnet.value
        elif block.header.hash.hex() == BlockManager.TESTNET:
            nid = NID.testnet.value
        elif len(block.body.transactions) > 0:
            tx = next(iter(block.body.transactions.values()))
            nid = tx.nid
            if nid is None:
                nid = NID.unknown.value

        if isinstance(nid, int):
            nid = hex(nid)

        self.get_blockchain().put_nid(nid)
        ChannelProperty().nid = nid

    def block_height_sync(self, target_peer_stub=None):
        with self.__block_height_sync_lock:
            need_to_sync = (self.__block_height_future is None
                            or self.__block_height_future.done())

            if need_to_sync:
                self.__block_height_future = self.__block_height_thread_pool.submit(
                    self.__block_height_sync, target_peer_stub)
            else:
                logging.warning(
                    'Tried block_height_sync. But failed. The thread is already running'
                )

            return need_to_sync, self.__block_height_future

    def __block_request(self, peer_stub, block_height):
        """request block by gRPC or REST

        :param peer_stub:
        :param block_height:
        :return block, max_block_height, response_code
        """
        if ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            response = peer_stub.BlockSync(
                loopchain_pb2.BlockSyncRequest(block_height=block_height,
                                               channel=self.__channel_name),
                conf.GRPC_TIMEOUT)
            return util.block_loads(
                response.block
            ), response.max_block_height, response.response_code
        else:
            # request REST(json-rpc) way to radiostation (mother peer)
            return self.__block_request_by_citizen(
                block_height,
                ObjectManager().channel_service.radio_station_stub)

    def __block_request_by_citizen(self, block_height, rs_rest_stub):
        try:
            get_block_result = rs_rest_stub.call("GetBlockByHeight", {
                'channel': self.__channel_name,
                'height': str(block_height)
            })
            max_height_result = rs_rest_stub.call("Status")

            if max_height_result.status_code != 200:
                raise ConnectionError

            block_version = self.get_blockchain().block_versioner.get_version(
                block_height)
            block_serializer = BlockSerializer.new(
                block_version,
                self.get_blockchain().tx_versioner)
            block = block_serializer.deserialize(get_block_result['block'])

            return block, json.loads(
                max_height_result.text
            )['block_height'], message_code.Response.success

        except ReceivedErrorResponse as e:
            rs_rest_stub.update_methods_version()
            return self.__block_request_by_citizen(block_height, rs_rest_stub)

    def __precommit_block_request(self, peer_stub, last_block_height):
        """request precommit block by gRPC

        :param peer_stub:
        :param last_block_height:
        :return block, max_block_height, response_code
        """
        response = peer_stub.GetPrecommitBlock(
            loopchain_pb2.PrecommitBlockRequest(
                last_block_height=last_block_height,
                channel=self.__channel_name), conf.GRPC_TIMEOUT)

        if response.block == b"":
            return None, response.response_code, response.response_message
        else:
            precommit_block = pickle.loads(response.block)
            # util.logger.spam(
            #     f"GetPrecommitBlock:response::{response.response_code}/{response.response_message}/"
            #     f"{precommit_block}/{precommit_block.confirmed_transaction_list}")
            return precommit_block, response.response_code, response.response_message

    def __start_block_height_sync_timer(self, target_peer_stub):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            util.logger.spam(
                f"add timer for block_request_call to radiostation...")
            timer_service.add_timer(
                timer_key,
                Timer(target=timer_key,
                      duration=conf.GET_LAST_BLOCK_TIMER,
                      is_repeat=True,
                      callback=self.block_height_sync,
                      callback_kwargs={'target_peer_stub': target_peer_stub}))

    def stop_block_height_sync_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service
        if timer_key in timer_service.timer_list:
            timer_service.stop_timer(timer_key)

    def start_block_generate_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_GENERATE
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            if self.__consensus_algorithm:
                self.__consensus_algorithm.stop()

        self.__consensus_algorithm = ConsensusSiever(self)
        self.__consensus_algorithm.start_timer(timer_service)

    def stop_block_generate_timer(self):
        if self.__consensus_algorithm:
            self.__consensus_algorithm.stop()

    def __current_block_height(self):
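        # Treat the last unconfirmed block as the current height when it sits exactly
        # one above the committed block height.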
        if self.__blockchain.last_unconfirmed_block and \
                self.__blockchain.last_unconfirmed_block.header.height == self.__blockchain.block_height + 1:
            return self.__blockchain.block_height + 1
        else:
            return self.__blockchain.block_height

    def __add_block_by_sync(self, block_):
        commit_state = block_.header.commit_state
        logging.debug(
            f"block_manager.py >> block_height_sync :: "
            f"height({block_.header.height}) commit_state({commit_state})")

        block_version = self.get_blockchain().block_versioner.get_version(
            block_.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.get_blockchain().tx_versioner)
        if block_.header.height == 0:
            block_verifier.invoke_func = self.__channel_service.genesis_invoke
        else:
            block_verifier.invoke_func = self.__channel_service.score_invoke
        invoke_results = block_verifier.verify_loosely(
            block_, self.__blockchain.last_block, self.__blockchain)
        self.__blockchain.set_invoke_results(block_.header.hash.hex(),
                                             invoke_results)
        return self.add_block(block_)

    def __block_height_sync(self, target_peer_stub=None, target_height=None):
        """synchronize block height with other peers"""
        channel_service = ObjectManager().channel_service
        peer_manager = channel_service.peer_manager

        if target_peer_stub is None:
            target_peer_stub = peer_manager.get_leader_stub_manager()
        self.__subscribe_target_peer_stub = target_peer_stub

        # The adjustment of block height and the process for data synchronization of peer
        # === Love&Hate Algorithm === #
        util.logger.debug("try block height sync...with love&hate")

        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        # max_height: current max height
        # peer_stubs: peer stub list for block height synchronization
        max_height, peer_stubs = self.__get_peer_stub_list(target_peer_stub)
        if target_height is not None:
            max_height = target_height

        my_height = self.__current_block_height()
        retry_number = 0
        util.logger.spam(
            f"block_manager:block_height_sync my_height({my_height})")

        if len(peer_stubs) == 0:
            util.logger.warning(
                "peer_service:block_height_sync there is no other peer to height sync!"
            )
            return False

        logging.info(
            f"In block height sync max: {max_height} yours: {my_height}")

        self.get_blockchain().prevent_next_block_mismatch(
            self.__blockchain.block_height)

        try:
            while max_height > my_height:
                for peer_stub in peer_stubs:
                    response_code = message_code.Response.fail
                    try:
                        block, max_block_height, response_code = self.__block_request(
                            peer_stub, my_height + 1)
                    except Exception as e:
                        logging.warning("There is a bad peer, I hate you: " +
                                        str(e))
                        traceback.print_exc()

                    if response_code == message_code.Response.success:
                        logging.debug(
                            f"try add block height: {block.header.height}")

                        try:
                            result = False
                            if max_height > 0 and max_height == block.header.height:
                                self.candidate_blocks.add_block(block)
                                self.__blockchain.last_unconfirmed_block = block
                                result = True
                            else:
                                result = self.__add_block_by_sync(block)

                            if result:
                                if block.header.height == 0:
                                    self.__rebuild_nid(block)
                                elif self.__blockchain.find_nid() is None:
                                    genesis_block = self.get_blockchain(
                                    ).find_block_by_height(0)
                                    self.__rebuild_nid(genesis_block)

                        except KeyError as e:
                            result = False
                            logging.error("fail block height sync: " + str(e))
                            break
                        except exception.BlockError:
                            result = False
                            logging.error(
                                "Block Error Clear all block and restart peer."
                            )
                            self.clear_all_blocks()
                            util.exit_and_msg(
                                "Block Error Clear all block and restart peer."
                            )
                            break
                        finally:
                            if result:
                                my_height += 1
                                retry_number = 0
                            else:
                                retry_number += 1
                                logging.warning(
                                    f"Block height({my_height}) synchronization is fail. "
                                    f"{retry_number}/{conf.BLOCK_SYNC_RETRY_NUMBER}"
                                )
                                if retry_number >= conf.BLOCK_SYNC_RETRY_NUMBER:
                                    util.exit_and_msg(
                                        f"This peer already tried to synchronize {my_height} block "
                                        f"for max retry number({conf.BLOCK_SYNC_RETRY_NUMBER}). "
                                        f"Peer will be down.")

                        if target_height is None:
                            if max_block_height > max_height:
                                util.logger.spam(
                                    f"set max_height :{max_height} -> {max_block_height}"
                                )
                                max_height = max_block_height
                    else:
                        peer_stubs.remove(peer_stub)
                        logging.warning(
                            f"Not responding peer({peer_stub}) is removed from the peer stubs target."
                        )

                        if len(peer_stubs) < 1:
                            raise ConnectionError
        except Exception as e:
            logging.warning(f"block_manager.py >>> block_height_sync :: {e}")
            traceback.print_exc()
            self.__start_block_height_sync_timer(target_peer_stub)
            return False

        if my_height >= max_height:
            util.logger.debug(f"block_manager:block_height_sync is complete.")
            self.epoch.set_epoch_leader(
                self.__channel_service.peer_manager.get_leader_id(
                    conf.ALL_GROUP_ID))
            self.__channel_service.state_machine.subscribe_network()
        else:
            logging.warning(
                f"it's not completed block height synchronization in once ...\n"
                f"try block_height_sync again... my_height({my_height}) in channel({self.__channel_name})"
            )
            self.__channel_service.state_machine.block_sync()

        if conf.CONSENSUS_ALGORITHM == conf.ConsensusAlgorithm.lft \
                and channel_service.is_support_node_function(conf.NodeFunction.Vote):
            last_block = self.__blockchain.last_block
            precommit_block = None
            for peer_stub in peer_stubs:
                if peer_stub is not None:
                    precommit_block, response_code, response_message = \
                        self.__precommit_block_request(peer_stub, last_block.height)
                    util.logger.spam(
                        f"block_manager:block_height_sync::precommit_block("
                        f"{precommit_block if precommit_block else None})")
                    break

            if precommit_block:
                if last_block.height + 1 == precommit_block.height:
                    self.__blockchain.invoke_for_precommit(precommit_block)
                    self.__channel_service.score_write_precommit_state(
                        precommit_block)
                    self.__blockchain.put_precommit_block(precommit_block)
                    self.__precommit_block = precommit_block
                    self.consensus.leader_id = precommit_block.peer_id
                    self.consensus.precommit_block = None
                    util.logger.spam(
                        f"set precommit bock {self.__precommit_block.block_hash}/"
                        f"{self.__precommit_block.height} after block height synchronization."
                    )
                    self.__consensus.change_epoch(
                        prev_epoch=None,
                        precommit_block=self.__precommit_block)
                else:
                    util.logger.warning(
                        f"precommit block is weird, an expected block height is {last_block.height+1}, "
                        f"but it's {precommit_block.height}")

            else:
                util.logger.spam(
                    f"precommit bock is None after block height synchronization."
                )

        return True

    def __get_peer_stub_list(self, target_peer_stub=None):
        """It updates peer list for block manager refer to peer list on the loopchain network.
        This peer list is not same to the peer list of the loopchain network.

        :return max_height: a height of current blockchain
        :return peer_stubs: current peer list on the loopchain network
        """
        peer_target = ChannelProperty().peer_target
        peer_manager = ObjectManager().channel_service.peer_manager

        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        max_height = -1  # current max height
        peer_stubs = []  # peer stub list for block height synchronization

        if ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            target_dict = peer_manager.get_IP_of_peers_dict()
            target_list = [
                peer_target for peer_id, peer_target in target_dict.items()
                if peer_id != ChannelProperty().peer_id
            ]
        else:
            target_list = [f"{target_peer_stub.target}"]

        for target in target_list:
            if target != peer_target:
                logging.debug(f"try to target({target})")
                channel = GRPCHelper().create_client_channel(target)
                stub = loopchain_pb2_grpc.PeerServiceStub(channel)
                try:
                    if ObjectManager(
                    ).channel_service.is_support_node_function(
                            conf.NodeFunction.Vote):
                        response = stub.GetStatus(
                            loopchain_pb2.StatusRequest(
                                request="",
                                channel=self.__channel_name,
                            ), conf.GRPC_TIMEOUT_SHORT)
                    else:
                        response = target_peer_stub.call("Status")
                        util.logger.spam('{/api/v1/status/peer} response: ' +
                                         response.text)
                        response.block_height = int(
                            json.loads(response.text)["block_height"])
                        response.unconfirmed_block_height = int(
                            json.loads(response.text).get(
                                "unconfirmed_block_height", -1))
                        stub.target = target

                    response.block_height = max(
                        response.block_height,
                        response.unconfirmed_block_height)

                    if response.block_height > max_height:
                        # Add peer as higher than this
                        max_height = response.block_height
                        peer_stubs.append(stub)

                except Exception as e:
                    logging.warning(
                        f"This peer has already been removed from the block height target node. {e}"
                    )

        return max_height, peer_stubs

    def __close_level_db(self):
        del self.__level_db
        self.__level_db = None
        self.__blockchain.close_blockchain_db()

    def stop(self):
        # for reuse level db when restart channel.
        self.__close_level_db()

        if conf.ALLOW_MAKE_EMPTY_BLOCK:
            self.__block_generation_scheduler.stop()

        if self.consensus_algorithm:
            self.consensus_algorithm.stop()

    def leader_complain(self):
        complained_leader_id = self.epoch.leader_id
        new_leader = self.__channel_service.peer_manager.get_next_leader_peer(
            current_leader_peer_id=self.epoch.leader_id)
        new_leader_id = new_leader.peer_id if new_leader else None

        if not isinstance(new_leader_id, str):
            new_leader_id = ""

        if not isinstance(complained_leader_id, str):
            complained_leader_id = ""

        self.epoch.add_complain(complained_leader_id, new_leader_id,
                                self.epoch.height, self.__peer_id,
                                ChannelProperty().group_id)

        request = loopchain_pb2.ComplainLeaderRequest(
            complained_leader_id=complained_leader_id,
            channel=self.channel_name,
            new_leader_id=new_leader_id,
            block_height=self.epoch.height,
            message="I'm your father.",
            peer_id=self.__peer_id,
            group_id=ChannelProperty().group_id)

        util.logger.debug(f"complain group_id({ChannelProperty().group_id})")

        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "ComplainLeader", request)

    def vote_unconfirmed_block(self, block_hash, is_validated):
        logging.debug(
            f"block_manager:vote_unconfirmed_block ({self.channel_name}/{is_validated})"
        )

        if is_validated:
            vote_code, message = message_code.get_response(
                message_code.Response.success_validate_block)
        else:
            vote_code, message = message_code.get_response(
                message_code.Response.fail_validate_block)

        block_vote = loopchain_pb2.BlockVote(
            vote_code=vote_code,
            channel=self.channel_name,
            message=message,
            block_hash=block_hash,
            peer_id=self.__peer_id,
            group_id=ChannelProperty().group_id)

        self.candidate_blocks.add_vote(block_hash,
                                       ChannelProperty().group_id,
                                       ChannelProperty().peer_id, is_validated)
        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "VoteUnconfirmedBlock", block_vote)

    def vote_as_peer(self):
        """Vote to AnnounceUnconfirmedBlock
        """
        if self.__unconfirmedBlockQueue.empty():
            return

        unconfirmed_block: Block = self.__unconfirmedBlockQueue.get()
        logging.debug(
            f"we got unconfirmed block ....{unconfirmed_block.header.hash.hex()}"
        )

        my_height = self.__blockchain.block_height
        if my_height < (unconfirmed_block.header.height - 1):
            self.__channel_service.state_machine.block_sync()
            return

        # a block at the same height as unconfirmed_block has already been added
        if my_height >= unconfirmed_block.header.height:
            return

        logging.info("PeerService received unconfirmed block: " +
                     unconfirmed_block.header.hash.hex())

        block_version = self.__blockchain.block_versioner.get_version(
            unconfirmed_block.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.__blockchain.tx_versioner)
        block_verifier.invoke_func = self.__channel_service.score_invoke

        exception = None
        try:
            invoke_results = block_verifier.verify(
                unconfirmed_block, self.__blockchain.last_block,
                self.__blockchain,
                self.__blockchain.last_block.header.next_leader)
        except Exception as e:
            exception = e
            logging.error(e)
            traceback.print_exc()
        else:
            self.set_invoke_results(unconfirmed_block.header.hash.hex(),
                                    invoke_results)
            self.candidate_blocks.add_block(unconfirmed_block)
        finally:
            self.vote_unconfirmed_block(unconfirmed_block.header.hash,
                                        exception is None)
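
The `__pre_validate` method earlier in this class rejects a transaction whose hash is already queued or whose timestamp lies outside the configured boundary. A minimal standalone sketch of those two checks (hypothetical helper; units and names are illustrative, not loopchain's API):

import time

def pre_validate_sketch(tx_hash: str, tx_timestamp: float, queued_hashes: set,
                        boundary_seconds: float) -> None:
    # Illustrates the duplicate-hash and time-boundary checks from __pre_validate.
    if tx_hash in queued_hashes:
        raise ValueError(f"duplicated tx hash: {tx_hash}")
    # Assumes tx_timestamp is in seconds; loopchain uses its own timestamp helpers.
    if abs(time.time() - tx_timestamp) > boundary_seconds:
        raise ValueError(f"tx timestamp {tx_timestamp} outside the allowed boundary")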
Example #15
0
class TestBlockChain(unittest.TestCase):
    chain = None
    db_name = 'blockchain_db'

    def setUp(self):
        test_util.print_testname(self._testMethodName)
        # create the BlockChain
        test_db = test_util.make_level_db(self.db_name)
        self.assertIsNotNone(test_db, "failed to create DB")
        self.chain = BlockChain(test_db)

        test_util.print_testname(self._testMethodName)

    def tearDown(self):
        # delete the blockchain DB
        leveldb.DestroyDB(self.db_name)

    def generate_test_block(self):
        """
        Generate a temporary block for tests
        :return: temporary block
        """

        block = Block()
        for x in range(0, 10):
            tx = Transaction()
            hashed_value = tx.put_data("{args:[]}")
            self.assertNotEqual(hashed_value, "", "failed to create transaction")
            self.assertTrue(block.put_transaction(tx), "failed to add transaction to block")

        return block

    def test_genesis_block_by_key(self):
        """
        Find the genesis block
        """
        # find the block by its hash key
        last_block_hash = self.chain.last_block.block_hash
        logging.debug("LAST BLOCK : %s", last_block_hash)
        last_block = self.chain.find_block_by_hash(last_block_hash)
        self.assertIsNotNone(last_block, "cannot get the genesis block")

    def test_find_do_not_exist_block(self):
        """
        Search for a block that does not exist in the blockchain
        """
        none_block_key = "bf5570f5a1810b7af78caf4bc70a660f0df51e42baf91d4de5b2328de0e83dfc"
        none_block = self.chain.find_block_by_hash(none_block_key)
        self.assertIsNone(none_block, "a non-existent block was returned")

    def test_find_block_by_height(self):
        # GIVEN
        size = 10
        find_block_index = int(size*random.random())
        find_block_height = 0
        find_block_hash = None
        for x in range(size):
            last_block = self.chain.last_block
            n_block = self.generate_test_block()
            n_block.generate_block(last_block)
            n_block.block_status = BlockStatus.confirmed
            if find_block_index == x:
                find_block_hash = n_block.block_hash
                find_block_height = n_block.height
            self.chain.add_block(n_block)

        logging.debug("find block hash : %s ", find_block_hash)
        logging.debug("find block height : %d ", find_block_height)

        # WHEN
        find_block_by_hash = self.chain.find_block_by_hash(find_block_hash)
        find_block_by_height = self.chain.find_block_by_height(find_block_height)

        # THEN
        self.assertEqual(find_block_by_hash.block_hash, find_block_by_height.block_hash)

    def test_add_some_block_and_find_by_key(self):
        """몇개의 블럭을 추가한 후 임의의 블럭을 찾는다
        """
        # GIVEN
        size = 10
        find_block_index = int(size*random.random())
        find_block_hash = None
        for x in range(size):
            last_block = self.chain.last_block
            n_block = self.generate_test_block()
            n_block.generate_block(last_block)
            n_block.block_status = BlockStatus.confirmed
            if find_block_index == x:
                find_block_hash = n_block.block_hash
            logging.debug("new block height : %i", n_block.height)
            self.chain.add_block(n_block)

        logging.debug("find block index : %i ", find_block_index)
        logging.debug("find block hash : %s ", find_block_hash)

        # WHEN
        find_block = self.chain.find_block_by_hash(find_block_hash)
        logging.debug("find block height : %i", find_block.height)

        # THEN
        self.assertEqual(find_block_hash, find_block.block_hash)

    def test_add_and_find_tx(self):
        """block db 에 block_hash - block_object 를 저장할때, tx_hash - tx_object 도 저장한다.
        get tx by tx_hash 시 해당 block 을 효율적으로 찾기 위해서
        """
        tx = self.__add_single_tx_to_block_return_tx_with_test()

        saved_tx = self.chain.find_tx_by_key(tx.get_tx_hash())
        logging.debug("saved_tx: " + str(saved_tx.get_tx_hash()))

        self.assertEqual(tx.get_tx_hash(), saved_tx.get_tx_hash(), "Fail Find Transaction")

    def test_add_and_verify_results(self):
        """invoke_result = "{"code" : "invoke_result_code" , "error_message": "message" }"

        """
        tx = self.__add_single_tx_to_block_return_tx_with_test()

        invoke_result = self.chain.find_invoke_result_by_tx_hash(tx.get_tx_hash())
        self.assertEqual(invoke_result['code'], message_code.Response.success)

    def __add_single_tx_to_block_return_tx_with_test(self):
        last_block = self.chain.last_block
        block = Block()
        tx = Transaction()
        hashed_value = tx.put_data("1234")
        self.assertNotEqual(hashed_value, "", "failed to create transaction")
        self.assertTrue(block.put_transaction(tx), "failed to add transaction to block")

        logging.debug("tx_hash: " + tx.get_tx_hash())

        block.generate_block(last_block)
        block.block_status = BlockStatus.confirmed
        # add_block include __add_tx_to_block_db what we want to test
        self.assertTrue(self.chain.add_block(block),
                        "Fail Add Block to BlockChain in test_add_tx_to_block_db")
        return tx

    def test_unicode_decode_error(self):
        """ Transaction hash 는 UTF-8 인코딩이나 block hash 값은 sha256 hex byte array 이므로 인코딩 에러가 발생함
        """

        last_block = self.chain.last_block
        unexpected_transaction = self.chain.find_tx_by_key(last_block.block_hash)
        self.assertIsNone(unexpected_transaction, "unexpected_transaction is not None")

    def test_blockchain_is_singleton(self):
        x = BlockChain(test_util.make_level_db())
        y = BlockChain(test_util.make_level_db())

        self.assertTrue((x is y))
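
The docstring of test_add_and_find_tx above explains that the block db also keeps a tx_hash entry so the containing block can be found efficiently. A minimal sketch of that two-step lookup (hypothetical in-memory layout, not loopchain's actual storage schema):

def find_tx(block_db: dict, tx_index: dict, tx_hash: str):
    # tx_hash -> block_hash, then block_hash -> block, as the test's docstring describes.
    block_hash = tx_index.get(tx_hash)
    if block_hash is None:
        return None
    block = block_db[block_hash]
    return block["transactions"].get(tx_hash)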
Example #16
0
    def test_blockchain_is_singleton(self):
        x = BlockChain(test_util.make_level_db())
        y = BlockChain(test_util.make_level_db())

        self.assertTrue((x is y))
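
test_blockchain_is_singleton asserts that constructing BlockChain twice yields the same object. A minimal metaclass-based singleton of the kind such a test assumes (illustrative sketch only, not necessarily loopchain's actual implementation):

class SingletonMetaClass(type):
    # Return the same instance for every construction of a given class (sketch).
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class BlockChainLike(metaclass=SingletonMetaClass):
    def __init__(self, db=None):
        self.db = db


assert BlockChainLike("db_a") is BlockChainLike("db_b")  # same object, as the test expects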
Example #17
0
class BlockManager:
    """Manage the blockchain of a channel. It has objects for consensus and db object.
    """

    MAINNET = "cf43b3fd45981431a0e64f79d07bfcf703e064b73b802c5f32834eec72142190"
    TESTNET = "885b8021826f7e741be7f53bb95b48221e9ab263f377e997b2e47a7b8f4a2a8b"

    def __init__(self, name: str, channel_manager, peer_id, channel_name,
                 level_db_identity):
        self.__channel_service: ChannelService = channel_manager
        self.__channel_name = channel_name
        self.__pre_validate_strategy = self.__pre_validate
        self.__peer_id = peer_id
        self.__level_db = None
        self.__level_db_path = ""
        self.__level_db, self.__level_db_path = util.init_level_db(
            level_db_identity=f"{level_db_identity}_{channel_name}",
            allow_rename_path=False)
        self.__txQueue = AgingCache(
            max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
            default_item_status=TransactionStatusInQueue.normal)
        self.__blockchain = BlockChain(self.__level_db, channel_name)
        self.__peer_type = None
        self.__consensus = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks()
        self.__block_height_sync_lock = threading.Lock()
        self.__block_height_thread_pool = ThreadPoolExecutor(
            1, 'BlockHeightSyncThread')
        self.__block_height_future: Future = None
        self.__precommit_block: Block = None
        self.set_peer_type(loopchain_pb2.PEER)
        self.name = name
        self.__service_status = status_code.Service.online

        self.epoch: Epoch = None

    @property
    def channel_name(self):
        return self.__channel_name

    @property
    def service_status(self):
        # Return string for compatibility.
        if self.__service_status >= 0:
            return "Service is online: " + \
                   str(1 if self.__channel_service.state_machine.state == "BlockGenerate" else 0)
        else:
            return "Service is offline: " + status_code.get_status_reason(
                self.__service_status)

    def init_epoch(self):
        """Call this after peer list update

        :return:
        """
        self.epoch = Epoch(self)

    def update_service_status(self, status):
        self.__service_status = status
        StubCollection().peer_stub.sync_task().update_status(
            self.__channel_name, {"status": self.service_status})

    @property
    def peer_type(self):
        return self.__peer_type

    @property
    def made_block_count(self):
        if self.__consensus_algorithm:
            return self.__consensus_algorithm.made_block_count
        return 0

    @property
    def consensus(self):
        return self.__consensus

    @consensus.setter
    def consensus(self, consensus):
        self.__consensus = consensus

    @property
    def consensus_algorithm(self):
        return self.__consensus_algorithm

    @property
    def precommit_block(self):
        return self.__precommit_block

    @precommit_block.setter
    def precommit_block(self, block):
        self.__precommit_block = block

    def get_level_db(self):
        return self.__level_db

    def clear_all_blocks(self):
        logging.debug(f"clear level db({self.__level_db_path})")
        shutil.rmtree(self.__level_db_path)

    def set_peer_type(self, peer_type):
        self.__peer_type = peer_type

    def set_invoke_results(self, block_hash, invoke_results):
        self.__blockchain.set_invoke_results(block_hash, invoke_results)

    def get_total_tx(self):
        """
        Return the total number of transactions in the blockchain.

        :return: total transaction count in the blockchain
        """
        return self.__blockchain.total_tx

    def get_blockchain(self):
        return self.__blockchain

    def pre_validate(self, tx: Transaction):
        return self.__pre_validate_strategy(tx)

    def __pre_validate(self, tx: Transaction):
        if tx.hash.hex() in self.__txQueue:
            raise TransactionInvalidDuplicatedHash(tx.hash.hex())

        if not util.is_in_time_boundary(tx.timestamp,
                                        conf.ALLOW_TIMESTAMP_BOUNDARY_SECOND):
            raise TransactionInvalidOutOfTimeBound(tx.hash.hex(), tx.timestamp,
                                                   util.get_now_time_stamp())

    def __pre_validate_pass(self, tx: Transaction):
        pass

    def broadcast_send_unconfirmed_block(self, block_: Block):
        """생성된 unconfirmed block 을 피어들에게 broadcast 하여 검증을 요청한다.
        """
        if self.__channel_service.state_machine.state == "BlockGenerate":
            logging.debug(
                f"BroadCast AnnounceUnconfirmedBlock "
                f"height({block_.header.height}) block({block_.header.hash}) peers: "
                f"{ObjectManager().channel_service.peer_manager.get_peer_count()}"
            )

            # util.logger.spam(f'block_manager:zip_test num of tx is {block_.confirmed_tx_len}')
            block_dumped = self.__blockchain.block_dumps(block_)

            ObjectManager(
            ).channel_service.broadcast_scheduler.schedule_broadcast(
                "AnnounceUnconfirmedBlock",
                loopchain_pb2.BlockSend(block=block_dumped,
                                        channel=self.__channel_name))

    def add_tx_obj(self, tx):
        """전송 받은 tx 를 Block 생성을 위해서 큐에 입력한다. load 하지 않은 채 입력한다.

        :param tx: transaction object
        """
        self.__txQueue[tx.hash.hex()] = tx

    def get_tx(self, tx_hash) -> Transaction:
        """Get transaction from block_db by tx_hash

        :param tx_hash: tx hash
        :return: tx object or None
        """
        return self.__blockchain.find_tx_by_key(tx_hash)

    def get_tx_info(self, tx_hash) -> dict:
        """Get transaction info from block_db by tx_hash

        :param tx_hash: tx hash
        :return: {'block_hash': "", 'block_height': "", "transaction": "", "result": {"code": ""}}
        """
        return self.__blockchain.find_tx_info(tx_hash)

    def get_invoke_result(self, tx_hash):
        """ get invoke result by tx

        :param tx_hash:
        :return:
        """
        return self.__blockchain.find_invoke_result_by_tx_hash(tx_hash)

    def get_tx_queue(self):
        if conf.CONSENSUS_ALGORITHM == conf.ConsensusAlgorithm.lft:
            return self.__consensus.get_tx_queue()

        return self.__txQueue

    def get_count_of_unconfirmed_tx(self):
        """BlockManager 의 상태를 확인하기 위하여 현재 입력된 unconfirmed_tx 의 카운트를 구한다.

        :return: 현재 입력된 unconfirmed tx 의 갯수
        """
        return len(self.__txQueue)

    def confirm_prev_block(self, current_block: Block):
        confirmed_block = self.__blockchain.confirm_prev_block(current_block)
        if confirmed_block is None:
            return

        # stop leader complain timer
        self.__channel_service.stop_leader_complain_timer()

        # start new epoch
        if not (current_block.header.complained
                and self.epoch.complained_result):
            self.epoch = Epoch.new_epoch()

        # reset leader
        self.__channel_service.reset_leader(
            current_block.header.next_leader.hex_hx())

    def __validate_duplication_unconfirmed_block(self,
                                                 unconfirmed_block: Block):
        last_unconfirmed_block: Block = self.__blockchain.last_unconfirmed_block
        try:
            candidate_block = self.candidate_blocks.blocks[
                unconfirmed_block.header.hash].block
        except KeyError:
            # When an unconfirmed block confirms the previous block, it becomes the last unconfirmed block,
            # but if it fails verification, it is not added to the candidate blocks.
            candidate_block: Block = last_unconfirmed_block

        if candidate_block is None or unconfirmed_block.header.hash != candidate_block.header.hash:
            return

        if self.__channel_service.state_machine.state == 'LeaderComplain' \
                and self.epoch.leader_id == unconfirmed_block.header.peer_id.hex_hx():
            raise InvalidUnconfirmedBlock(
                f"Unconfirmed block is made by complained leader. {unconfirmed_block})"
            )

        raise DuplicationUnconfirmedBlock(
            "Unconfirmed block has already been added.")

    def add_unconfirmed_block(self, unconfirmed_block):
        """

        :param unconfirmed_block:
        """
        logging.info(
            f"unconfirmed_block {unconfirmed_block.header.height}, {unconfirmed_block.body.confirm_prev_block}"
        )

        self.__validate_duplication_unconfirmed_block(unconfirmed_block)

        last_unconfirmed_block: Block = self.__blockchain.last_unconfirmed_block

        try:
            if unconfirmed_block.body.confirm_prev_block:
                self.confirm_prev_block(unconfirmed_block)
            elif last_unconfirmed_block is None:
                if self.__blockchain.last_block.header.hash != unconfirmed_block.header.prev_hash:
                    raise BlockchainError(
                        f"last block is not previous block. block={unconfirmed_block}"
                    )

                self.__blockchain.last_unconfirmed_block = unconfirmed_block
                self.__channel_service.stop_leader_complain_timer()
        except BlockchainError as e:
            logging.warning(
                f"BlockchainError while confirm_block({e}), retry block_height_sync"
            )
            self.__channel_service.state_machine.block_sync()
            raise InvalidUnconfirmedBlock(e)

    def add_confirmed_block(self, confirmed_block: Block, confirm_info=None):
        if self.__channel_service.state_machine.state != "Watch":
            util.logger.info(
                f"Can't add confirmed block if state is not Watch. {confirmed_block.header.hash.hex()}"
            )
            return

        self.__blockchain.add_block(confirmed_block, confirm_info=confirm_info)

    def rebuild_block(self):
        self.__blockchain.rebuild_transaction_count()

        nid = self.get_blockchain().find_nid()
        if nid is None:
            genesis_block = self.get_blockchain().find_block_by_height(0)
            self.__rebuild_nid(genesis_block)
        else:
            ChannelProperty().nid = nid

    def __rebuild_nid(self, block: Block):
        nid = NID.unknown.value
        if block.header.hash.hex() == BlockManager.MAINNET:
            nid = NID.mainnet.value
        elif block.header.hash.hex() == BlockManager.TESTNET:
            nid = NID.testnet.value
        elif len(block.body.transactions) > 0:
            tx = next(iter(block.body.transactions.values()))
            nid = tx.nid
            if nid is None:
                nid = NID.unknown.value

        if isinstance(nid, int):
            nid = hex(nid)

        self.get_blockchain().put_nid(nid)
        ChannelProperty().nid = nid

    def block_height_sync(self):
        with self.__block_height_sync_lock:
            need_to_sync = (self.__block_height_future is None
                            or self.__block_height_future.done())

            if need_to_sync:
                self.__channel_service.stop_leader_complain_timer()
                self.__block_height_future = self.__block_height_thread_pool.submit(
                    self.__block_height_sync)
            else:
                logging.warning(
                    'Tried block_height_sync. But failed. The thread is already running'
                )

            return need_to_sync, self.__block_height_future

    def __block_request(self, peer_stub, block_height):
        """request block by gRPC or REST

        :param peer_stub:
        :param block_height:
        :return block, max_block_height, unconfirmed_block_height, confirm_info, response_code
        """
        if ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            response = peer_stub.BlockSync(
                loopchain_pb2.BlockSyncRequest(block_height=block_height,
                                               channel=self.__channel_name),
                conf.GRPC_TIMEOUT)
            try:
                block = self.__blockchain.block_loads(response.block)
            except Exception as e:
                traceback.print_exc()
                raise exception.BlockError(
                    f"Received block is invalid: original exception={e}")
            return block, response.max_block_height, response.unconfirmed_block_height,\
                response.confirm_info, response.response_code
        else:
            # request REST(json-rpc) way to RS peer
            return self.__block_request_by_citizen(
                block_height,
                ObjectManager().channel_service.radio_station_stub)

    def __block_request_by_citizen(self, block_height, rs_rest_stub):
        get_block_result = rs_rest_stub.call("GetBlockByHeight", {
            'channel': self.__channel_name,
            'height': str(block_height)
        })
        last_block = rs_rest_stub.call("GetLastBlock")
        max_height = self.__blockchain.block_versioner.get_height(last_block)
        block_version = self.__blockchain.block_versioner.get_version(
            block_height)
        block_serializer = BlockSerializer.new(
            block_version,
            self.get_blockchain().tx_versioner)
        block = block_serializer.deserialize(get_block_result['block'])
        confirm_info = get_block_result.get('confirm_info', '')
        if isinstance(confirm_info, str):
            confirm_info = confirm_info.encode('utf-8')

        return block, max_height, -1, confirm_info, message_code.Response.success

    def __precommit_block_request(self, peer_stub, last_block_height):
        """request precommit block by gRPC

        :param peer_stub:
        :param last_block_height:
        :return block, max_block_height, response_code
        """
        response = peer_stub.GetPrecommitBlock(
            loopchain_pb2.PrecommitBlockRequest(
                last_block_height=last_block_height,
                channel=self.__channel_name), conf.GRPC_TIMEOUT)

        if response.block == b"":
            return None, response.response_code, response.response_message
        else:
            try:
                precommit_block = self.__blockchain.block_loads(response.block)
            except Exception as e:
                traceback.print_exc()
                raise exception.BlockError(
                    f"Received block is invalid: original exception={e}")
            # util.logger.spam(
            #     f"GetPrecommitBlock:response::{response.response_code}/{response.response_message}/"
            #     f"{precommit_block}/{precommit_block.confirmed_transaction_list}")
            return precommit_block, response.response_code, response.response_message

    def __start_block_height_sync_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            util.logger.spam(
                f"add timer for block_request_call to radiostation...")
            timer_service.add_timer(
                timer_key,
                Timer(target=timer_key,
                      duration=conf.GET_LAST_BLOCK_TIMER,
                      callback=self.block_height_sync))

    def stop_block_height_sync_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service
        if timer_key in timer_service.timer_list:
            timer_service.stop_timer(timer_key)

    def start_block_generate_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_GENERATE
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            if self.__consensus_algorithm:
                self.__consensus_algorithm.stop()

        self.__consensus_algorithm = ConsensusSiever(self)
        self.__consensus_algorithm.start_timer(timer_service)

    def stop_block_generate_timer(self):
        if self.__consensus_algorithm:
            self.__consensus_algorithm.stop()

    def __current_block_height(self):
        if self.__blockchain.last_unconfirmed_block and \
                self.__blockchain.last_unconfirmed_block.header.height == self.__blockchain.block_height + 1:
            return self.__blockchain.block_height + 1
        else:
            return self.__blockchain.block_height

    def __current_last_block(self):
        return self.__blockchain.last_unconfirmed_block or self.__blockchain.last_block

    def __add_block_by_sync(self, block_, confirm_info=None):
        logging.debug(
            f"block_manager.py >> block_height_sync :: "
            f"height({block_.header.height}) confirm_info({confirm_info})")

        block_version = self.get_blockchain().block_versioner.get_version(
            block_.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.get_blockchain().tx_versioner,
                                           raise_exceptions=False)
        if block_.header.height == 0:
            block_verifier.invoke_func = self.__channel_service.genesis_invoke
        else:
            block_verifier.invoke_func = self.__channel_service.score_invoke

        reps = self.__channel_service.get_rep_ids()
        invoke_results = block_verifier.verify_loosely(
            block_, self.__blockchain.last_block, self.__blockchain, reps=reps)
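        # During sync, duplicated-tx errors are tolerated (tx info is not rewritten), and a
        # ScoreInvokeError caused by such duplicates is tolerated too; any other verification
        # failure is re-raised below.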
        need_to_write_tx_info, need_to_score_invoke = True, True
        for exc in block_verifier.exceptions:
            if isinstance(exc, TransactionInvalidDuplicatedHash):
                need_to_write_tx_info = False
            if isinstance(exc, ScoreInvokeError) and not need_to_write_tx_info:
                need_to_score_invoke = False

        exc = next((exc for exc in block_verifier.exceptions
                    if not isinstance(exc, TransactionInvalidDuplicatedHash)),
                   None)
        if exc:
            if isinstance(exc, ScoreInvokeError) and not need_to_score_invoke:
                pass
            else:
                raise exc

        self.__blockchain.set_invoke_results(block_.header.hash.hex(),
                                             invoke_results)
        return self.__blockchain.add_block(block_, confirm_info,
                                           need_to_write_tx_info,
                                           need_to_score_invoke)

    def __confirm_prev_block_by_sync(self, block_):
        prev_block = self.__blockchain.last_unconfirmed_block
        confirm_info = block_.body.confirm_prev_block

        logging.debug(
            f"block_manager.py >> block_height_sync :: height({prev_block.header.height})"
        )

        block_version = self.get_blockchain().block_versioner.get_version(
            prev_block.header.height)
        block_verifier = BlockVerifier.new(block_version,
                                           self.get_blockchain().tx_versioner)
        if prev_block.header.height == 0:
            block_verifier.invoke_func = self.__channel_service.genesis_invoke
        else:
            block_verifier.invoke_func = self.__channel_service.score_invoke

        reps = self.__channel_service.get_rep_ids()
        invoke_results = block_verifier.verify_loosely(
            prev_block,
            self.__blockchain.last_block,
            self.__blockchain,
            reps=reps)
        self.__blockchain.set_invoke_results(prev_block.header.hash.hex(),
                                             invoke_results)
        return self.__blockchain.add_block(prev_block, confirm_info)

    def __block_request_to_peers_in_sync(self, peer_stubs, my_height,
                                         unconfirmed_block_height, max_height):
        """Extracted func from __block_height_sync.
        It has block request loop with peer_stubs for block height sync.

        :param peer_stubs:
        :param my_height:
        :param unconfirmed_block_height:
        :param max_height:
        :return: my_height, max_height
        """
        peer_stubs_len = len(peer_stubs)
        peer_index = 0
        retry_number = 0

        while max_height > my_height:
            if self.__channel_service.state_machine.state != 'BlockSync':
                break

            peer_stub = peer_stubs[peer_index]
            try:
                block, max_block_height, current_unconfirmed_block_height, confirm_info, response_code = \
                    self.__block_request(peer_stub, my_height + 1)
            except Exception as e:
                logging.warning("There is a bad peer, I hate you: " + str(e))
                traceback.print_exc()
                response_code = message_code.Response.fail

            if response_code == message_code.Response.success:
                logging.debug(f"try add block height: {block.header.height}")

                max_block_height = max(max_block_height,
                                       current_unconfirmed_block_height)
                if max_block_height > max_height:
                    util.logger.spam(
                        f"set max_height :{max_height} -> {max_block_height}")
                    max_height = max_block_height
                    if current_unconfirmed_block_height == max_block_height:
                        unconfirmed_block_height = current_unconfirmed_block_height

                try:
                    result = True
                    if max_height == unconfirmed_block_height == block.header.height \
                            and max_height > 0 and not confirm_info:
                        self.candidate_blocks.add_block(block)
                        self.__blockchain.last_unconfirmed_block = block
                        result = True
                    else:
                        result = self.__add_block_by_sync(block, confirm_info)

                    if result:
                        if block.header.height == 0:
                            self.__rebuild_nid(block)
                        elif self.__blockchain.find_nid() is None:
                            genesis_block = self.get_blockchain(
                            ).find_block_by_height(0)
                            self.__rebuild_nid(genesis_block)

                except KeyError as e:
                    result = False
                    logging.error("fail block height sync: " + str(e))
                    break
                except exception.BlockError:
                    result = False
                    logging.error(
                        "Block Error Clear all block and restart peer.")
                    self.clear_all_blocks()
                    util.exit_and_msg(
                        "Block Error Clear all block and restart peer.")
                    break
                finally:
                    peer_index = (peer_index + 1) % peer_stubs_len
                    if result:
                        my_height += 1
                        retry_number = 0
                    else:
                        retry_number += 1
                        logging.warning(
                            f"Block height({my_height}) synchronization is fail. "
                            f"{retry_number}/{conf.BLOCK_SYNC_RETRY_NUMBER}")
                        if retry_number >= conf.BLOCK_SYNC_RETRY_NUMBER:
                            util.exit_and_msg(
                                f"This peer already tried to synchronize {my_height} block "
                                f"for max retry number({conf.BLOCK_SYNC_RETRY_NUMBER}). "
                                f"Peer will be down.")
            else:
                logging.warning(
                    f"Not responding peer({peer_stub}) is removed from the peer stubs target."
                )
                if peer_stubs_len == 1:
                    raise ConnectionError
                del peer_stubs[peer_index]
                peer_stubs_len -= 1
                peer_index %= peer_stubs_len  # If peer_index is last index, go to first

        return my_height, max_height

    def __block_height_sync(self):
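        # Sync flow: build the peer stub list and learn the network's max height, drop
        # the current last_unconfirmed_block, request blocks from peers until my_height
        # reaches max_height, then either complete the sync or schedule another
        # block_sync round.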
        def _handle_exception(e):
            logging.warning(
                f"exception during block_height_sync :: {type(e)}, {e}")
            traceback.print_exc()
            self.__start_block_height_sync_timer()

        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        try:
            max_height, unconfirmed_block_height, peer_stubs = self.__get_peer_stub_list(
            )
        except ConnectionError as exc:
            _handle_exception(exc)
            return False

        if self.__blockchain.last_unconfirmed_block is not None:
            self.candidate_blocks.remove_block(
                self.__blockchain.last_unconfirmed_block.header.hash)
        self.__blockchain.last_unconfirmed_block = None

        my_height = self.__current_block_height()
        logging.debug(
            f"in __block_height_sync max_height({max_height}), my_height({my_height})"
        )

        # prevent_next_block_mismatch until last_block_height in block DB. (excludes last_unconfirmed_block_height)
        self.get_blockchain().prevent_next_block_mismatch(
            self.__blockchain.block_height + 1)

        try:
            if peer_stubs:
                my_height, max_height = self.__block_request_to_peers_in_sync(
                    peer_stubs, my_height, unconfirmed_block_height,
                    max_height)
        except Exception as exc:
            _handle_exception(exc)
            return False

        curr_state = self.__channel_service.state_machine.state
        if curr_state != 'BlockSync':
            util.logger.info(f"Current state{curr_state} is not BlockSync")
            return True

        if my_height >= max_height:
            util.logger.debug(f"block_manager:block_height_sync is complete.")
            next_leader = self.__current_last_block().header.next_leader
            leader_peer = self.__channel_service.peer_manager.get_peer(
                next_leader.hex_hx()) if next_leader else None

            if leader_peer:
                self.__channel_service.peer_manager.set_leader_peer(
                    leader_peer, None)
                self.epoch = Epoch.new_epoch(leader_peer.peer_id)
            elif self.epoch.height < my_height:
                self.epoch = Epoch.new_epoch()

            self.__channel_service.state_machine.complete_sync()
        else:
            logging.warning(
                f"it's not completed block height synchronization in once ...\n"
                f"try block_height_sync again... my_height({my_height}) in channel({self.__channel_name})"
            )
            self.__channel_service.state_machine.block_sync()

        return True

    def __get_peer_stub_list(self):
        """It updates peer list for block manager refer to peer list on the loopchain network.
        This peer list is not same to the peer list of the loopchain network.

        :return max_height: a height of current blockchain
        :return peer_stubs: current peer list on the loopchain network
        """
        max_height = -1  # current max height
        unconfirmed_block_height = -1
        peer_stubs = []  # peer stub list for block height synchronization

        if not ObjectManager().channel_service.is_support_node_function(
                conf.NodeFunction.Vote):
            rest_stub = ObjectManager().channel_service.radio_station_stub
            peer_stubs.append(rest_stub)
            last_block = rest_stub.call("GetLastBlock")
            max_height = self.__blockchain.block_versioner.get_height(
                last_block)

            return max_height, unconfirmed_block_height, peer_stubs

        # Make Peer Stub List [peer_stub, ...] and get max_height of network
        peer_target = ChannelProperty().peer_target
        peer_manager = ObjectManager().channel_service.peer_manager
        target_dict = peer_manager.get_IP_of_peers_dict()
        target_list = [
            peer_target for peer_id, peer_target in target_dict.items()
            if peer_id != ChannelProperty().peer_id
        ]

        for target in target_list:
            if target != peer_target:
                logging.debug(f"try to target({target})")
                channel = GRPCHelper().create_client_channel(target)
                stub = loopchain_pb2_grpc.PeerServiceStub(channel)
                try:
                    response = stub.GetStatus(
                        loopchain_pb2.StatusRequest(
                            request="",
                            channel=self.__channel_name,
                        ), conf.GRPC_TIMEOUT_SHORT)

                    response.block_height = max(
                        response.block_height,
                        response.unconfirmed_block_height)

                    if response.block_height > max_height:
                        # Keep this peer as a sync target since it reports a higher height
                        max_height = response.block_height
                        unconfirmed_block_height = response.unconfirmed_block_height
                        peer_stubs.append(stub)

                except Exception as e:
                    logging.warning(
                        f"This peer has already been removed from the block height target node. {e}"
                    )

        return max_height, unconfirmed_block_height, peer_stubs

    def __close_level_db(self):
        del self.__level_db
        self.__level_db = None
        self.__blockchain.close_blockchain_db()

    def stop(self):
        # Close the level db so it can be reused when the channel restarts.
        self.__close_level_db()

        if self.consensus_algorithm:
            self.consensus_algorithm.stop()

    def add_complain(self, complained_leader_id, new_leader_id, block_height,
                     peer_id, group_id):
        if new_leader_id == self.epoch.leader_id:
            util.logger.info(
                f"Complained new leader is current leader({new_leader_id})")
            return

        if self.epoch.height == block_height:
            self.epoch.add_complain(complained_leader_id, new_leader_id,
                                    block_height, peer_id, group_id)

            elected_leader = self.epoch.complain_result()
            if elected_leader:
                self.__channel_service.reset_leader(elected_leader,
                                                    complained=True)
                self.__channel_service.reset_leader_complain_timer()
            elif elected_leader is False:
                util.logger.warning(
                    f"Fail to elect the next leader on {self.epoch.round} round."
                )
                # In this case, a new leader can't be elected by the consensus of leader complaint.
                # That's why the leader of current `round` is set to the next `round` again.
                self.epoch.new_round(self.epoch.leader_id)
        elif self.epoch.height < block_height:
            self.__channel_service.state_machine.block_sync()

    def leader_complain(self):
        # util.logger.notice(f"do leader complain.")
        new_leader_id = self.epoch.pop_complained_candidate_leader()
        complained_leader_id = self.epoch.leader_id

        if not new_leader_id:
            new_leader = self.__channel_service.peer_manager.get_next_leader_peer(
                current_leader_peer_id=complained_leader_id)
            new_leader_id = new_leader.peer_id if new_leader else None

            if not isinstance(new_leader_id, str):
                new_leader_id = ""

        if not isinstance(complained_leader_id, str):
            complained_leader_id = ""

        self.add_complain(complained_leader_id, new_leader_id,
                          self.epoch.height, self.__peer_id,
                          ChannelProperty().group_id)

        request = loopchain_pb2.ComplainLeaderRequest(
            complained_leader_id=complained_leader_id,
            channel=self.channel_name,
            new_leader_id=new_leader_id,
            block_height=self.epoch.height,
            message="I'm your father.",
            peer_id=self.__peer_id,
            group_id=ChannelProperty().group_id)

        util.logger.debug(f"leader complain "
                          f"complained_leader_id({complained_leader_id}), "
                          f"new_leader_id({new_leader_id})")

        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "ComplainLeader", request)

    def vote_unconfirmed_block(self, block_hash, is_validated):
        logging.debug(
            f"block_manager:vote_unconfirmed_block ({self.channel_name}/{is_validated})"
        )

        if is_validated:
            vote_code, message = message_code.get_response(
                message_code.Response.success_validate_block)
        else:
            vote_code, message = message_code.get_response(
                message_code.Response.fail_validate_block)

        block_vote = loopchain_pb2.BlockVote(
            vote_code=vote_code,
            channel=self.channel_name,
            message=message,
            block_hash=block_hash,
            peer_id=self.__peer_id,
            group_id=ChannelProperty().group_id)

        self.candidate_blocks.add_vote(block_hash,
                                       ChannelProperty().group_id,
                                       ChannelProperty().peer_id, is_validated)
        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "VoteUnconfirmedBlock", block_vote)

    def verify_confirm_info(self, unconfirmed_block: Block):
        # TODO set below variable with right result.
        check_unconfirmed_block_has_valid_confirm_info_for_prev_block = True
        if not check_unconfirmed_block_has_valid_confirm_info_for_prev_block:
            raise ConfirmInfoInvalid(
                "Unconfirmed block has no valid confirm info for previous block"
            )

        my_height = self.__blockchain.block_height
        if my_height < (unconfirmed_block.header.height - 2):
            raise ConfirmInfoInvalidNeedBlockSync(
                f"trigger block sync in _vote my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_block.header.height})"
            )

        # a block at the same height as the unconfirmed block has already been added
        if my_height >= unconfirmed_block.header.height:
            raise ConfirmInfoInvalidAddedBlock(
                f"block is already added my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_block.header.height})"
            )

    async def _vote(self, unconfirmed_block: Block):
        exc = None
        try:
            block_version = self.__blockchain.block_versioner.get_version(
                unconfirmed_block.header.height)
            block_verifier = BlockVerifier.new(block_version,
                                               self.__blockchain.tx_versioner)
            block_verifier.invoke_func = self.__channel_service.score_invoke
            reps = self.__channel_service.get_rep_ids()
            logging.debug(
                f"unconfirmed_block.header({unconfirmed_block.header})")
            invoke_results = block_verifier.verify(
                unconfirmed_block,
                self.__blockchain.last_block,
                self.__blockchain,
                self.__blockchain.last_block.header.next_leader,
                reps=reps)
        except Exception as e:
            exc = e
            logging.error(e)
            traceback.print_exc()
        else:
            self.set_invoke_results(unconfirmed_block.header.hash.hex(),
                                    invoke_results)
            self.candidate_blocks.add_block(unconfirmed_block)
        finally:
            self.vote_unconfirmed_block(unconfirmed_block.header.hash,
                                        exc is None)

    async def vote_as_peer(self, unconfirmed_block: Block):
        """Vote to AnnounceUnconfirmedBlock
        """
        util.logger.debug(
            f"in vote_as_peer "
            f"height({unconfirmed_block.header.height}) "
            f"unconfirmed_block({unconfirmed_block.header.hash.hex()})")

        try:
            self.add_unconfirmed_block(unconfirmed_block)
        except InvalidUnconfirmedBlock as e:
            util.logger.warning(e)
        except DuplicationUnconfirmedBlock as e:
            util.logger.debug(e)
            await self._vote(unconfirmed_block)
        else:
            await self._vote(unconfirmed_block)

        self.__channel_service.turn_on_leader_complain_timer()
Ejemplo n.º 18
0
class TestBlockChain(unittest.TestCase):
    chain = None
    db_name = 'blockchain_db'
    __peer_id = 'aaa'

    def setUp(self):
        test_util.print_testname(self._testMethodName)
        self.peer_auth = test_util.create_default_peer_auth()

        set_mock(self)
        # Create the BlockChain
        test_db = test_util.make_level_db(self.db_name)
        self.assertIsNotNone(test_db, "Failed to create DB")
        self.chain = BlockChain(test_db)

    def tearDown(self):
        # Delete the blockchain
        ObjectManager().peer_service = None
        leveldb.DestroyDB(self.db_name)
        os.system("rm -rf ./blockchain_db*")

    def generate_test_block(self):
        """
        임시 블럭 생성하는 메소드
        :return: 임시 블럭
        """

        block = Block(channel_name=conf.LOOPCHAIN_DEFAULT_CHANNEL)
        for x in range(0, 10):
            tx = test_util.create_basic_tx(self.__peer_id, self.peer_auth)
            self.assertNotEqual(tx.tx_hash, "", "트랜잭션 생성 실패")
            self.assertTrue(block.put_transaction(tx), "Block에 트랜잭션 추가 실패")

        return block

    def test_genesis_block_by_key(self):
        """
        제네시스 블럭을 찾는다
        """
        # 키를 통하여 블럭을 찾는다
        block = test_util.add_genesis_block()
        self.chain.add_block(block)

        last_block_hash = self.chain.last_block.block_hash
        logging.debug("LAST BLOCK : %s", last_block_hash)
        last_block = self.chain.find_block_by_hash(last_block_hash)
        self.assertIsNotNone(last_block, "제네시스 블럭을 가져올 수 없습니다.")

    def test_find_do_not_exist_block(self):
        """
        블럭체인에 없는 블럭을 찾는다
        """
        none_block_key = "bf5570f5a1810b7af78caf4bc70a660f0df51e42baf91d4de5b2328de0e83dfc"
        none_block = self.chain.find_block_by_hash(none_block_key)
        self.assertIsNot(none_block, "존재하지 않는 블럭이 출력되었습니다.")

    @unittest.skip
    def test_nonce(self):
        """test get, verify, set nonce

        :return:
        """
        # GIVEN
        new_nonce = self.chain.get_new_nonce_by_address("ABC")
        util.logger.spam(f"test_block_chain:test_nonce new_nonce({new_nonce})")

        # WHEN
        verify_result = self.chain.verify_nonce_by_address("ABC", new_nonce)
        util.logger.spam(f"test_block_chain:test_nonce verify_result({verify_result})")
        self.assertTrue(verify_result)
        set_result = self.chain._BlockChain__set_nonce_by_address("ABC", new_nonce)
        self.assertTrue(set_result)

        # THEN
        next_new_nonce = self.chain.get_new_nonce_by_address("ABC")
        util.logger.spam(f"test_block_chain:test_nonce new_nonce({next_new_nonce})")
        self.assertEqual(hex(int(new_nonce, 16) + 1), next_new_nonce)

    def test_tx_list_by_address(self):
        """test add tx_hash to tx_list by address

        :return:
        """
        # GIVEN
        for i in range(201):
            self.chain.add_tx_to_list_by_address("ABC", "112233_" + str(i))

        # WHEN
        current_tx_list, last_list_index = self.chain.get_tx_list_by_address("ABC")
        util.logger.spam(f"test_get_current_tx_list_by_address "
                         f"length of tx_list({len(current_tx_list)}) next_index({last_list_index})")

        oldest_tx_list, first_index = self.chain.get_tx_list_by_address("ABC", 1)
        util.logger.spam(f"test_get_oldest_tx_list_by_address "
                         f"length of tx_list({len(oldest_tx_list)}) next_index({first_index})")

        # THEN
        self.assertEqual(first_index, 0)
        self.assertEqual(last_list_index, 2)
        self.assertEqual(len(oldest_tx_list), conf.MAX_TX_LIST_SIZE_BY_ADDRESS + 1)

    def test_find_block_by_height(self):
        # GIVEN
        size = 10
        find_block_index = int(size*random.random())
        find_block_height = 0
        find_block_hash = None
        for x in range(size):
            last_block = self.chain.last_block
            n_block = self.generate_test_block()
            n_block.generate_block(last_block)
            n_block.block_status = BlockStatus.confirmed
            if find_block_index == x:
                find_block_hash = n_block.block_hash
                find_block_height = n_block.height
            self.chain.add_block(n_block)

        logging.debug("find block hash : %s ", find_block_hash)
        logging.debug("find block height : %d ", find_block_height)

        # WHEN
        find_block_by_hash = self.chain.find_block_by_hash(find_block_hash)
        find_block_by_height = self.chain.find_block_by_height(find_block_height)

        # THEN
        self.assertEqual(find_block_by_hash.block_hash, find_block_by_height.block_hash)

    def test_add_some_block_and_find_by_key(self):
        """몇개의 블럭을 추가한 후 임의의 블럭을 찾는다
        """
        # GIVEN
        size = 10
        find_block_index = int(size*random.random())
        find_block_hash = None
        for x in range(size):
            last_block = self.chain.last_block
            n_block = self.generate_test_block()
            n_block.generate_block(last_block)
            n_block.block_status = BlockStatus.confirmed
            if find_block_index == x:
                find_block_hash = n_block.block_hash
            logging.debug("new block height : %i", n_block.height)
            self.chain.add_block(n_block)

        logging.debug("find block index : %i ", find_block_index)
        logging.debug("find block hash : %s ", find_block_hash)

        # WHEN
        find_block = self.chain.find_block_by_hash(find_block_hash)
        logging.debug("find block height : %i", find_block.height)

        # THEN
        self.assertEqual(find_block_hash, find_block.block_hash)

    def test_add_and_find_tx(self):
        """block db 에 block_hash - block_object 를 저장할때, tx_hash - tx_object 도 저장한다.
        get tx by tx_hash 시 해당 block 을 효율적으로 찾기 위해서
        """
        tx = self.__add_single_tx_block_blockchain_return_tx()
        logging.debug("add tx hash : " + tx.tx_hash)

        saved_tx = self.chain.find_tx_by_key(tx.tx_hash)
        logging.debug("saved_tx: " + str(saved_tx.tx_hash))

        self.assertEqual(tx.tx_hash, saved_tx.tx_hash, "Fail Find Transaction")

    def test_add_and_verify_results(self):
        """invoke_result = "{"code" : "invoke_result_code" , "error_message": "message" }"

        """
        test_util.add_genesis_block()
        block = test_util.add_genesis_block()
        self.chain.add_block(block)

        tx = self.__add_single_tx_block_blockchain_return_tx()

        invoke_result = self.chain.find_invoke_result_by_tx_hash(tx.tx_hash)
        self.assertEqual(invoke_result['code'], ScoreResponse.SUCCESS)

    def __add_single_tx_block_blockchain_return_tx(self):
        last_block = self.chain.last_block
        block = Block(channel_name=conf.LOOPCHAIN_DEFAULT_CHANNEL)
        tx = test_util.create_basic_tx(self.__peer_id, self.peer_auth)
        block.put_transaction(tx)

        logging.debug("tx_hash: " + tx.tx_hash)

        block.generate_block(last_block)
        block.block_status = BlockStatus.confirmed

        # add_block to blockchain
        self.assertTrue(self.chain.add_block(block),
                        "Fail Add Block to BlockChain")
        return tx

    def test_unicode_decode_error(self):
        """ Transaction hash 는 UTF-8 인코딩이나 block hash 값은 sha256 hex byte array 이므로 인코딩 에러가 발생함
        """
        test_util.add_genesis_block()
        block = test_util.add_genesis_block()
        self.chain.add_block(block)

        last_block = self.chain.last_block
        unexpected_transaction = self.chain.find_tx_by_key(last_block.block_hash)
        self.assertIsNone(unexpected_transaction, "unexpected_transaction is not None")

    # blockchain is no longer a singleton (for multi chain).
    @unittest.skip
    def test_blockchain_is_singleton(self):
        x = BlockChain(test_util.make_level_db())
        y = BlockChain(test_util.make_level_db())

        self.assertTrue((x is y))
Ejemplo n.º 19
0
    def test_block_add(self):
        """
        블럭 추가 테스트
        제네시스 블럭을 만든후 10개의 트랜잭션을 가진 10개의 블럭을 생성하여
        블럭체인에 추가
        """
        store_identity = 'add_test_db'
        # test_store = test_util.make_key_value_store(store_identity)
        chain = BlockChain(self.channel_name, store_id=store_identity)
        self.assertIsNotNone(chain.get_blockchain_store(), "failed to create DB")

        block = test_util.add_genesis_block()
        chain.add_block(block)
        genesis_hash = chain.last_block.block_hash

        for x in range(10):
            # Create and add a new block (x 10)
            tmp_block = self.generate_test_block()
            tmp_block.block_status = BlockStatus.confirmed
            tmp_block.generate_block(chain.last_block)
            chain.add_block(tmp_block)
            logging.debug("신규 블럭 생성 및 블럭 HASH : %s", chain.last_block.block_hash)

        self.assertNotEqual(genesis_hash, chain.last_block.block_hash, "Failed to add blocks")

        # Add an unconfirmed block
        tmp_block = self.generate_test_block()
        tmp_block.block_status = BlockStatus.unconfirmed
        self.assertRaises(TypeError, "미인증 블럭 추가", chain.add_block, tmp_block)

        # Add store_data function
        tmp_block.block_status = BlockStatus.confirmed
        tmp_block.generate_block(chain.last_block)
        # block store function
        last_block_hash = chain.last_block.block_hash

        chain.add_block(tmp_block)

        self.assertNotEqual(last_block_hash, chain.last_block.block_hash)
        self.assertIsNotNone(chain.last_block)

        # remove test DB
        chain.close_blockchain_store()
Ejemplo n.º 20
0
class BlockManager:
    """Manage the blockchain of a channel. It has objects for consensus and db object.
    """

    MAINNET = "cf43b3fd45981431a0e64f79d07bfcf703e064b73b802c5f32834eec72142190"
    TESTNET = "885b8021826f7e741be7f53bb95b48221e9ab263f377e997b2e47a7b8f4a2a8b"

    def __init__(self, channel_service: 'ChannelService', peer_id: str, channel_name: str, store_id: str):
        self.__channel_service: ChannelService = channel_service
        self.__channel_name = channel_name
        self.__peer_id = peer_id

        self.__tx_queue = AgingCache(max_age_seconds=conf.MAX_TX_QUEUE_AGING_SECONDS,
                                     default_item_status=TransactionStatusInQueue.normal)
        self.blockchain = BlockChain(channel_name, store_id, self)
        self.__peer_type = None
        self.__consensus_algorithm = None
        self.candidate_blocks = CandidateBlocks(self.blockchain)

        self.set_peer_type(loopchain_pb2.PEER)
        self.__service_status = status_code.Service.online

        # old_block_hashes[height][new_block_hash] = old_block_hash
        self.__old_block_hashes: DefaultDict[int, Dict[Hash32, Hash32]] = defaultdict(dict)
        self.epoch: Epoch = None

        self._block_sync = BlockSync(self, channel_service)

    @property
    def channel_name(self):
        return self.__channel_name

    @property
    def service_status(self):
        # Return string for compatibility.
        if self.__service_status >= 0:
            return "Service is online: " + \
                   str(1 if self.__channel_service.state_machine.state == "BlockGenerate" else 0)
        else:
            return "Service is offline: " + status_code.get_status_reason(self.__service_status)

    def update_service_status(self, status):
        self.__service_status = status

    @property
    def peer_type(self):
        return self.__peer_type

    @property
    def consensus_algorithm(self):
        return self.__consensus_algorithm

    def set_peer_type(self, peer_type):
        self.__peer_type = peer_type

    def set_old_block_hash(self, block_height: int, new_block_hash: Hash32, old_block_hash: Hash32):
        self.__old_block_hashes[block_height][new_block_hash] = old_block_hash

    def get_old_block_hash(self,  block_height: int, new_block_hash: Hash32):
        return self.__old_block_hashes[block_height][new_block_hash]

    def pop_old_block_hashes(self, block_height: int):
        self.__old_block_hashes.pop(block_height)

    def get_total_tx(self):
        """
        블럭체인의 Transaction total 리턴합니다.

        :return: 블럭체인안의 transaction total count
        """
        return self.blockchain.total_tx

    def broadcast_send_unconfirmed_block(self, block_: Block, round_: int):
        """broadcast unconfirmed block for getting votes form reps
        """
        last_block: Block = self.blockchain.last_block
        if (self.__channel_service.state_machine.state != "BlockGenerate" and
                last_block.header.height > block_.header.height):
            util.logger.debug(
                f"Last block has reached a sufficient height. Broadcast will stop! ({block_.header.hash.hex()})")
            ConsensusSiever.stop_broadcast_send_unconfirmed_block_timer()
            return

        if last_block.header.revealed_next_reps_hash:
            if block_.header.is_unrecorded:
                self._send_unconfirmed_block(block_, last_block.header.reps_hash, round_)
            else:
                self._send_unconfirmed_block(block_, block_.header.reps_hash, round_)
        else:
            self._send_unconfirmed_block(block_, ChannelProperty().crep_root_hash, round_)

    def _send_unconfirmed_block(self, block_: Block, target_reps_hash, round_: int):
        util.logger.debug(
            f"BroadCast AnnounceUnconfirmedBlock "
            f"height({block_.header.height}) round({round_}) block({block_.header.hash}) peers: "
            f"target_reps_hash({target_reps_hash})")

        block_dumped = self.blockchain.block_dumps(block_)
        send_kwargs = {
            "block": block_dumped,
            "round_": round_,
            "channel": self.__channel_name,
            "peer_id": block_.header.peer_id.hex_hx(),
            "height": block_.header.height,
            "hash": block_.header.hash.hex()
        }

        release_recovery_mode = False
        if conf.RECOVERY_MODE:
            from loopchain.tools.recovery import Recovery
            if self.blockchain.block_height <= Recovery.release_block_height():
                util.logger.info(f"broadcast block({block_.header.height}) from recovery node")
                send_kwargs["from_recovery"] = True

            if self.blockchain.block_height >= Recovery.release_block_height():
                release_recovery_mode = True

        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "AnnounceUnconfirmedBlock",
            loopchain_pb2.BlockSend(**send_kwargs),
            reps_hash=target_reps_hash
        )

        if release_recovery_mode:
            conf.RECOVERY_MODE = False
            util.logger.info(f"recovery mode released at {self.blockchain.block_height}")

    def add_tx_obj(self, tx):
        """전송 받은 tx 를 Block 생성을 위해서 큐에 입력한다. load 하지 않은 채 입력한다.

        :param tx: transaction object
        """
        self.__tx_queue[tx.hash.hex()] = tx

    def get_tx(self, tx_hash) -> Transaction:
        """Get transaction from block_db by tx_hash

        :param tx_hash: tx hash
        :return: tx object or None
        """
        return self.blockchain.find_tx_by_key(tx_hash)

    def get_tx_info(self, tx_hash) -> dict:
        """Get transaction info from block_db by tx_hash

        :param tx_hash: tx hash
        :return: {'block_hash': "", 'block_height': "", "transaction": "", "result": {"code": ""}}
        """
        return self.blockchain.find_tx_info(tx_hash)

    def get_invoke_result(self, tx_hash):
        """ get invoke result by tx

        :param tx_hash:
        :return:
        """
        return self.blockchain.find_invoke_result_by_tx_hash(tx_hash)

    def get_tx_queue(self):
        return self.__tx_queue

    def get_count_of_unconfirmed_tx(self):
        """Monitors the node's tx_queue status and, if necessary, changes the properties of the sub-service according to the policy.

        :return: count of unconfirmed tx
        """
        return len(self.__tx_queue)

    async def relay_all_txs(self):
        rs_client = ObjectManager().channel_service.rs_client
        if not rs_client:
            return

        items = list(self.__tx_queue.d.values())
        self.__tx_queue.d.clear()

        for item in items:
            tx = item.value
            if not util.is_in_time_boundary(tx.timestamp, conf.TIMESTAMP_BOUNDARY_SECOND, util.get_now_time_stamp()):
                continue

            ts = TransactionSerializer.new(tx.version, tx.type(), self.blockchain.tx_versioner)
            if tx.version == v2.version:
                rest_method = RestMethod.SendTransaction2
            elif tx.version == v3.version:
                rest_method = RestMethod.SendTransaction3
            else:
                continue

            raw_data = ts.to_raw_data(tx)
            raw_data["from_"] = raw_data.pop("from")
            for i in range(conf.RELAY_RETRY_TIMES):
                try:
                    await rs_client.call_async(rest_method,
                                               rest_method.value.params(**raw_data))
                except Exception as e:
                    util.logger.warning(f"Relay failed. Tx({tx}), {e!r}")
                else:
                    break

    def restore_tx_status(self, tx: Transaction):
        util.logger.debug(f"tx : {tx}")
        self.__tx_queue.set_item_status(tx.hash.hex(), TransactionStatusInQueue.normal)

    def __validate_duplication_of_unconfirmed_block(self, unconfirmed_block: Block):
        if self.blockchain.last_block.header.height >= unconfirmed_block.header.height:
            raise InvalidUnconfirmedBlock("The unconfirmed block has height already added.")

        try:
            candidate_block = self.candidate_blocks.blocks[unconfirmed_block.header.hash].block
        except KeyError:
            # When an unconfirmed block confirms the previous block, it becomes the last unconfirmed block,
            # but if it fails verification it is not added to the candidate blocks.
            candidate_block: Block = self.blockchain.last_unconfirmed_block

        if candidate_block is None or unconfirmed_block.header.hash != candidate_block.header.hash:
            return

        raise DuplicationUnconfirmedBlock("Unconfirmed block has already been added.")

    def __validate_epoch_of_unconfirmed_block(self, unconfirmed_block: Block, round_: int):
        current_state = self.__channel_service.state_machine.state
        block_header = unconfirmed_block.header
        last_u_block = self.blockchain.last_unconfirmed_block

        if self.epoch.height == block_header.height and self.epoch.round < round_:
            raise InvalidUnconfirmedBlock(
                f"The unconfirmed block has invalid round. Expected({self.epoch.round}), Unconfirmed_block({round_})")

        if not self.epoch.complained_result:
            if last_u_block and (last_u_block.header.hash == block_header.hash or last_u_block.header.prep_changed):
                # TODO do not validate epoch in this case.
                expected_leader = block_header.peer_id.hex_hx()
            else:
                expected_leader = self.epoch.leader_id

            if expected_leader != block_header.peer_id.hex_hx():
                raise UnexpectedLeader(
                    f"The unconfirmed block({block_header.hash}) is made by an unexpected leader. "
                    f"Expected({expected_leader}), Unconfirmed_block({block_header.peer_id.hex_hx()})")

        if current_state == 'LeaderComplain' and self.epoch.leader_id == block_header.peer_id.hex_hx():
            raise InvalidUnconfirmedBlock(f"The unconfirmed block is made by complained leader.\n{block_header})")

    def add_unconfirmed_block(self, unconfirmed_block: Block, round_: int):
        """

        :param unconfirmed_block:
        :param round_:
        :return:
        """
        self.__validate_epoch_of_unconfirmed_block(unconfirmed_block, round_)
        self.__validate_duplication_of_unconfirmed_block(unconfirmed_block)

        last_unconfirmed_block: Block = self.blockchain.last_unconfirmed_block

        # TODO After the v0.4 update, remove this version parsing.
        if parse_version(unconfirmed_block.header.version) >= parse_version("0.4"):
            ratio = conf.VOTING_RATIO
        else:
            ratio = conf.LEADER_COMPLAIN_RATIO

        if unconfirmed_block.header.reps_hash:
            reps = self.blockchain.find_preps_addresses_by_roothash(unconfirmed_block.header.reps_hash)
            version = self.blockchain.block_versioner.get_version(unconfirmed_block.header.height)
            leader_votes = Votes.get_leader_votes_class(version)(
                reps,
                ratio,
                unconfirmed_block.header.height,
                None,
                unconfirmed_block.body.leader_votes
            )
            need_to_confirm = leader_votes.get_result() is None
        elif unconfirmed_block.body.confirm_prev_block:
            need_to_confirm = True
        else:
            need_to_confirm = False

        try:
            if need_to_confirm:
                self.blockchain.confirm_prev_block(unconfirmed_block)
                if unconfirmed_block.header.is_unrecorded:
                    self.blockchain.last_unconfirmed_block = None
                    raise UnrecordedBlock("It's an unnecessary block to vote.")
            elif last_unconfirmed_block is None:
                if self.blockchain.last_block.header.hash != unconfirmed_block.header.prev_hash:
                    raise BlockchainError(f"last block is not previous block. block={unconfirmed_block}")

                self.blockchain.last_unconfirmed_block = unconfirmed_block
        except BlockchainError as e:
            util.logger.warning(f"BlockchainError while confirm_block({e}), retry block_height_sync")
            self.__channel_service.state_machine.block_sync()
            raise InvalidUnconfirmedBlock(e)

    def add_confirmed_block(self, confirmed_block: Block, confirm_info=None):
        if self.__channel_service.state_machine.state != "Watch":
            util.logger.info(f"Can't add confirmed block if state is not Watch. {confirmed_block.header.hash.hex()}")
            return

        self.blockchain.add_block(confirmed_block, confirm_info=confirm_info)

    def rebuild_block(self):
        self.blockchain.rebuild_transaction_count()
        self.blockchain.rebuild_made_block_count()
        self.new_epoch()

        nid = self.blockchain.find_nid()
        if nid is None:
            genesis_block = self.blockchain.find_block_by_height(0)
            self.rebuild_nid(genesis_block)
        else:
            ChannelProperty().nid = nid

    def rebuild_nid(self, block: Block):
        nid = NID.unknown.value
        if block.header.hash.hex() == BlockManager.MAINNET:
            nid = NID.mainnet.value
        elif block.header.hash.hex() == BlockManager.TESTNET:
            nid = NID.testnet.value
        elif len(block.body.transactions) > 0:
            tx = next(iter(block.body.transactions.values()))
            nid = tx.nid
            if nid is None:
                nid = NID.unknown.value

        if isinstance(nid, int):
            nid = hex(nid)

        self.blockchain.put_nid(nid)
        ChannelProperty().nid = nid

    def start_block_height_sync(self):
        self._block_sync.block_height_sync()

    def start_block_height_sync_timer(self, is_run_at_start=False):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            util.logger.spam(f"add timer for block_request_call to radiostation...")
            timer_service.add_timer(
                timer_key,
                Timer(
                    target=timer_key,
                    duration=conf.GET_LAST_BLOCK_TIMER,
                    callback=self.start_block_height_sync,
                    is_repeat=True,
                    is_run_at_start=is_run_at_start
                )
            )

    def stop_block_height_sync_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_HEIGHT_SYNC
        timer_service: TimerService = self.__channel_service.timer_service
        if timer_key in timer_service.timer_list:
            timer_service.stop_timer(timer_key)

    def start_block_generate_timer(self):
        timer_key = TimerService.TIMER_KEY_BLOCK_GENERATE
        timer_service: TimerService = self.__channel_service.timer_service

        if timer_key not in timer_service.timer_list:
            if self.__consensus_algorithm:
                self.__consensus_algorithm.stop()

        self.__consensus_algorithm = ConsensusSiever(self)
        self.__consensus_algorithm.start_timer(timer_service)

    def stop_block_generate_timer(self):
        if self.__consensus_algorithm:
            self.__consensus_algorithm.stop()

    def request_rollback(self) -> bool:
        """Request block data rollback behind to 1 block

        :return: if rollback success return True, else return False
        """
        target_block = self.blockchain.find_block_by_hash32(self.blockchain.last_block.header.prev_hash)
        if not self.blockchain.check_rollback_possible(target_block):
            util.logger.warning(f"The request cannot be rollback to the target block({target_block}).")
            return False

        request_origin = {
            'blockHeight': target_block.header.height,
            'blockHash': target_block.header.hash.hex_0x()
        }

        request = convert_params(request_origin, ParamType.roll_back)
        stub = StubCollection().icon_score_stubs[ChannelProperty().name]

        util.logger.debug(f"Rollback request({request})")
        response: dict = cast(dict, stub.sync_task().rollback(request))
        try:
            response_to_json_query(response)
        except GenericJsonRpcServerError as e:
            util.logger.warning(f"response error = {e}")
        else:
            result_height = response.get("blockHeight")
            if hex(target_block.header.height) == result_height:
                util.logger.info(f"Rollback Success. result height = {result_height}")
                self.blockchain.rollback(target_block)
                self.rebuild_block()
                return True

        util.logger.warning(f"Rollback Fail. response = {response}")
        return False

    def get_next_leader(self) -> Optional[str]:
        """get next leader from last_block of BlockChain. for new_epoch and set_peer_type_in_channel

        :return:
        """

        block = self.blockchain.last_block

        if block.header.prep_changed_reason is NextRepsChangeReason.TermEnd:
            next_leader = self.blockchain.get_first_leader_of_next_reps(block)
        elif self.blockchain.made_block_count_reached_max(block):
            reps_hash = block.header.revealed_next_reps_hash or ChannelProperty().crep_root_hash
            reps = self.blockchain.find_preps_addresses_by_roothash(reps_hash)
            next_leader = self.blockchain.get_next_rep_string_in_reps(block.header.peer_id, reps)

            if next_leader is None:
                next_leader = self.__get_next_leader_by_block(block)
        else:
            next_leader = self.__get_next_leader_by_block(block)

        util.logger.debug(f"next_leader({next_leader}) from block({block.header.height})")
        return next_leader

    def __get_next_leader_by_block(self, block: Block) -> str:
        if block.header.next_leader is None:
            if block.header.peer_id:
                return block.header.peer_id.hex_hx()
            else:
                return ExternalAddress.empty().hex_hx()
        else:
            return block.header.next_leader.hex_hx()

    def get_target_list(self) -> List[str]:
        if self.blockchain.last_block:
            reps_hash = self.blockchain.get_reps_hash_by_header(self.blockchain.last_block.header)
        else:
            reps_hash = ChannelProperty().crep_root_hash
        rep_targets = self.blockchain.find_preps_targets_by_roothash(reps_hash)
        return list(rep_targets.values())

    def new_epoch(self):
        new_leader_id = self.get_next_leader()
        self.epoch = Epoch(self, new_leader_id)
        util.logger.info(f"Epoch height({self.epoch.height}), leader({self.epoch.leader_id})")

    def stop(self):
        self._block_sync.stop()

        if self.consensus_algorithm:
            self.consensus_algorithm.stop()

        # Close the store (aka leveldb) after all threads are cleaned up,
        # because hard crashes may occur otherwise.
        # https://plyvel.readthedocs.io/en/latest/api.html#DB.close
        self.blockchain.close_blockchain_store()

    def add_complain(self, vote: LeaderVote):
        util.logger.debug(f"vote({vote})")

        if not self.preps_contain(vote.rep):
            util.logger.debug(f"ignore vote from unknown prep: {vote.rep.hex_hx()}")
            return

        if not self.epoch:
            util.logger.debug(f"Epoch is not initialized.")
            return

        if self.epoch.height == vote.block_height:
            if self.epoch.round == vote.round:
                self.epoch.add_complain(vote)
                elected_leader = self.epoch.complain_result()
                if elected_leader:
                    self.__channel_service.reset_leader(elected_leader, complained=True)
            elif self.epoch.round > vote.round:
                if vote.new_leader != ExternalAddress.empty():
                    self.__send_fail_leader_vote(vote)
                else:
                    return
            else:
                # TODO: do round sync
                return
        elif self.epoch.height < vote.block_height:
            self.__channel_service.state_machine.block_sync()

    def __send_fail_leader_vote(self, leader_vote: LeaderVote):
        version = self.blockchain.block_versioner.get_version(leader_vote.block_height)
        fail_vote = Vote.get_leader_vote_class(version).new(
            signer=ChannelProperty().peer_auth,
            block_height=leader_vote.block_height,
            round_=leader_vote.round,
            old_leader=leader_vote.old_leader,
            new_leader=ExternalAddress.empty(),
            timestamp=util.get_time_stamp()
        )

        fail_vote_dumped = json.dumps(fail_vote.serialize())

        complain_kwargs = {
            "complain_vote": fail_vote_dumped,
            "channel": self.channel_name
        }

        if conf.RECOVERY_MODE:
            complain_kwargs["from_recovery"] = True

        request = loopchain_pb2.ComplainLeaderRequest(**complain_kwargs)

        reps_hash = self.blockchain.last_block.header.revealed_next_reps_hash or ChannelProperty().crep_root_hash
        rep_id = leader_vote.rep.hex_hx()
        target = self.blockchain.find_preps_targets_by_roothash(reps_hash)[rep_id]

        util.logger.debug(
            f"fail leader complain "
            f"complained_leader_id({leader_vote.old_leader}), "
            f"new_leader_id({ExternalAddress.empty()}),"
            f"round({leader_vote.round}),"
            f"target({target})")

        self.__channel_service.broadcast_scheduler.schedule_send_failed_leader_complain(
            "ComplainLeader", request, target=target
        )

    def get_leader_ids_for_complaint(self) -> Tuple[str, str]:
        """
        :return: Return complained_leader_id and new_leader_id for the Leader Complaint.
        """
        complained_leader_id = self.epoch.leader_id

        new_leader = self.blockchain.get_next_rep_in_reps(
            ExternalAddress.fromhex(complained_leader_id), self.epoch.reps)
        new_leader_id = new_leader.hex_hx() if new_leader else None

        if not isinstance(new_leader_id, str):
            new_leader_id = ""

        if not isinstance(complained_leader_id, str):
            complained_leader_id = ""

        return complained_leader_id, new_leader_id

    def leader_complain(self):
        complained_leader_id, new_leader_id = self.get_leader_ids_for_complaint()
        version = self.blockchain.block_versioner.get_version(self.epoch.height)
        leader_vote = Vote.get_leader_vote_class(version).new(
            signer=ChannelProperty().peer_auth,
            block_height=self.epoch.height,
            round_=self.epoch.round,
            old_leader=ExternalAddress.fromhex_address(complained_leader_id),
            new_leader=ExternalAddress.fromhex_address(new_leader_id),
            timestamp=util.get_time_stamp()
        )
        util.logger.info(
            f"LeaderVote : old_leader({complained_leader_id}), new_leader({new_leader_id}), round({self.epoch.round})")
        self.add_complain(leader_vote)

        leader_vote_serialized = leader_vote.serialize()
        leader_vote_dumped = json.dumps(leader_vote_serialized)

        complain_kwargs = {
            "complain_vote": leader_vote_dumped,
            "channel": self.channel_name
        }

        if conf.RECOVERY_MODE:
            complain_kwargs["from_recovery"] = True

        request = loopchain_pb2.ComplainLeaderRequest(**complain_kwargs)

        util.logger.debug(
            f"complained_leader_id({complained_leader_id}), "
            f"new_leader_id({new_leader_id})")

        reps_hash = self.blockchain.get_next_reps_hash_by_header(self.blockchain.last_block.header)
        self.__channel_service.broadcast_scheduler.schedule_broadcast("ComplainLeader",
                                                                      request,
                                                                      reps_hash=reps_hash)

    def vote_unconfirmed_block(self, block: Block, round_: int, is_validated):
        util.logger.debug(f"height({block.header.height}), "
                          f"block_hash({block.header.hash}), "
                          f"is_validated({is_validated})")
        vote = Vote.get_block_vote_class(block.header.version).new(
            signer=ChannelProperty().peer_auth,
            block_height=block.header.height,
            round_=round_,
            block_hash=block.header.hash if is_validated else Hash32.empty(),
            timestamp=util.get_time_stamp()
        )
        self.candidate_blocks.add_vote(vote)

        vote_serialized = vote.serialize()
        vote_dumped = json.dumps(vote_serialized)
        block_vote = loopchain_pb2.BlockVote(vote=vote_dumped, channel=ChannelProperty().name)

        target_reps_hash = block.header.reps_hash or ChannelProperty().crep_root_hash

        self.__channel_service.broadcast_scheduler.schedule_broadcast(
            "VoteUnconfirmedBlock",
            block_vote,
            reps_hash=target_reps_hash
        )

        return vote

    def verify_confirm_info(self, unconfirmed_block: Block):
        unconfirmed_header = unconfirmed_block.header
        my_height = self.blockchain.block_height
        util.logger.info(f"my_height({my_height}), unconfirmed_block_height({unconfirmed_header.height})")

        if my_height < (unconfirmed_header.height - 2):
            raise ConfirmInfoInvalidNeedBlockSync(
                f"trigger block sync: my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height})"
            )

        is_rep = ObjectManager().channel_service.is_support_node_function(conf.NodeFunction.Vote)
        if is_rep and my_height == unconfirmed_header.height - 2 and not self.blockchain.last_unconfirmed_block:
            raise ConfirmInfoInvalidNeedBlockSync(
                f"trigger block sync: my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height}), "
                f"last_unconfirmed_block({self.blockchain.last_unconfirmed_block})"
            )

        # a block at the same height as the unconfirmed block has already been added
        if my_height >= unconfirmed_header.height:
            raise ConfirmInfoInvalidAddedBlock(
                f"block is already added my_height({my_height}), "
                f"unconfirmed_block.header.height({unconfirmed_header.height})")

        block_verifier = BlockVerifier.new(unconfirmed_header.version, self.blockchain.tx_versioner)
        prev_block = self.blockchain.get_prev_block(unconfirmed_block)
        reps_getter = self.blockchain.find_preps_addresses_by_roothash

        util.logger.spam(f"prev_block: {prev_block.header.hash if prev_block else None}")
        if not prev_block:
            raise NotReadyToConfirmInfo(
                "There is no prev block or not ready to confirm block (Maybe node is starting)")

        try:
            if prev_block and prev_block.header.reps_hash and unconfirmed_header.height > 1:
                prev_reps = reps_getter(prev_block.header.reps_hash)
                block_verifier.verify_prev_votes(unconfirmed_block, prev_reps)
        except Exception as e:
            util.logger.warning(f"{e!r}")
            traceback.print_exc()
            raise ConfirmInfoInvalid("Unconfirmed block has no valid confirm info for previous block")

    def _vote(self, unconfirmed_block: Block, round_: int):
        exc = None
        try:
            block_version = self.blockchain.block_versioner.get_version(unconfirmed_block.header.height)
            block_verifier = BlockVerifier.new(block_version, self.blockchain.tx_versioner)
            block_verifier.invoke_func = self.blockchain.score_invoke
            reps_getter = self.blockchain.find_preps_addresses_by_roothash

            util.logger.debug(f"unconfirmed_block.header({unconfirmed_block.header})")

            block_verifier.verify(unconfirmed_block,
                                  self.blockchain.last_block,
                                  self.blockchain,
                                  generator=self.blockchain.get_expected_generator(unconfirmed_block),
                                  reps_getter=reps_getter)
        except NotInReps as e:
            util.logger.debug(f"Not In Reps({e}) state({self.__channel_service.state_machine.state})")
        except BlockHeightMismatch as e:
            exc = e
            util.logger.warning(f"Don't vote to the block of unexpected height. {e!r}")
        except Exception as e:
            exc = e
            util.logger.exception(f"{e!r}")
        else:
            self.candidate_blocks.add_block(
                unconfirmed_block, self.blockchain.find_preps_addresses_by_header(unconfirmed_block.header))
        finally:
            if isinstance(exc, BlockHeightMismatch):
                return

            is_validated = exc is None
            vote = self.vote_unconfirmed_block(unconfirmed_block, round_, is_validated)
            if self.__channel_service.state_machine.state == "BlockGenerate" and self.consensus_algorithm:
                self.consensus_algorithm.vote(vote)

    def vote_as_peer(self, unconfirmed_block: Block, round_: int):
        """Vote to AnnounceUnconfirmedBlock
        """
        util.logger.debug(
            f"height({unconfirmed_block.header.height}) "
            f"round({round_}) "
            f"unconfirmed_block({unconfirmed_block.header.hash.hex()})")
        util.logger.warning(f"last_block({self.blockchain.last_block.header.hash})")

        try:
            self.add_unconfirmed_block(unconfirmed_block, round_)
            if self.is_shutdown_block():
                self.start_suspend()
                return

        except InvalidUnconfirmedBlock as e:
            self.candidate_blocks.remove_block(unconfirmed_block.header.hash)
            util.logger.warning(f"{e!r}")
        except RoundMismatch as e:
            self.candidate_blocks.remove_block(unconfirmed_block.header.prev_hash)
            util.logger.warning(f"{e!r}")
        except UnrecordedBlock as e:
            util.logger.info(f"{e!r}")
        except DuplicationUnconfirmedBlock as e:
            util.logger.debug(f"{e!r}")
            self._vote(unconfirmed_block, round_)
        else:
            self._vote(unconfirmed_block, round_)

    def preps_contain(self, peer_address: ExternalAddress) -> bool:
        last_block = self.blockchain.last_block
        if last_block:
            preps = self.blockchain.find_preps_addresses_by_roothash(last_block.header.revealed_next_reps_hash)
            util.logger.debug(f"peer_addr: {peer_address}, preps: {preps}")
            return peer_address in preps

        return False

    def is_shutdown_block(self) -> bool:
        return self.blockchain.is_shutdown_block()

    def start_suspend(self):
        self.__channel_service.state_machine.suspend()
        self.blockchain.add_shutdown_unconfirmed_block()