def leader_complain(self):
    """Create and broadcast a LeaderVote complaining about the current leader.

    Builds a leader vote for this epoch's height/round, records it locally
    via add_complain, then schedules a ComplainLeader broadcast to the reps
    expected for the next block.
    """
    complained_leader_id, new_leader_id = self.get_leader_ids_for_complaint()
    # The vote class depends on the block version at the current epoch height.
    version = self.blockchain.block_versioner.get_version(self.epoch.height)
    leader_vote = Vote.get_leader_vote_class(version).new(
        signer=ChannelProperty().peer_auth,
        block_height=self.epoch.height,
        round_=self.epoch.round,
        old_leader=ExternalAddress.fromhex_address(complained_leader_id),
        new_leader=ExternalAddress.fromhex_address(new_leader_id),
        timestamp=util.get_time_stamp())
    util.logger.info(
        f"LeaderVote : old_leader({complained_leader_id}), new_leader({new_leader_id}), round({self.epoch.round})"
    )
    # Count our own vote locally before broadcasting it to the other reps.
    self.add_complain(leader_vote)
    leader_vote_serialized = leader_vote.serialize()
    leader_vote_dumped = json.dumps(leader_vote_serialized)
    request = loopchain_pb2.ComplainLeaderRequest(
        complain_vote=leader_vote_dumped,
        channel=self.channel_name)
    util.logger.debug(f"leader complain "
                      f"complained_leader_id({complained_leader_id}), "
                      f"new_leader_id({new_leader_id})")
    # Address the broadcast by the rep-set hash of the next block.
    reps_hash = self.blockchain.get_next_reps_hash_by_header(
        self.blockchain.last_block.header)
    self.__channel_service.broadcast_scheduler.schedule_broadcast(
        "ComplainLeader", request, reps_hash=reps_hash)
def _deserialize_header_data(self, json_data: dict):
    """Translate a serialized block-header dict into constructor kwargs.

    Mandatory fields raise KeyError when absent; the optional fields
    (prevHash, leader, signature, nextLeader) deserialize to None when
    missing or empty.
    """
    def _optional(key, convert):
        raw = json_data.get(key)
        return convert(raw) if raw else None

    hash_ = Hash32.fromhex(json_data["hash"])
    prev_hash = _optional('prevHash', Hash32.fromhex)
    peer_id = _optional('leader', ExternalAddress.fromhex)
    signature = _optional('signature', Signature.from_base64str)
    next_leader = _optional("nextLeader", ExternalAddress.fromhex)
    transactions_hash = Hash32.fromhex(json_data["transactionsHash"])
    receipts_hash = Hash32.fromhex(json_data["receiptsHash"])
    state_hash = Hash32.fromhex(json_data["stateHash"])
    reps_hash = Hash32.fromhex(json_data["repsHash"])
    next_reps_hash = Hash32.fromhex(json_data["nextRepsHash"])
    leader_votes_hash = Hash32.fromhex(json_data["leaderVotesHash"])
    prev_votes_hash = Hash32.fromhex(json_data["prevVotesHash"])
    height = int(json_data["height"], 16)
    timestamp = int(json_data["timestamp"], 16)

    return {
        "hash": hash_,
        "prev_hash": prev_hash,
        "height": height,
        "timestamp": timestamp,
        "peer_id": peer_id,
        "signature": signature,
        "next_leader": next_leader,
        "transactions_hash": transactions_hash,
        "receipts_hash": receipts_hash,
        "state_hash": state_hash,
        "reps_hash": reps_hash,
        "next_reps_hash": next_reps_hash,
        "leader_votes_hash": leader_votes_hash,
        "prev_votes_hash": prev_votes_hash,
        "logs_bloom": BloomFilter.fromhex(json_data["logsBloom"])
    }
def from_(self, tx_data: dict) -> 'Transaction':
    """Build a Transaction from raw tx JSON; any leftover keys become `extra`."""
    fields = dict(tx_data)
    fields.pop('method', None)
    tx_hash = fields.pop('tx_hash', None)
    sig = fields.pop('signature', None)
    ts = fields.pop('timestamp', None)
    sender = fields.pop('from', None)
    receiver = fields.pop('to', None)
    raw_value = fields.pop('value', None)
    raw_fee = fields.pop('fee', None)
    raw_nonce = fields.pop('nonce', None)

    # Numeric fields are hex/str encoded; convert before constructing.
    value = int_fromhex(raw_value)
    fee = int_fromhex(raw_fee)
    nonce = int_fromstr(raw_nonce) if raw_nonce is not None else None

    return Transaction(
        raw_data=tx_data,
        hash=Hash32.fromhex(tx_hash, ignore_prefix=True, allow_malformed=False),
        signature=Signature.from_base64str(sig),
        timestamp=int(ts) if ts is not None else None,
        from_address=ExternalAddress.fromhex(sender, ignore_prefix=False, allow_malformed=True),
        to_address=ExternalAddress.fromhex(receiver, ignore_prefix=False, allow_malformed=True),
        value=value,
        fee=fee,
        nonce=nonce,
        extra=fields,
    )
def add_complain(self, vote: LeaderVote):
    """Accumulate a LeaderVote and act on the epoch's complain result.

    Current-round votes are tallied; votes for an older round get a 'fail'
    reply so the late voter can catch up; a vote for a higher block height
    triggers block sync.
    """
    util.logger.spam(f"add_complain vote({vote})")
    if not self.epoch:
        util.logger.debug(f"Epoch is not initialized.")
        return
    if self.epoch.height == vote.block_height:
        if self.epoch.round == vote.round_:
            self.epoch.add_complain(vote)
        elif self.epoch.round > vote.round_:
            # Vote from a past round: answer with an empty-leader fail vote
            # unless the vote itself already names the empty leader.
            if vote.new_leader != ExternalAddress.empty():
                self.__send_fail_leader_vote(vote)
            else:
                return
        else:
            # TODO: do round sync
            return
        elected_leader = self.epoch.complain_result()
        if elected_leader:
            if elected_leader == ExternalAddress.empty().hex_xx() and vote.round_ == self.epoch.round:
                # Quorum chose the empty address: fall back to the rep that
                # follows the current epoch leader.
                util.logger.warning(
                    f"Fail to elect the next leader on {self.epoch.round} round."
                )
                elected_leader = self.blockchain.get_next_rep_in_reps(
                    ExternalAddress.fromhex(self.epoch.leader_id),
                    self.epoch.reps).hex_hx()
            if self.epoch.round == vote.round_:
                self.__channel_service.reset_leader(elected_leader, complained=True)
    elif self.epoch.height < vote.block_height:
        self.__channel_service.state_machine.block_sync()
def _deserialize(cls, data: dict):
    """Deserialize LeaderVote-specific fields on top of the base vote fields."""
    result = super()._deserialize(data)
    result.update(
        block_height=int(data["blockHeight"], 16),
        round=int(data["round"], 16),
        old_leader=ExternalAddress.fromhex_address(data["oldLeader"]),
        new_leader=ExternalAddress.fromhex_address(data["newLeader"]),
    )
    return result
def to_origin_data(cls, rep: ExternalAddress, timestamp: int, block_height: int,
                   round: int, old_leader: ExternalAddress,
                   new_leader: ExternalAddress):
    """Extend the base origin data with hex-encoded leader-vote fields.

    Note: the `round` parameter name shadows the builtin but is kept for
    caller compatibility (callers may pass it by keyword).
    """
    origin_data = super().to_origin_data(rep, timestamp)
    origin_data.update({
        "blockHeight": hex(block_height),
        "round": hex(round),
        "oldLeader": old_leader.hex_hx(),
        "newLeader": new_leader.hex_hx(),
    })
    return origin_data
def verify_leader_votes(self, block: 'Block', prev_block: 'Block',
                        reps: Sequence[ExternalAddress]):
    """Verify the leader votes carried by `block`, or the implicit succession.

    When the block carries leader votes, rebuild the vote collection and
    check that it refers to this block's height and elects this block's
    producer, then verify each vote signature. When it carries none, the
    producer must match the previous block's announced next leader (unless
    the prep set changed).
    """
    body: BlockBody = block.body
    if body.leader_votes:
        any_vote = next(vote for vote in body.leader_votes if vote)
        # Pick the votes class by the sample vote's version flag.
        votes_class = v0_5.LeaderVotes if any_vote.version else v0_1a.LeaderVotes
        leader_votes = votes_class(
            reps, conf.VOTING_RATIO, block.header.height, any_vote.round,
            any_vote.old_leader, body.leader_votes)
        if leader_votes.get_result() == ExternalAddress.empty():
            if leader_votes.block_height != block.header.height:
                # Bug fix: report the votes' block height as "Expected"
                # (previously printed leader_votes.round, which made the
                # height-mismatch error misleading).
                exception = RuntimeError(f"Block({block.header.height}, {block.header.hash.hex()}, "
                                         f"Height({block.header.height}), "
                                         f"Expected({leader_votes.block_height}).")
                self._handle_exception(exception)
        elif leader_votes.get_result() != block.header.peer_id:
            exception = RuntimeError(f"Block({block.header.height}, {block.header.hash.hex()}, "
                                     f"Leader({block.header.peer_id.hex_xx()}), "
                                     f"Expected({leader_votes.get_result()}).")
            self._handle_exception(exception)

        try:
            leader_votes.verify()
        except Exception as e:
            # FIXME : leader_votes.verify does not verify all votes when raising an exception.
            self._handle_exception(e)
    else:
        prev_block_header: BlockHeader = prev_block.header
        if prev_block_header.next_leader != block.header.peer_id and not prev_block_header.prep_changed:
            exception = RuntimeError(f"Block({block.header.height}, {block.header.hash.hex()}, "
                                     f"Leader({block.header.peer_id.hex_xx()}), "
                                     f"Expected({prev_block_header.next_leader.hex_xx()}).\n "
                                     f"LeaderVotes({body.leader_votes}")
            self._handle_exception(exception)
async def init(self, **kwargs):
    """Initialize Channel Service

    :param kwargs: takes (peer_id, peer_port, peer_target, rest_target)
        within parameters
    :return: None
    """
    # Push the peer id into the logging preset before anything else logs.
    loggers.get_preset().peer_id = kwargs.get('peer_id')
    loggers.get_preset().update_logger()
    ChannelProperty().peer_port = kwargs.get('peer_port')
    ChannelProperty().peer_target = kwargs.get('peer_target')
    ChannelProperty().rest_target = kwargs.get('rest_target')
    ChannelProperty().peer_id = kwargs.get('peer_id')
    ChannelProperty().peer_address = ExternalAddress.fromhex_address(
        ChannelProperty().peer_id)
    # Starts as a citizen node with no radiostation target.
    ChannelProperty().node_type = conf.NodeType.CitizenNode
    ChannelProperty().rs_target = None
    self.__peer_manager = PeerManager()
    await self.__init_peer_auth()
    self.__init_broadcast_scheduler()
    self.__init_block_manager()
    await self.__init_score_container()
    # Exclusive AMQP connection; retries per the configured attempts/delay.
    await self.__inner_service.connect(conf.AMQP_CONNECTION_ATTEMPTS,
                                       conf.AMQP_RETRY_DELAY,
                                       exclusive=True)
    await self.__init_sub_services()
def is_unrecorded(self) -> bool:
    """Return whether this header marks an unrecorded block.

    An unrecorded block carries an empty next leader together with empty
    (and therefore equal) reps/next-reps hashes.

    :return: bool
    """
    no_next_leader = self.next_leader == ExternalAddress.empty()
    empty_rep_hashes = self.reps_hash == self.next_reps_hash == Hash32.empty()
    return no_next_leader and empty_rep_hashes
def add_complain(self, vote: LeaderVote):
    """Accumulate a LeaderVote for the current epoch.

    Ignores votes from unknown preps; tallies current-round votes and resets
    the leader once a complain result exists; replies with a fail vote to
    old-round votes; triggers block sync for votes from a higher height.
    """
    util.logger.debug(f"vote({vote})")
    if not self.preps_contain(vote.rep):
        util.logger.debug(f"ignore vote from unknown prep: {vote.rep.hex_hx()}")
        return
    if not self.epoch:
        util.logger.debug(f"Epoch is not initialized.")
        return
    if self.epoch.height == vote.block_height:
        if self.epoch.round == vote.round:
            self.epoch.add_complain(vote)
            elected_leader = self.epoch.complain_result()
            if elected_leader:
                self.__channel_service.reset_leader(elected_leader, complained=True)
        elif self.epoch.round > vote.round:
            # Late vote from a previous round: answer with a fail vote unless
            # the vote already names the empty leader.
            if vote.new_leader != ExternalAddress.empty():
                self.__send_fail_leader_vote(vote)
            else:
                return
        else:
            # TODO: do round sync
            return
    elif self.epoch.height < vote.block_height:
        self.__channel_service.state_machine.block_sync()
def test_prep_changed_by_term_end_if_next_leader_is_empty(self, header_factory):
    """TermEnd must be reported when next_leader is empty and reps hashes differ."""
    random_reps_hash = Hash32(os.urandom(Hash32.size))
    random_next_reps_hash = Hash32(os.urandom(Hash32.size))
    header = header_factory(next_leader=ExternalAddress.empty(),
                            reps_hash=random_reps_hash,
                            next_reps_hash=random_next_reps_hash)

    assert header.prep_changed
    assert header.prep_changed_reason is NextRepsChangeReason.TermEnd
def test_transaction_v3_invalid_nid(self):
    """The verifier must reject a v3 tx whose nid differs from the chain's nid."""
    MockBlockchain = namedtuple("MockBlockchain", "find_nid find_tx_by_key")
    nids = list(range(0, 1000))
    random.shuffle(nids)

    builder = TransactionBuilder.new("0x3", None, self.tx_versioner)
    builder.step_limit = 1000000
    builder.value = 100000
    builder.signer = self.signer
    builder.to_address = ExternalAddress(os.urandom(20))
    builder.nid = nids[0]
    builder.nonce = random.randint(0, 100000)
    builder.data = "test"
    builder.data_type = "message"
    tx = builder.build()

    # The chain reports a different nid than the one in the tx.
    expected_nid = nids[1]
    mock_blockchain = MockBlockchain(find_nid=lambda: hex(expected_nid),
                                     find_tx_by_key=lambda _: False)

    verifier = TransactionVerifier.new(tx.version, tx.type(), self.tx_versioner)
    self.assertRaises(TransactionInvalidNidError,
                      lambda: verifier.verify(tx, mock_blockchain))
    self.assertRaises(TransactionInvalidNidError,
                      lambda: verifier.pre_verify(tx, nid=expected_nid))
def test_leader_votes_completed_with_out_of_round(self):
    """Empty-leader votes from later-round reps still complete the vote set.

    The first 26 reps vote for the real next leader (not enough for ratio
    0.51), then reps 26..54 vote for the empty address; the vote set must
    then be complete and still elect next_leader.

    Idiom fix: the original looped with `enumerate(zip(reps, signers))` but
    never used the index or the rep — only the signer is needed.
    """
    ratio = 0.51
    old_leader = self.reps[0]
    next_leader = self.reps[1]
    by_higher_rounder = ExternalAddress.empty()
    leader_votes = LeaderVotes(self.reps, ratio, 0, 0, old_leader)

    for signer in self.signers[:26]:
        leader_vote = LeaderVote.new(signer, 0, 0, 0, old_leader, next_leader)
        leader_votes.add_vote(leader_vote)
    leader_votes.get_summary()
    print(f"leader_votes.is_completed(): {leader_votes.is_completed()}")
    print(f"leader_votes.get_result(): {leader_votes.get_result()}")
    self.assertEqual(leader_votes.is_completed(), False)
    self.assertEqual(leader_votes.get_result(), None)

    for signer in self.signers[26:55]:
        leader_vote = LeaderVote.new(signer, 0, 0, 0, old_leader, by_higher_rounder)
        leader_votes.add_vote(leader_vote)
    leader_votes.get_summary()
    print(f"leader_votes.is_completed(): {leader_votes.is_completed()}")
    print(f"leader_votes.get_result(): {leader_votes.get_result()}")
    self.assertEqual(leader_votes.is_completed(), True)
    self.assertEqual(leader_votes.get_result(), next_leader)
def test_prep_changed_by_penalty_if_exists_next_reps_hash_and_next_leader(self, header_factory):
    """Penalty must be reported when reps change but next_leader is NOT empty.

    Per prep_changed_reason, TermEnd applies only when next_leader equals the
    empty address; a random (non-empty) next_leader therefore yields Penalty.
    Bug fix: the original asserted TermEnd, contradicting the test's own name
    and the documented classification.
    """
    header = header_factory(next_leader=ExternalAddress(os.urandom(ExternalAddress.size)),
                            reps_hash=Hash32(os.urandom(Hash32.size)),
                            next_reps_hash=Hash32(os.urandom(Hash32.size)))

    assert header.prep_changed
    assert header.prep_changed_reason == NextRepsChangeReason.Penalty
def __send_fail_leader_vote(self, leader_vote: LeaderVote):
    """Reply to an out-of-round complain with an empty-leader ('fail') vote.

    The fail vote mirrors the incoming vote's height/round/old leader, names
    the empty address as new leader, and is sent directly to the rep that
    produced the original vote (not broadcast).
    """
    version = self.blockchain.block_versioner.get_version(
        leader_vote.block_height)
    fail_vote = Vote.get_leader_vote_class(version).new(
        signer=ChannelProperty().peer_auth,
        block_height=leader_vote.block_height,
        round_=leader_vote.round,
        old_leader=leader_vote.old_leader,
        new_leader=ExternalAddress.empty(),
        timestamp=util.get_time_stamp())
    fail_vote_dumped = json.dumps(fail_vote.serialize())
    request = loopchain_pb2.ComplainLeaderRequest(
        complain_vote=fail_vote_dumped,
        channel=self.channel_name)
    # Resolve the voter's network target from the latest revealed rep set,
    # falling back to the crep root hash when none has been revealed.
    reps_hash = self.blockchain.last_block.header.revealed_next_reps_hash or ChannelProperty().crep_root_hash
    rep_id = leader_vote.rep.hex_hx()
    target = self.blockchain.find_preps_targets_by_roothash(reps_hash)[rep_id]
    util.logger.debug(f"fail leader complain "
                      f"complained_leader_id({leader_vote.old_leader}), "
                      f"new_leader_id({ExternalAddress.empty()}),"
                      f"round({leader_vote.round}),"
                      f"target({target})")
    self.__channel_service.broadcast_scheduler.schedule_send_failed_leader_complain(
        "ComplainLeader", request, target=target)
def test_invoke(self):
    """Exercise guard.invoke through blocking, expiry, and statistics reset.

    Walks the guard through: under-threshold passes, over-threshold blocking,
    denylist expiry after block_duration, and statistics reset after
    reset_time of inactivity.
    """
    guard = self.guard
    value = f"hx{os.urandom(20).hex()}"
    address = ExternalAddress.fromhex_address(value)
    tx = MockTx(address)

    # Up to the threshold, invocations pass and nothing is denylisted.
    for i in range(self.guard_threshold):
        blocked: bool = guard.invoke(tx)
        assert not blocked
        assert not guard._is_update_denylist
        assert len(guard._denylist) == 0

    # The case when count > guard_threshold
    blocked: bool = guard.invoke(tx)
    assert blocked
    assert guard._is_update_denylist
    assert value in guard._denylist

    # Blocking is expired
    _sleep(self.block_duration + 1)
    guard._check_denylist()
    assert value not in guard._denylist
    blocked: bool = guard.invoke(tx)
    assert not blocked
    assert guard._is_update_denylist
    assert len(guard._denylist) == 0

    # Sleep for reset test
    _sleep(self.reset_time + 1)

    # Case when cur_time - expire_time > reset_time and count > 0 and not blocked
    blocked: bool = guard.invoke(tx)
    assert not blocked
    assert value not in guard._statistics
def __get_next_leader_by_block(self, block: Block) -> str:
    """Return the next leader id (hex 'hx…') implied by the given block."""
    next_leader = block.header.next_leader
    if next_leader is not None:
        return next_leader.hex_hx()
    # No explicit next leader: fall back to the block producer, or to the
    # empty address when even the producer is unset.
    fallback = block.header.peer_id or ExternalAddress.empty()
    return fallback.hex_hx()
def test_valid_timestamp(self):
    """Test for timestamp buffer in block verifier"""

    def block_maker(timestamp: int, height: int = 0, prev_hash=None):
        """Make dummy block"""
        tx_versioner = TransactionVersioner()
        dummy_receipts = {}
        block_builder = BlockBuilder.new("0.1a", tx_versioner)
        # Fill the block with 1000 dummy v3 transactions and matching receipts.
        for i in range(1000):
            tx_builder = TransactionBuilder.new("0x3", None, tx_versioner)
            tx_builder.signer = test_signer
            tx_builder.to_address = ExternalAddress.new()
            tx_builder.step_limit = random.randint(0, 10000)
            tx_builder.value = random.randint(0, 10000)
            tx_builder.nid = 2
            tx = tx_builder.build()
            tx_serializer = TransactionSerializer.new(tx.version, tx.type(), tx_versioner)
            block_builder.transactions[tx.hash] = tx
            dummy_receipts[tx.hash.hex()] = {
                "dummy_receipt": "dummy",
                "tx_dumped": tx_serializer.to_full_data(tx)
            }
        block_builder.signer = test_signer
        block_builder.prev_hash = prev_hash
        block_builder.height = height
        block_builder.state_hash = Hash32(bytes(Hash32.size))
        block_builder.receipts = dummy_receipts
        block_builder.reps = [ExternalAddress.fromhex_address(test_signer.address)]
        # NOTE(review): peer_id/next_leader use fromhex while reps uses
        # fromhex_address for the same signer address — confirm both parse
        # the 'hx…' form identically.
        block_builder.peer_id = ExternalAddress.fromhex(test_signer.address)
        block_builder.next_leader = ExternalAddress.fromhex(test_signer.address)
        block_builder.fixed_timestamp = timestamp
        b = block_builder.build()
        assert b.header.timestamp == timestamp
        return b

    test_signer = Signer.from_prikey(os.urandom(32))
    first_block = block_maker(height=0, timestamp=utils.get_time_stamp())
    second_block = block_maker(height=1, timestamp=utils.get_time_stamp() + 5,
                               prev_hash=first_block.header.hash)
    # Timestamp far beyond the verifier's tolerated buffer must be rejected.
    third_block_from_far_future = block_maker(
        height=2, prev_hash=second_block.header.hash,
        timestamp=utils.get_time_stamp() + conf.TIMESTAMP_BUFFER_IN_VERIFIER + 5_000_000)

    block_verifier = BlockVerifier.new("0.1a", TransactionVersioner())
    leader = first_block.header.peer_id
    reps = [ExternalAddress.fromhex_address(test_signer.address)]

    print("*---Normal time range")
    block_verifier.verify(block=second_block, prev_block=first_block,
                          blockchain=None, generator=leader, reps=reps)

    print("*---Abnormal time range")
    with self.assertRaises(Exception):
        block_verifier.verify(block=third_block_from_far_future, prev_block=second_block,
                              blockchain=None, generator=leader, reps=reps)
def add_attrs_to_v3_builder(tx_builder):
    """Populate the attributes a v3 transaction builder requires, then return it."""
    tx_builder.to_address = ExternalAddress(os.urandom(ExternalAddress.size))
    tx_builder.value = 10000
    tx_builder.step_limit = 10000
    tx_builder.nid = 3
    tx_builder.nonce = 10000
    return tx_builder
def build_peer_id(self):
    """Return the cached peer id, deriving it from the signer on first use.

    :raises RuntimeError: when neither peer_id nor signer is set.
    """
    if self.peer_id is None:
        if self.signer is None:
            raise RuntimeError
        self.peer_id = ExternalAddress.fromhex_address(self.signer.address)
    return self.peer_id
def test_from_address_returns_its_addr_if_exists(
        self, tx_builder_factory: TxBuilderFactory, tx_version):
    """build_from_address must return a preset from_address unchanged."""
    preset_addr = ExternalAddress(os.urandom(ExternalAddress.size))
    builder = tx_builder_factory(tx_version)
    builder.from_address = preset_addr

    assert builder.build_from_address() == preset_addr
def build_from_address(self):
    """Return the sender address, deriving it from the signer when unset.

    :raises RuntimeError: when neither from_address nor signer is available.
    """
    if not self.from_address:
        if self.signer is None:
            raise RuntimeError(f"'signer' or 'from_address' is required.")
        self.from_address = ExternalAddress.fromhex_address(self.signer.address)
    return self.from_address
def reps_hash(self) -> Hash32:
    """return reps root hash.

    Builds a Merkle proof over the extended addresses of every peer in the
    current peer list.

    :return:
    """
    rep_addresses = (
        ExternalAddress.fromhex_address(peer.peer_id).extend()
        for peer in self._peer_list_data.peer_list.values()
    )
    block_prover = BlockProver(rep_addresses, BlockProverType.Rep)
    return block_prover.get_proof_root()
def test_transaction_v2_unsigned(self):
    """An unsigned v2 tx must fail verification until it is signed."""
    signer = Signer.new()
    builder = TransactionBuilder.new("0x2", None, self.tx_versioner)
    builder.fee = 1000000
    builder.value = 100000
    builder.from_address = ExternalAddress.fromhex_address(signer.address)
    builder.to_address = ExternalAddress(os.urandom(20))
    builder.nonce = random.randint(0, 100000)

    unsigned_tx = builder.build(is_signing=False)
    verifier = TransactionVerifier.new("0x2", unsigned_tx.type(), self.tx_versioner)
    self.assertRaises(TransactionInvalidSignatureError, lambda: verifier.verify(unsigned_tx))
    self.assertRaises(TransactionInvalidSignatureError, lambda: verifier.pre_verify(unsigned_tx))

    # Once signed, the same tx passes both verification paths.
    builder.signer = signer
    signed_tx = builder.sign_transaction(unsigned_tx)
    verifier.verify(signed_tx)
    verifier.pre_verify(signed_tx)
def block_maker(timestamp: int, height: int = 0, prev_hash=None):
    """Build a dummy 0.1a block holding 1000 dummy v3 transactions.

    Uses the enclosing scope's `test_signer` for signing; asserts the built
    header carries the requested (fixed) timestamp.
    """
    tx_versioner = TransactionVersioner()
    dummy_receipts = {}
    builder = BlockBuilder.new("0.1a", tx_versioner)

    for _ in range(1000):
        tx_builder = TransactionBuilder.new("0x3", None, tx_versioner)
        tx_builder.signer = test_signer
        tx_builder.to_address = ExternalAddress.new()
        tx_builder.step_limit = random.randint(0, 10000)
        tx_builder.value = random.randint(0, 10000)
        tx_builder.nid = 2
        tx = tx_builder.build()

        serializer = TransactionSerializer.new(tx.version, tx.type(), tx_versioner)
        builder.transactions[tx.hash] = tx
        dummy_receipts[tx.hash.hex()] = {
            "dummy_receipt": "dummy",
            "tx_dumped": serializer.to_full_data(tx)
        }

    builder.signer = test_signer
    builder.prev_hash = prev_hash
    builder.height = height
    builder.state_hash = Hash32(bytes(Hash32.size))
    builder.receipts = dummy_receipts
    builder.reps = [ExternalAddress.fromhex_address(test_signer.address)]
    builder.peer_id = ExternalAddress.fromhex(test_signer.address)
    builder.next_leader = ExternalAddress.fromhex(test_signer.address)
    builder.fixed_timestamp = timestamp

    block = builder.build()
    assert block.header.timestamp == timestamp
    return block
def _header(hash_: Hash32 = Hash32.new(),
            prev_hash: Hash32 = Hash32.new(),
            height: int = 0,
            timestamp: int = 0,
            peer_id: ExternalAddress = ExternalAddress.new(),
            signature: Signature = Signature.new(),
            next_leader: ExternalAddress = ExternalAddress.new(),
            logs_bloom: BloomFilter = BloomFilter.new(),
            transactions_hash: Hash32 = Hash32.new(),
            state_hash: Hash32 = Hash32.new(),
            receipts_hash: Hash32 = Hash32.new(),
            reps_hash: Hash32 = Hash32.new(),
            next_reps_hash: Hash32 = Hash32.new(),
            leader_votes_hash: Hash32 = Hash32.new(),
            prev_votes_hash: Hash32 = Hash32.new()) -> BlockHeader_v0_4:
    """Build a v0.4 block header fixture from the given (or default) fields."""
    header_fields = (hash_, prev_hash, height, timestamp, peer_id, signature,
                     next_leader, logs_bloom, transactions_hash, state_hash,
                     receipts_hash, reps_hash, next_reps_hash,
                     leader_votes_hash, prev_votes_hash)
    return BlockHeader_v0_4(*header_fields)
def deserialize(cls, votes_data: List[Dict], voting_ratio: float):
    """Rebuild a LeaderVotes collection from serialized vote dicts.

    Empty/None votes_data yields an empty collection with sentinel
    height/round of -1.
    """
    if not votes_data:
        return cls([], voting_ratio, -1, -1, ExternalAddress.empty())

    votes = [LeaderVote.deserialize(vote_data) for vote_data in votes_data]
    reps = [vote.rep for vote in votes]
    first = votes[0]
    votes_instance = cls(reps, voting_ratio,
                         first.block_height, first.round_, first.old_leader)
    # Slot each vote at its rep's position in the rep list.
    for vote in votes:
        votes_instance.votes[reps.index(vote.rep)] = vote
    return votes_instance
def _header(hash_: Hash32 = Hash32.new(),
            prev_hash: Hash32 = Hash32.new(),
            height: int = 0,
            timestamp: int = 0,
            peer_id: ExternalAddress = ExternalAddress.new(),
            signature: Signature = Signature.new(),
            next_leader: Address = Address.new(),
            merkle_tree_root_hash: Hash32 = Hash32.new(),
            commit_state: dict = None) -> BlockHeader_v0_1a:
    """Build a v0.1a block header fixture from the given (or default) fields.

    Bug fix: the original used a mutable default (`commit_state=dict()`),
    which is shared across every call — a mutation by one caller would leak
    into later fixtures. A fresh dict is now created per call; passing
    nothing still yields an empty dict, so callers are unaffected.
    """
    if commit_state is None:
        commit_state = {}
    return BlockHeader_v0_1a(hash_, prev_hash, height, timestamp, peer_id,
                             signature, next_leader, merkle_tree_root_hash,
                             commit_state)
def prep_changed_reason(self) -> Optional[NextRepsChangeReason]:
    """Return prep changed reason

    :return: NextRepsChangeReason : NoChange when preps are unchanged and the
        block is recorded; TermEnd when the next leader is the empty address;
        Penalty otherwise.
    """
    if not (self.prep_changed or self.is_unrecorded):
        return NextRepsChangeReason.NoChange
    if self.next_leader == ExternalAddress.empty():
        return NextRepsChangeReason.TermEnd
    return NextRepsChangeReason.Penalty
def _build_next_leader(self):
    """Decide the next_leader value for the header being built.

    TermEnd -> the empty address; Penalty -> an index into next_reps chosen
    from this peer's position in reps; otherwise the externally supplied
    next_leader is kept.
    """
    if self.next_reps_change_reason is NextRepsChangeReason.TermEnd:
        return ExternalAddress.empty()
    elif self.next_reps_change_reason is NextRepsChangeReason.Penalty:
        if not self.is_max_made_block_count and self.peer_id in self.next_reps:
            # Peer survived the penalty and has not hit its block quota:
            # keep its own slot index.
            next_index = self.reps.index(self.peer_id)
        else:
            # Otherwise advance one slot, wrapping around next_reps
            # (which may be shorter than reps).
            curr_index = self.reps.index(self.peer_id)
            next_index = curr_index + 1
            next_index = next_index if next_index < len(self.next_reps) else 0
        # NOTE(review): indexes next_reps with a position computed from reps;
        # assumes both lists stay aligned at that index — TODO confirm.
        return self.next_reps[next_index]
    else:
        return self.next_leader