async def test_wallet_make_transaction_with_fee(self, two_wallet_nodes):
    """Send a transaction that includes a fee and verify the spend bundle's fee plus
    confirmed/unconfirmed balances before and after the spend is farmed in."""
    num_blocks = 5
    full_nodes, wallets = two_wallet_nodes
    full_node_1 = full_nodes[0]
    wallet_node, server_2 = wallets[0]
    wallet_node_2, server_3 = wallets[1]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_1.full_node.server._port)), None)
    # Farm blocks whose rewards pay this wallet's puzzle hash.
    for i in range(0, num_blocks):
        await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))
    # NOTE(review): range(1, num_blocks) — presumably the newest block's rewards are
    # not yet spendable; confirm against the simulator's reward rules.
    funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks)
        ]
    )
    await time_out_assert(5, wallet.get_confirmed_balance, funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, funds)
    assert await wallet.get_confirmed_balance() == funds
    assert await wallet.get_unconfirmed_balance() == funds
    tx_amount = 3200000000000
    tx_fee = 10
    tx = await wallet.generate_signed_transaction(
        tx_amount,
        await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
        tx_fee,
    )
    # The spend bundle must declare exactly the fee we asked for.
    fees = tx.spend_bundle.fees()
    assert fees == tx_fee
    await wallet.push_transaction(tx)
    # Only the unconfirmed balance reflects the spend until it is farmed into a block.
    await time_out_assert(5, wallet.get_confirmed_balance, funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, funds - tx_amount - tx_fee)
    for i in range(0, num_blocks):
        await full_node_1.farm_new_block(FarmNewBlockProtocol(32 * b"0"))
    new_funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks + 1)
        ]
    )
    await time_out_assert(5, wallet.get_confirmed_balance, new_funds - tx_amount - tx_fee)
    await time_out_assert(5, wallet.get_unconfirmed_balance, new_funds - tx_amount - tx_fee)
def __init__(
    self,
    local_type: NodeType,
    ws: Any,  # Websocket
    server_port: int,
    log: logging.Logger,
    is_outbound: bool,
    is_feeler: bool,  # Special type of connection, that disconnects after the handshake.
    peer_host,
    incoming_queue,
    close_callback: Callable,
    peer_id,
    close_event=None,
    session=None,
):
    """Wrap a peer websocket, tracking connection metadata, metrics, and request state."""
    # Local properties
    self.ws: Any = ws
    self.local_type = local_type
    self.local_port = server_port
    # Remote properties
    self.peer_host = peer_host
    peername = self.ws._writer.transport.get_extra_info("peername")
    if peername is None:
        # BUG FIX: the original interpolated `self.ws_witer`, a typo for an attribute
        # that does not exist, so this path raised AttributeError instead of ValueError.
        raise ValueError(f"Was not able to get peername from {self.ws} at {self.peer_host}")
    connection_port = peername[1]
    self.peer_port = connection_port
    self.peer_server_port: Optional[uint16] = None
    self.peer_node_id = peer_id
    self.log = log
    # connection properties
    self.is_outbound = is_outbound
    self.is_feeler = is_feeler
    # ChiaConnection metrics
    self.creation_time = time.time()
    self.bytes_read = 0
    self.bytes_written = 0
    self.last_message_time: float = 0
    # Messaging
    self.incoming_queue: asyncio.Queue = incoming_queue
    self.outgoing_queue: asyncio.Queue = asyncio.Queue()
    self.inbound_task: Optional[asyncio.Task] = None
    self.outbound_task: Optional[asyncio.Task] = None
    self.active: bool = False  # once handshake is successful this will be changed to True
    self.close_event: asyncio.Event = close_event
    self.session = session
    self.close_callback = close_callback
    self.pending_requests: Dict[bytes32, asyncio.Event] = {}
    # BUG FIX: create_request stores per-request timeout tasks in this dict, but the
    # attribute was never initialized here, causing an AttributeError on first use.
    self.pending_timeouts: Dict[bytes32, asyncio.Task] = {}
    self.request_results: Dict[bytes32, Message] = {}
    self.closed = False
    self.connection_type: Optional[NodeType] = None
    self.request_nonce: uint16 = uint16(0)
async def create_request(self, message_no_id: Message, timeout: int) -> Optional[Message]:
    """
    Sends a message and waits for a response.

    Returns the response message, or None if the connection is already closed
    or no response arrived before the timeout.
    """
    if self.closed:
        return None
    # We will wait for this event, it will be set either by the response, or the timeout
    event = asyncio.Event()
    # The request nonce is an integer between 0 and 2**16 - 1, which is used to match requests to responses
    request_id = self.request_nonce
    self.request_nonce = uint16(self.request_nonce + 1) if self.request_nonce != (2**16 - 1) else uint16(0)
    message = Message(message_no_id.type, request_id, message_no_id.data)
    self.pending_requests[message.id] = event
    await self.outgoing_queue.put(message)

    # If the timeout passes, we set the event
    async def time_out(req_id, req_timeout):
        try:
            await asyncio.sleep(req_timeout)
            if req_id in self.pending_requests:
                self.pending_requests[req_id].set()
        except asyncio.CancelledError:
            if req_id in self.pending_requests:
                self.pending_requests[req_id].set()
            raise

    timeout_task = asyncio.create_task(time_out(message.id, timeout))
    self.pending_timeouts[message.id] = timeout_task
    await event.wait()
    self.pending_requests.pop(message.id)
    # BUG FIX: cancel the timer and drop its bookkeeping entry once the wait ends.
    # The original left the sleep task running and its pending_timeouts entry in
    # place forever, leaking one task and one dict entry per answered request.
    # (Cancelling after the pop above is safe: the CancelledError branch only sets
    # events for ids still present in pending_requests.)
    if not timeout_task.done():
        timeout_task.cancel()
    self.pending_timeouts.pop(message.id, None)
    result: Optional[Message] = None
    if message.id in self.request_results:
        result = self.request_results[message.id]
        assert result is not None
        self.log.info(f"<- {ProtocolMessageTypes(result.type).name} from: {self.peer_host}:{self.peer_port}")
        self.request_results.pop(result.id)
    return result
def batch_pre_validate_sub_blocks(
    constants_dict: Dict,
    sub_blocks_pickled: Dict[bytes, bytes],
    header_blocks_pickled: List[bytes],
    transaction_generators: List[Optional[bytes]],
    check_filter: bool,
    expected_difficulty: List[uint64],
    expected_sub_slot_iters: List[uint64],
) -> List[bytes]:
    """
    Validate a batch of serialized header blocks and return one serialized
    PreValidationResult per block. Arguments arrive as bytes/dicts, presumably
    because this runs in a worker process — TODO confirm against the caller.
    """
    assert len(header_blocks_pickled) == len(transaction_generators)
    # Rebuild the sub-block records needed by header validation.
    sub_blocks = {}
    for k, v in sub_blocks_pickled.items():
        sub_blocks[k] = SubBlockRecord.from_bytes(v)
    results: List[PreValidationResult] = []
    constants: ConsensusConstants = dataclass_from_dict(ConsensusConstants, constants_dict)
    for i in range(len(header_blocks_pickled)):
        try:
            header_block: HeaderBlock = HeaderBlock.from_bytes(header_blocks_pickled[i])
            generator: Optional[bytes] = transaction_generators[i]
            required_iters, error = validate_finished_header_block(
                constants,
                sub_blocks,
                header_block,
                check_filter,
                expected_difficulty[i],
                expected_sub_slot_iters[i],
            )
            cost_result = None
            error_int: Optional[uint16] = None
            if error is not None:
                error_int = uint16(error.code.value)
            # Only compute the generator program cost for blocks that validated.
            if not error and generator is not None:
                cost_result = calculate_cost_of_program(Program.from_bytes(generator), constants.CLVM_COST_RATIO_CONSTANT)
            results.append(PreValidationResult(error_int, required_iters, cost_result))
        except Exception:
            # An unexpected failure marks this block UNKNOWN instead of aborting the batch.
            error_stack = traceback.format_exc()
            log.error(f"Exception: {error_stack}")
            results.append(PreValidationResult(uint16(Err.UNKNOWN.value), None, None))
    return [bytes(r) for r in results]
async def test_public_connections(self, wallet_node):
    """A wallet server can successfully open a client connection to a full node."""
    node_apis, wallet_setups = wallet_node
    full_node_api = node_apis[0]
    node_server: ChiaServer = full_node_api.full_node.server
    wallet_node, wallet_server = wallet_setups[0]
    connected = await wallet_server.start_client(PeerInfo(self_hostname, uint16(node_server._port)), None)
    assert connected is True
async def test_wallet_make_transaction(self, two_wallet_nodes):
    """Send 10 mojos to a second wallet and check confirmed/unconfirmed balances
    before and after the spend is confirmed by more farmed blocks."""
    num_blocks = 10
    full_nodes, wallets = two_wallet_nodes
    full_node_1, server_1 = full_nodes[0]
    wallet_node, server_2 = wallets[0]
    wallet_node_2, server_3 = wallets[1]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
    # Farm blocks whose rewards pay this wallet.
    for i in range(0, num_blocks):
        await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))
    # NOTE(review): range stops at num_blocks - 2 — presumably the two newest blocks'
    # rewards are not yet confirmed; verify against the chain's maturity rules.
    funds = sum(
        [
            calculate_base_fee(uint32(i)) + calculate_block_reward(uint32(i))
            for i in range(0, num_blocks - 2)
        ]
    )
    await asyncio.sleep(2)
    assert await wallet.get_confirmed_balance() == funds
    assert await wallet.get_unconfirmed_balance() == funds
    spend_bundle = await wallet.generate_signed_transaction(
        10,
        await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
        0,
    )
    await wallet.push_transaction(spend_bundle)
    await asyncio.sleep(2)
    # Until farmed in, only the unconfirmed balance reflects the spend.
    confirmed_balance = await wallet.get_confirmed_balance()
    unconfirmed_balance = await wallet.get_unconfirmed_balance()
    assert confirmed_balance == funds
    assert unconfirmed_balance == funds - 10
    for i in range(0, num_blocks):
        await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))
    await asyncio.sleep(2)
    new_funds = sum(
        [
            calculate_base_fee(uint32(i)) + calculate_block_reward(uint32(i))
            for i in range(0, (2 * num_blocks) - 2)
        ]
    )
    # After more blocks, both balances account for the confirmed spend.
    confirmed_balance = await wallet.get_confirmed_balance()
    unconfirmed_balance = await wallet.get_unconfirmed_balance()
    assert confirmed_balance == new_funds - 10
    assert unconfirmed_balance == new_funds - 10
async def open_connection(self, request: Dict):
    """Connect the service to the peer named by request["host"]/["port"]; 500 on failure."""
    peer = PeerInfo(request["host"], uint16(int(request["port"])))
    server = getattr(self.service, "server", None)
    if server is None:
        raise aiohttp.web.HTTPInternalServerError()
    if not (await server.start_client(peer, None)):
        raise aiohttp.web.HTTPInternalServerError()
    return {"success": True}
async def test1(self):
    """Benchmark: broadcast 1000 unfinished blocks node-to-node and time how long
    the receiver takes to finish processing (300 s budget)."""
    store = FullNodeStore("fndb_test")
    await store._clear_database()
    blocks = bt.get_consecutive_blocks(test_constants, 10, [], 10)
    b: Blockchain = Blockchain(test_constants)
    await store.add_block(blocks[0])
    await b.initialize({})
    # Seed both the chain and the store with blocks 1..8.
    for i in range(1, 9):
        assert (await b.receive_block(blocks[i])) == ReceiveBlockResult.ADDED_TO_HEAD
        await store.add_block(blocks[i])
    full_node_1 = FullNode(store, b)
    server_1 = ChiaServer(21234, full_node_1, NodeType.FULL_NODE)
    _ = await server_1.start_server("127.0.0.1", None)
    full_node_1._set_server(server_1)
    full_node_2 = FullNode(store, b)
    server_2 = ChiaServer(21235, full_node_2, NodeType.FULL_NODE)
    full_node_2._set_server(server_2)
    await server_2.start_client(PeerInfo("127.0.0.1", uint16(21234)), None)
    await asyncio.sleep(2)  # Allow connections to get made
    num_unfinished_blocks = 1000
    start_unf = time.time()
    for i in range(num_unfinished_blocks):
        msg = Message("unfinished_block", peer_protocol.UnfinishedBlock(blocks[9]))
        server_1.push_message(OutboundMessage(NodeType.FULL_NODE, msg, Delivery.BROADCAST))
    # Send the whole block at the end so we can detect when the node is done
    block_msg = Message("block", peer_protocol.Block(blocks[9]))
    server_1.push_message(OutboundMessage(NodeType.FULL_NODE, block_msg, Delivery.BROADCAST))
    # Poll until block 9 becomes a tip, then shut both servers down.
    while time.time() - start_unf < 300:
        if max([h.height for h in b.get_current_tips()]) == 9:
            print(f"Time taken to process {num_unfinished_blocks} is {time.time() - start_unf}")
            server_1.close_all()
            server_2.close_all()
            await server_1.await_closed()
            await server_2.await_closed()
            return
        await asyncio.sleep(0.1)
    server_1.close_all()
    server_2.close_all()
    await server_1.await_closed()
    await server_2.await_closed()
    raise Exception("Took too long to process blocks")
async def test_invalid_block(self, wallet_node_30_freeze):
    """A transaction block inside the initial-transaction-freeze window is rejected,
    while the same transaction is accepted once the chain passes height 30."""
    num_blocks = 5
    full_nodes, wallets = wallet_node_30_freeze
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node, server_2 = wallets[0]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
    for i in range(num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
    )
    # funds += calculate_base_farmer_reward(0)
    await asyncio.sleep(2)
    print(await wallet.get_confirmed_balance(), funds)
    await time_out_assert(10, wallet.get_confirmed_balance, funds)
    tx: TransactionRecord = await wallet.generate_signed_transaction(100, ph, 0)
    current_blocks = await full_node_api.get_all_full_blocks()
    # Build two candidate next blocks: one carrying the tx, one empty.
    new_blocks = bt.get_consecutive_blocks(
        1, block_list_input=current_blocks, transaction_data=tx.spend_bundle, guarantee_transaction_block=True
    )
    last_block = new_blocks[-1:][0]
    new_blocks_no_tx = bt.get_consecutive_blocks(1, block_list_input=current_blocks, guarantee_transaction_block=True)
    last_block_no_tx = new_blocks_no_tx[-1:][0]
    # Inside the freeze window, the tx-carrying block must be rejected...
    result, error, fork = await full_node_api.full_node.blockchain.receive_block(last_block, None)
    assert error is not None
    assert error is Err.INITIAL_TRANSACTION_FREEZE
    assert result is ReceiveBlockResult.INVALID_BLOCK
    # ...while the empty block is accepted as the new peak.
    result, error, fork = await full_node_api.full_node.blockchain.receive_block(last_block_no_tx, None)
    assert error is None
    assert result is ReceiveBlockResult.NEW_PEAK
    # Advance the chain past the freeze height of 30.
    after_freeze_blocks = bt.get_consecutive_blocks(24, block_list_input=new_blocks_no_tx)
    for block in after_freeze_blocks:
        await full_node_api.full_node.blockchain.receive_block(block, None)
    assert full_node_api.full_node.blockchain.get_peak_height() == 30
    # The same transaction is now valid inside a block.
    new_blocks = bt.get_consecutive_blocks(
        1, block_list_input=after_freeze_blocks, transaction_data=tx.spend_bundle, guarantee_transaction_block=True
    )
    last_block = new_blocks[-1:][0]
    result, error, fork = await full_node_api.full_node.blockchain.receive_block(last_block, None)
    assert error is None
    assert result is ReceiveBlockResult.NEW_PEAK
async def test_wallet_reorg_get_coinbase(self, wallet_node_simulator, default_400_blocks):
    """After a reorg, the wallet sees no transactions from the abandoned chain, then
    picks up the pool/farmer rewards paid to it on the replacement chain."""
    full_nodes, wallets = wallet_node_simulator
    full_node_api = full_nodes[0]
    wallet_node, server_2 = wallets[0]
    fn_server = full_node_api.full_node.server
    wsm = wallet_node.wallet_state_manager
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
    # Insert 400 blocks
    for block in default_400_blocks:
        await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
    # Reorg blocks that carry reward
    num_blocks_reorg = 30
    blocks_reorg = bt.get_consecutive_blocks(num_blocks_reorg, block_list_input=default_400_blocks[:-5])
    for block in blocks_reorg[:-5]:
        await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))

    async def get_tx_count(wallet_id):
        # Helper polled by time_out_assert: number of wallet transactions.
        txs = await wsm.get_all_transactions(wallet_id)
        return len(txs)

    # None of the reorg blocks so far paid this wallet.
    await time_out_assert(10, get_tx_count, 0, 1)
    # Extend the chain with one block rewarding this wallet, then 40 more on top.
    num_blocks_reorg_1 = 40
    blocks_reorg_1 = bt.get_consecutive_blocks(
        1, pool_reward_puzzle_hash=ph, farmer_reward_puzzle_hash=ph, block_list_input=blocks_reorg[:-30]
    )
    blocks_reorg_2 = bt.get_consecutive_blocks(num_blocks_reorg_1, block_list_input=blocks_reorg_1)
    for block in blocks_reorg_2[-41:]:
        await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
    await disconnect_all_and_reconnect(server_2, fn_server)
    # Confirm we have the funds
    funds = calculate_pool_reward(uint32(len(blocks_reorg_1))) + calculate_base_farmer_reward(uint32(len(blocks_reorg_1)))
    # One pool reward tx plus one farmer reward tx.
    await time_out_assert(10, get_tx_count, 2, 1)
    await time_out_assert(10, wallet.get_confirmed_balance, funds)
async def test_spam_tx(self, setup_two_nodes):
    """Spamming new_transaction messages: moderate volumes survive outbound limiting,
    but exceeding the inbound limit closes the connection and bans the peer's IP."""
    nodes, _ = setup_two_nodes
    full_node_1, full_node_2 = nodes
    server_1 = nodes[0].full_node.server
    server_2 = nodes[1].full_node.server
    await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
    assert len(server_1.all_connections) == 1
    ws_con: WSChiaConnection = list(server_1.all_connections.values())[0]
    ws_con_2: WSChiaConnection = list(server_2.all_connections.values())[0]
    # Pretend to be a non-local peer so the ban machinery applies — presumably
    # localhost is exempt from banning; TODO confirm.
    ws_con.peer_host = "1.2.3.4"
    ws_con_2.peer_host = "1.2.3.4"
    new_tx_message = make_msg(
        ProtocolMessageTypes.new_transaction,
        full_node_protocol.NewTransaction(bytes([9] * 32), uint64(0), uint64(0)),
    )
    for i in range(4000):
        await ws_con._send_message(new_tx_message)
    await asyncio.sleep(1)
    assert not ws_con.closed
    # Tests outbound rate limiting, we will not send too much data
    for i in range(2000):
        await ws_con._send_message(new_tx_message)
    await asyncio.sleep(1)
    assert not ws_con.closed
    # Remove outbound rate limiter to test inbound limits
    ws_con.outbound_rate_limiter = RateLimiter(percentage_of_limit=10000)
    for i in range(6000):
        await ws_con._send_message(new_tx_message)
    await asyncio.sleep(1)

    def is_closed():
        # Polled by time_out_assert below.
        return ws_con.closed

    await time_out_assert(15, is_closed)
    assert ws_con.closed

    def is_banned():
        # The receiving server should have banned the spoofed address.
        return "1.2.3.4" in server_2.banned_peers

    await time_out_assert(15, is_banned)
async def open_connection(self, request: Dict):
    """Connect the wrapped service to the peer named in the request; raise on failure."""
    peer = PeerInfo(request["host"], uint16(int(request["port"])))
    service = self.rpc_api.service
    on_connect = getattr(service, "on_connect", None)
    server = getattr(service, "server", None)
    if server is None or not (await server.start_client(peer, on_connect)):
        raise ValueError("Start client failed, or server is not set")
    return {}
async def add_connection(self, text: str):
    """Parse "host:port" user input and attempt a connection, reporting errors via error_msg."""
    if ":" not in text:
        self.error_msg.text = "Enter a valid IP and port in the following format: 10.5.4.3:8000"
        return
    # Split on the LAST colon so hosts that contain colons keep their full host part.
    ip, _, port = text.rpartition(":")
    target_node = PeerInfo(ip, uint16(int(port)))
    log.error(f"Want to connect to {ip}, {port}")
    if not (await self.node_server.start_client(target_node, None)):
        self.error_msg.text = f"Failed to connect to {ip}:{port}"
async def setup_full_system(consensus_constants: ConsensusConstants, b_tools=None, b_tools_1=None):
    """
    Async generator fixture: start a complete system (introducer, harvester, farmer,
    VDF clients, timelords, two full nodes), yield the running services as a tuple,
    then tear everything down after the test body finishes.
    """
    if b_tools is None:
        b_tools = BlockTools(constants=test_constants)
    if b_tools_1 is None:
        b_tools_1 = BlockTools(constants=test_constants)
    # Each entry is an async generator that yields the running service(s) once.
    node_iters = [
        setup_introducer(21233),
        setup_harvester(21234, 21235, consensus_constants, b_tools),
        setup_farmer(21235, consensus_constants, b_tools, uint16(21237)),
        setup_vdf_clients(8000),
        setup_timelord(21236, 21237, False, consensus_constants, b_tools),
        setup_full_node(consensus_constants, "blockchain_test.db", 21237, b_tools, 21233, False, 10),
        setup_full_node(consensus_constants, "blockchain_test_2.db", 21238, b_tools_1, 21233, False, 10),
        setup_vdf_client(7999),
        setup_timelord(21239, 21238, True, consensus_constants, b_tools_1),
    ]
    introducer, introducer_server = await node_iters[0].__anext__()
    harvester, harvester_server = await node_iters[1].__anext__()
    farmer, farmer_server = await node_iters[2].__anext__()

    async def num_connections():
        # Polled below: how many peers the harvester currently has.
        count = len(harvester.server.all_connections.items())
        return count

    # Wait until the harvester has a connection before starting the rest.
    await time_out_assert_custom_interval(10, 3, num_connections, 1)
    vdf_clients = await node_iters[3].__anext__()
    timelord, timelord_server = await node_iters[4].__anext__()
    node_api_1 = await node_iters[5].__anext__()
    node_api_2 = await node_iters[6].__anext__()
    vdf_sanitizer = await node_iters[7].__anext__()
    sanitizer, sanitizer_server = await node_iters[8].__anext__()
    yield (
        node_api_1,
        node_api_2,
        harvester,
        farmer,
        introducer,
        timelord,
        vdf_clients,
        vdf_sanitizer,
        sanitizer,
        node_api_1.full_node.server,
    )
    # Drive the teardown section of every setup generator.
    await _teardown_nodes(node_iters)
async def test_close_height_but_big_reorg(self, three_nodes):
    """Nodes holding chains with tips at heights 50, 50 and 89 all converge on the
    longest (height-89) chain once connected."""
    blocks_a = bt.get_consecutive_blocks(50)
    blocks_b = bt.get_consecutive_blocks(51, seed=b"B")
    blocks_c = bt.get_consecutive_blocks(90, seed=b"C")
    full_node_1, full_node_2, full_node_3 = three_nodes
    server_1 = full_node_1.full_node.server
    server_2 = full_node_2.full_node.server
    server_3 = full_node_3.full_node.server
    # Give each node its own chain.
    for block in blocks_a:
        await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
    for block in blocks_b:
        await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(block))
    for block in blocks_c:
        await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
    # Connect only nodes 1 and 2: neither reorgs yet, node 3 stays at 89.
    await server_2.start_client(
        PeerInfo(self_hostname, uint16(server_1._port)),
        on_connect=full_node_2.full_node.on_connect,
    )
    await time_out_assert(60, node_height_exactly, True, full_node_1, 50)
    await time_out_assert(60, node_height_exactly, True, full_node_2, 50)
    await time_out_assert(60, node_height_exactly, True, full_node_3, 89)
    # Connect node 3 to both: everyone converges on the height-89 chain.
    await server_3.start_client(
        PeerInfo(self_hostname, uint16(server_1._port)),
        on_connect=full_node_3.full_node.on_connect,
    )
    await server_3.start_client(
        PeerInfo(self_hostname, uint16(server_2._port)),
        on_connect=full_node_3.full_node.on_connect,
    )
    await time_out_assert(60, node_height_exactly, True, full_node_1, 89)
    await time_out_assert(60, node_height_exactly, True, full_node_2, 89)
    await time_out_assert(60, node_height_exactly, True, full_node_3, 89)
async def test_short_sync_wallet(self, wallet_node):
    """A wallet that connects after the fact short-syncs to the node's chain."""
    # This must be lower than the short_sync in wallet_node.
    num_blocks = 5
    blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 10)
    full_node_1, wallet_node, server_1, server_2 = wallet_node
    # Feed every block after the first one to the full node.
    for block in blocks[1:]:
        async for _ in full_node_1.respond_block(full_node_protocol.RespondBlock(block)):
            pass
    await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
    await time_out_assert(60, wallet_height_at_least, True, wallet_node, 3)
async def open_connection(self, request) -> web.Response:
    """Open a new connection to another node; 500 if the server is absent or the dial fails."""
    body = await request.json()
    peer = PeerInfo(body["host"], uint16(int(body["port"])))
    server = self.full_node.server
    if server is None or not (await server.start_client(peer, None)):
        raise web.HTTPInternalServerError()
    return obj_to_response("")
async def test_transaction_freeze(self, wallet_node_30_freeze):
    """During the initial transaction freeze, a spend is rejected via every entry point
    (wallet send, tx gossip, direct respond); after height 30 it is requested and accepted."""
    num_blocks = 5
    full_nodes, wallets = wallet_node_30_freeze
    full_node_api: FullNodeSimulator = full_nodes[0]
    full_node_server = full_node_api.server
    wallet_node, server_2 = wallets[0]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    # A dummy peer to exercise the respond_transaction path later on.
    incoming_queue, node_id = await add_dummy_connection(full_node_server, 12312)
    await server_2.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
    for i in range(num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
    )
    # funds += calculate_base_farmer_reward(0)
    await asyncio.sleep(2)
    print(await wallet.get_confirmed_balance(), funds)
    await time_out_assert(10, wallet.get_confirmed_balance, funds)
    tx: TransactionRecord = await wallet.generate_signed_transaction(100, ph, 0)
    # Inside the freeze window: a wallet send fails...
    spend = wallet_protocol.SendTransaction(tx.spend_bundle)
    response = await full_node_api.send_transaction(spend)
    assert wallet_protocol.TransactionAck.from_bytes(response.data).status == MempoolInclusionStatus.FAILED
    # ...gossip about the tx is ignored...
    new_spend = full_node_protocol.NewTransaction(tx.spend_bundle.name(), 1, 0)
    response = await full_node_api.new_transaction(new_spend)
    assert response is None
    # ...and a directly-supplied transaction is dropped too.
    peer = full_node_server.all_connections[node_id]
    new_spend = full_node_protocol.RespondTransaction(tx.spend_bundle)
    response = await full_node_api.respond_transaction(new_spend, peer=peer)
    assert response is None
    # Farm past the freeze height.
    for i in range(26):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # Now the node actively requests the announced transaction...
    new_spend = full_node_protocol.NewTransaction(tx.spend_bundle.name(), 1, 0)
    response = await full_node_api.new_transaction(new_spend)
    assert response is not None
    assert ProtocolMessageTypes(response.type) == ProtocolMessageTypes.request_transaction
    # ...and a freshly generated spend is accepted into the mempool.
    tx: TransactionRecord = await wallet.generate_signed_transaction(100, ph, 0)
    spend = wallet_protocol.SendTransaction(tx.spend_bundle)
    response = await full_node_api.send_transaction(spend)
    assert response is not None
    assert wallet_protocol.TransactionAck.from_bytes(response.data).status == MempoolInclusionStatus.SUCCESS
    assert ProtocolMessageTypes(response.type) == ProtocolMessageTypes.transaction_ack
async def test_wallet_make_transaction(self, two_wallet_nodes):
    """Send 10 mojos to a second wallet and verify confirmed/unconfirmed balances
    before and after the spend is farmed into a block."""
    num_blocks = 5
    full_nodes, wallets = two_wallet_nodes
    full_node_api = full_nodes[0]
    server_1 = full_node_api.full_node.server
    wallet_node, server_2 = wallets[0]
    wallet_node_2, server_3 = wallets[1]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
    for i in range(0, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # NOTE(review): range(1, num_blocks) — presumably the newest block's rewards are
    # not yet counted; confirm against the simulator's reward rules.
    funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks)
        ]
    )
    await time_out_assert(5, wallet.get_confirmed_balance, funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, funds)
    tx = await wallet.generate_signed_transaction(
        10,
        await wallet_node_2.wallet_state_manager.main_wallet.get_new_puzzlehash(),
        0,
    )
    await wallet.push_transaction(tx)
    # The spend stays unconfirmed until more blocks are farmed.
    await time_out_assert(5, wallet.get_confirmed_balance, funds)
    await time_out_assert(5, wallet.get_unconfirmed_balance, funds - 10)
    for i in range(0, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    new_funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, (2 * num_blocks))
        ]
    )
    await time_out_assert(5, wallet.get_confirmed_balance, new_funds - 10)
    await time_out_assert(5, wallet.get_unconfirmed_balance, new_funds - 10)
def get_peers(self, max_peers: int = 0, randomize: bool = False, recent_threshold=9999999) -> List[TimestampedPeerInfo]:
    """Return peers added within recent_threshold seconds, optionally shuffled,
    capped at max_peers (0 means no cap)."""
    recent: List[TimestampedPeerInfo] = []
    for peer in self._peers:
        if time.time() - self.time_added[peer.get_hash()] < recent_threshold:
            recent.append(TimestampedPeerInfo(peer.host, uint16(peer.port), uint64(0)))
    if not max_peers or max_peers > len(recent):
        max_peers = len(recent)
    if randomize:
        random.shuffle(recent)
    return recent[:max_peers]
async def test_basic_sync_wallet(self, wallet_node):
    """Wallet syncs to a 25-block chain, then follows the node onto a longer reorg chain."""
    num_blocks = 25
    blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [])
    full_node_1, wallet_node, server_1, server_2 = wallet_node
    for i in range(1, len(blocks)):
        async for _ in full_node_1.respond_block(full_node_protocol.RespondBlock(blocks[i])):
            pass
    await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
    start = time.time()
    found = False
    # Poll (60 s budget) until the wallet's LCA gets close to the tip.
    while time.time() - start < 60:
        # The second node should eventually catch up to the first one, and have the
        # same tip at height num_blocks - 1.
        if (
            wallet_node.wallet_state_manager.block_records[wallet_node.wallet_state_manager.lca].height
            >= num_blocks - 6
        ):
            found = True
            break
        await asyncio.sleep(0.1)
    if not found:
        raise Exception(f"Took too long to process blocks, stopped at: {time.time() - start}")
    # Tests a reorg with the wallet
    start = time.time()
    found = False
    blocks_reorg = bt.get_consecutive_blocks(test_constants, 45, blocks[:-5])
    for i in range(1, len(blocks_reorg)):
        async for msg in full_node_1.respond_block(full_node_protocol.RespondBlock(blocks_reorg[i])):
            server_1.push_message(msg)
    start = time.time()
    # Poll (100 s budget) until the wallet's LCA lands on the reorg chain at height 63.
    while time.time() - start < 100:
        if wallet_node.wallet_state_manager.block_records[wallet_node.wallet_state_manager.lca].height == 63:
            found = True
            break
        await asyncio.sleep(0.1)
    if not found:
        raise Exception(f"Took too long to process blocks, stopped at: {time.time() - start}")
async def test_backtrack_sync_2(self, two_nodes):
    """Node 2 backtrack-syncs a 9-block chain whose first block skips several slots."""
    chain = bt.get_consecutive_blocks(1, skip_slots=3)
    chain = bt.get_consecutive_blocks(8, chain, skip_slots=0)
    full_node_1, full_node_2, server_1, server_2 = two_nodes
    # Hand the full chain to node 1 before node 2 connects.
    for b in chain:
        await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(b))
    await server_2.start_client(
        PeerInfo(self_hostname, uint16(server_1._port)),
        on_connect=full_node_2.full_node.on_connect,
    )
    await time_out_assert(60, node_height_exactly, True, full_node_2, 8)
async def test_wallet_reorg_sync(self, wallet_node_simulator, default_400_blocks):
    """A reorg that replaces the wallet's reward blocks resets its balance and
    transaction history back to zero."""
    num_blocks = 5
    full_nodes, wallets = wallet_node_simulator
    full_node_api = full_nodes[0]
    wallet_node, server_2 = wallets[0]
    fn_server = full_node_api.full_node.server
    wsm: WalletStateManager = wallet_node.wallet_state_manager
    wallet = wsm.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
    # Insert 400 blocks
    for block in default_400_blocks:
        await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
    # Farm few more with reward
    for i in range(0, num_blocks):
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    # Confirm we have the funds
    funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks)
        ]
    )
    await time_out_assert(5, wallet.get_confirmed_balance, funds)

    async def get_tx_count(wallet_id):
        # Helper polled by time_out_assert: number of wallet transactions.
        txs = await wsm.get_all_transactions(wallet_id)
        return len(txs)

    # Two transactions (pool reward + farmer reward) per counted block.
    await time_out_assert(5, get_tx_count, 2 * (num_blocks - 1), 1)
    # Reorg blocks that carry reward
    num_blocks = 30
    blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_400_blocks[:-5])
    for block in blocks_reorg[-30:]:
        await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
    # The reorg removed the reward-carrying blocks, so the wallet is back to zero.
    await time_out_assert(5, get_tx_count, 0, 1)
    await time_out_assert(5, wallet.get_confirmed_balance, 0)
async def setup_full_system(dic=None):
    """
    Async generator fixture: start an introducer, harvester, farmer, timelord, VDF
    client and two full nodes, wire them together, yield the two nodes, then drain
    each setup generator to tear everything down.

    dic: optional config overrides passed through to each setup_* helper.
    """
    # BUG FIX: the original used a mutable default argument (dic={}), which is
    # shared across calls; any mutation by a callee would leak into later runs.
    if dic is None:
        dic = {}
    node_iters = [
        setup_introducer(21233),
        setup_harvester(21234, dic),
        setup_farmer(21235, dic),
        setup_timelord(21236, dic),
        setup_vdf_clients(8000),
        setup_full_node("blockchain_test.db", 21237, 21233, dic),
        setup_full_node("blockchain_test_2.db", 21238, 21233, dic),
    ]
    introducer, introducer_server = await node_iters[0].__anext__()
    harvester, harvester_server = await node_iters[1].__anext__()
    farmer, farmer_server = await node_iters[2].__anext__()
    timelord, timelord_server = await node_iters[3].__anext__()
    vdf = await node_iters[4].__anext__()
    node1, node1_server = await node_iters[5].__anext__()
    node2, node2_server = await node_iters[6].__anext__()
    # Wire the services together: harvester -> farmer; farmer and timelord -> node 1.
    await harvester_server.start_client(PeerInfo("127.0.0.1", uint16(farmer_server._port)), auth=True)
    await farmer_server.start_client(PeerInfo("127.0.0.1", uint16(node1_server._port)))
    await timelord_server.start_client(PeerInfo("127.0.0.1", uint16(node1_server._port)))
    yield (node1, node2)
    # Resume each setup generator so its teardown section runs.
    for node_iter in node_iters:
        try:
            await node_iter.__anext__()
        except StopAsyncIteration:
            pass
async def test_wallet_coinbase(self, wallet_node):
    """Farming rewards to the wallet yields the expected balance and the right mix of
    coinbase (pool) and fee (farmer) reward transactions."""
    num_blocks = 10
    full_nodes, wallets = wallet_node
    full_node_api = full_nodes[0]
    server_1: ChiaServer = full_node_api.full_node.server
    wallet_node, server_2 = wallets[0]
    wallet = wallet_node.wallet_state_manager.main_wallet
    ph = await wallet.get_new_puzzlehash()
    await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), None)
    for i in range(0, num_blocks):
        await full_node_api.farm_new_block(FarmNewBlockProtocol(ph))
    # Two additional transaction blocks, rewards also paid to ph.
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
    funds = sum(
        [
            calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
            for i in range(1, num_blocks + 2)
        ]
    )

    async def check_tx_are_pool_farm_rewards():
        # Expect exactly one COINBASE_REWARD and one FEE_REWARD transaction
        # per counted block — polled by time_out_assert below.
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        all_txs = await wsm.get_all_transactions(1)
        expected_count = (num_blocks + 1) * 2
        if len(all_txs) != expected_count:
            return False
        pool_rewards = 0
        farm_rewards = 0
        for tx in all_txs:
            if tx.type == TransactionType.COINBASE_REWARD:
                pool_rewards += 1
            elif tx.type == TransactionType.FEE_REWARD:
                farm_rewards += 1
        if pool_rewards != expected_count / 2:
            return False
        if farm_rewards != expected_count / 2:
            return False
        return True

    await time_out_assert(10, check_tx_are_pool_farm_rewards, True)
    await time_out_assert(5, wallet.get_confirmed_balance, funds)
async def get_average_block_time(rpc_port: int) -> float:
    """
    Estimate the average seconds per block by comparing the timestamp of the most
    recent transaction block at the peak with one ~500 blocks earlier.

    Falls back to SECONDS_PER_BLOCK whenever the chain is too short or any RPC
    call fails.
    """
    # BUG FIX: bind `client` before the try so the except path cannot hit a
    # NameError when the failure happens before the client is created.
    client = None
    try:
        blocks_to_compare = 500
        config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
        self_hostname = config["self_hostname"]
        if rpc_port is None:
            rpc_port = config["full_node"]["rpc_port"]
        client = await FullNodeRpcClient.create(self_hostname, uint16(rpc_port), DEFAULT_ROOT_PATH, config)
        blockchain_state = await client.get_blockchain_state()
        curr: Optional[BlockRecord] = blockchain_state["peak"]
        if curr is None or curr.height < (blocks_to_compare + 100):
            client.close()
            await client.await_closed()
            return SECONDS_PER_BLOCK
        # Walk back to the most recent transaction block (those carry timestamps).
        while curr is not None and curr.height > 0 and not curr.is_transaction_block:
            curr = await client.get_block_record(curr.prev_hash)
        if curr is None:
            client.close()
            await client.await_closed()
            return SECONDS_PER_BLOCK
        # Do the same for the comparison point ~blocks_to_compare blocks earlier.
        past_curr = await client.get_block_record_by_height(curr.height - blocks_to_compare)
        while past_curr is not None and past_curr.height > 0 and not past_curr.is_transaction_block:
            past_curr = await client.get_block_record(past_curr.prev_hash)
        if past_curr is None:
            client.close()
            await client.await_closed()
            return SECONDS_PER_BLOCK
        client.close()
        await client.await_closed()
        return (curr.timestamp - past_curr.timestamp) / (curr.height - past_curr.height)
    except Exception as e:
        if isinstance(e, aiohttp.client_exceptions.ClientConnectorError):
            print(f"Connection error. Check if full node is running at {rpc_port}")
        else:
            print(f"Exception from 'full node' {e}")
        # BUG FIX: only close the client if it was actually created; the original
        # referenced `client` unconditionally here.
        if client is not None:
            client.close()
            await client.await_closed()
        return SECONDS_PER_BLOCK
async def test_sync_from_zero(self, two_nodes, default_400_blocks):
    """Feed 400 blocks to node 1, connect node 2, and wait for it to sync."""
    # Must be larger than "sync_block_behind_threshold" in the config
    blocks = default_400_blocks
    num_blocks = len(blocks)
    full_node_1, full_node_2, server_1, server_2 = two_nodes

    for block in blocks:
        await full_node_1.full_node.respond_sub_block(
            full_node_protocol.RespondSubBlock(block)
        )

    await server_2.start_client(
        PeerInfo(self_hostname, uint16(server_1._port)), None
    )

    # The second node should eventually catch up to the first one, and have the
    # same tip at height num_blocks - 1 (or at least num_blocks - 3, in case we sync to below the tip)
    await time_out_assert(60, node_height_at_least, True, full_node_2, num_blocks - 1)
async def test_short_sync(self, two_nodes):
    """Give node_2 a shorter fork, then let it short-sync to node_1's chain."""
    num_blocks = 10
    num_blocks_2 = 4
    blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 10)
    blocks_2 = bt.get_consecutive_blocks(test_constants, num_blocks_2, [], 10, seed=b"123")
    full_node_1, full_node_2, server_1, server_2 = two_nodes

    # 10 blocks to node_1
    for block in blocks[1:num_blocks]:
        async for _ in full_node_1.respond_block(full_node_protocol.RespondBlock(block)):
            pass

    # 4 different blocks to node_2
    for block in blocks_2[1:num_blocks_2]:
        async for _ in full_node_2.respond_block(full_node_protocol.RespondBlock(block)):
            pass

    # 6th block from node_1 to node_2
    async for _ in full_node_2.respond_block(full_node_protocol.RespondBlock(blocks[5])):
        pass

    await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
    await asyncio.sleep(2)  # Allow connections to get made

    start = time.time()
    deadline = start + 30
    while time.time() < deadline:
        # The second node should eventually catch up to the first one, and have the
        # same tip at height num_blocks - 1.
        tip_height = max(h.height for h in full_node_2.blockchain.get_current_tips())
        if tip_height == num_blocks - 1:
            print(
                f"Time taken to sync {num_blocks} is {time.time() - start}"
            )
            return
        await asyncio.sleep(0.1)
    raise Exception("Took too long to process blocks")
async def test2(self):
    """Broadcast 100 blocks from server_1 and wait for the shared chain to
    reach the tip, tearing both servers down no matter how the test exits."""
    num_blocks = 100
    store = FullNodeStore("fndb_test")
    await store._clear_database()
    blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 10)
    b: Blockchain = Blockchain(test_constants)
    await store.add_block(blocks[0])
    await b.initialize({})

    full_node_1 = FullNode(store, b)
    server_1 = ChiaServer(21236, full_node_1, NodeType.FULL_NODE)
    _ = await server_1.start_server("127.0.0.1", None)
    full_node_1._set_server(server_1)

    full_node_2 = FullNode(store, b)
    server_2 = ChiaServer(21237, full_node_2, NodeType.FULL_NODE)
    full_node_2._set_server(server_2)

    try:
        await server_2.start_client(PeerInfo("127.0.0.1", uint16(21236)), None)
        await asyncio.sleep(2)  # Allow connections to get made

        start_unf = time.time()
        for i in range(1, num_blocks):
            msg = Message("block", peer_protocol.Block(blocks[i]))
            server_1.push_message(
                OutboundMessage(NodeType.FULL_NODE, msg, Delivery.BROADCAST)
            )

        while time.time() - start_unf < 300:
            if max([h.height for h in b.get_current_tips()]) == num_blocks - 1:
                print(
                    f"Time taken to process {num_blocks} is {time.time() - start_unf}"
                )
                return
            await asyncio.sleep(0.1)
        raise Exception("Took too long to process blocks")
    finally:
        # Always close both servers — the original duplicated this teardown on
        # the success and timeout paths and leaked both servers if any awaited
        # call raised mid-test.
        server_1.close_all()
        server_2.close_all()
        await server_1.await_closed()
        await server_2.await_closed()
async def setup_full_system(consensus_constants: ConsensusConstants):
    """Async-generator fixture: start a full test system (introducer, harvester,
    farmer, VDF clients, timelords, two full nodes), yield the handles, and
    tear everything down afterwards.

    The teardown is in a ``finally`` so it also runs when the consumer aborts
    the generator (``aclose()``/``GeneratorExit`` after a test failure) — the
    original placed it after a bare ``yield`` and leaked every started service
    in that case.
    """
    node_iters = [
        setup_introducer(21233),
        setup_harvester(21234, 21235, consensus_constants),
        setup_farmer(21235, consensus_constants, uint16(21237)),
        setup_vdf_clients(8000),
        setup_timelord(21236, 21237, False, consensus_constants),
        setup_full_node(consensus_constants, "blockchain_test.db", 21237, 21232, False, 10),
        setup_full_node(consensus_constants, "blockchain_test_2.db", 21238, 21232, False, 10),
        setup_vdf_clients(7999),
        setup_timelord(21239, 21238, True, consensus_constants),
    ]

    introducer, introducer_server = await node_iters[0].__anext__()
    harvester, harvester_server = await node_iters[1].__anext__()
    farmer, farmer_server = await node_iters[2].__anext__()

    async def num_connections():
        return len(harvester.global_connections.get_connections())

    # Wait for the harvester to connect (to the farmer) before continuing.
    await time_out_assert(10, num_connections, 1)

    vdf = await node_iters[3].__anext__()
    timelord, timelord_server = await node_iters[4].__anext__()
    node1, node1_server = await node_iters[5].__anext__()
    node2, node2_server = await node_iters[6].__anext__()
    vdf_sanitizer = await node_iters[7].__anext__()
    sanitizer, sanitizer_server = await node_iters[8].__anext__()

    try:
        yield (
            node1,
            node2,
            harvester,
            farmer,
            introducer,
            timelord,
            vdf,
            sanitizer,
            vdf_sanitizer,
            node1_server,
        )
    finally:
        await _teardown_nodes(node_iters)