def get(self) -> Tuple[dict, int]:
    """Return public info about this pathfinding service as (dict, 200)."""
    # NOTE(review): the value is unused; accessing `block_number` presumably
    # triggers a request to the eth node so this endpoint fails fast when the
    # node is unreachable — confirm against web3 provider behavior.
    _ = self.pathfinding_service.web3.eth.block_number
    info = {
        "price_info": str(self.api.service_fee),
        "network_info": {
            "chain_id": self.pathfinding_service.chain_id,
            "token_network_registry_address": to_checksum_address(
                self.pathfinding_service.registry_address
            ),
            "user_deposit_address": to_checksum_address(
                self.pathfinding_service.user_deposit_contract.address
            ),
            "service_token_address": to_checksum_address(
                self.pathfinding_service.service_token_address
            ),
            "confirmed_block": {
                "number": str(self.pathfinding_service.blockchain_state.latest_committed_block)
            },
        },
        "version": self.version,
        "contracts_version": self.contracts_version,
        "operator": self.api.operator,
        "message": self.api.info_message,
        "payment_address": to_checksum_address(self.pathfinding_service.address),
        "UTC": datetime.utcnow().isoformat(),
        "matrix_server": self.api.pathfinding_service.matrix_listener.base_url,
    }
    return info, 200
def update_feedback(self, token: FeedbackToken, route: List[Address], successful: bool) -> int:
    """Record the client's feedback for a previously issued token/route pair.

    Only rows whose feedback is still unset (``successful IS NULL``) are
    updated, so feedback can be given at most once per token and route.

    Returns:
        Number of updated rows (0 when the token/route is unknown or
        feedback was already recorded).
    """
    # Routes are stored as JSON-encoded lists of checksummed addresses.
    hexed_route = [to_checksum_address(e) for e in route]
    token_dict = dict(
        token_id=token.uuid.hex,
        token_network_address=to_checksum_address(token.token_network_address),
        route=json.dumps(hexed_route),
        successful=successful,
        feedback_time=datetime.utcnow(),
    )
    with self._cursor() as cursor:
        updated_rows = cursor.execute(
            """
            UPDATE feedback
            SET successful = :successful,
                feedback_time = :feedback_time
            WHERE token_id = :token_id
                AND token_network_address = :token_network_address
                AND route = :route
                AND successful IS NULL;
            """,
            token_dict,
        ).rowcount

    return updated_rows
def __repr__(self) -> str:
    """Compact debug representation showing channel id, endpoints and capacity."""
    participant_from = to_checksum_address(self.participant1)
    participant_to = to_checksum_address(self.participant2)
    return (
        f"<ChannelView cid={self.channel_id} "
        f"from={participant_from} to={participant_to} "
        f"capacity={self.capacity}>"
    )
def test_saveing_multiple_channel(ms_database: Database):
    """Channels in two different token networks are stored and loaded independently.

    NOTE(review): "saveing" is a typo for "saving"; name kept unchanged here.
    """
    # Register two token networks the channels can belong to.
    ms_database.conn.execute(
        "INSERT INTO token_network (address, settle_timeout) VALUES (?, ?)",
        [to_checksum_address(DEFAULT_TOKEN_NETWORK_ADDRESS), DEFAULT_TOKEN_NETWORK_SETTLE_TIMEOUT],
    )
    tn_address2 = make_token_network_address()
    ms_database.conn.execute(
        "INSERT INTO token_network (address, settle_timeout) VALUES (?, ?)",
        [to_checksum_address(tn_address2), DEFAULT_TOKEN_NETWORK_SETTLE_TIMEOUT],
    )
    channel1 = create_channel()
    channel2 = create_channel()
    channel2.token_network_address = tn_address2

    # First upsert: channel round-trips and count is 1.
    ms_database.upsert_channel(channel1)
    loaded_channel1 = ms_database.get_channel(
        token_network_address=channel1.token_network_address, channel_id=channel1.identifier
    )
    assert loaded_channel1 == channel1
    assert ms_database.channel_count() == 1

    # Second upsert in the other network must not overwrite the first.
    ms_database.upsert_channel(channel2)
    loaded_channel2 = ms_database.get_channel(
        token_network_address=channel2.token_network_address, channel_id=channel2.identifier
    )
    assert loaded_channel2 == channel2
    assert ms_database.channel_count() == 2
def upsert_channel(self, channel: Channel) -> None:
    """Insert the channel or replace an existing row with the same primary key.

    Addresses are stored checksummed, numeric values encoded via ``hex256``,
    and tx hashes hex-encoded; optional fields are stored as NULL.
    """
    values = [
        to_checksum_address(channel.token_network_address),
        hex256(channel.identifier),
        to_checksum_address(channel.participant1),
        to_checksum_address(channel.participant2),
        channel.state,
        hex256(channel.closing_block) if channel.closing_block else None,
        channel.closing_participant,
        encode_hex(channel.monitor_tx_hash) if channel.monitor_tx_hash else None,
        encode_hex(channel.claim_tx_hash) if channel.claim_tx_hash else None,
    ]
    # update_status is optional; flatten its two fields or store NULLs.
    if channel.update_status:
        values += [
            to_checksum_address(channel.update_status.update_sender_address),
            hex256(channel.update_status.nonce),
        ]
    else:
        values += [None, None]

    # Positional insert: relies on the channel table's column order matching
    # the order of `values` above.
    upsert_sql = "INSERT OR REPLACE INTO channel VALUES ({})".format(", ".join("?" * len(values)))
    self.conn.execute(upsert_sql, values)
def get_feedback_routes(
    self,
    token_network_address: TokenNetworkAddress,
    source_address: Address,
    target_address: Optional[Address] = None,
) -> Iterator[Dict]:
    """Iterate over stored feedback rows for the given source.

    When ``target_address`` is given, results are additionally filtered by it.
    Each yielded dict maps column names to values, with ``route`` decoded
    from its JSON representation back into a list of addresses.
    """
    filters = {
        "token_network_address": to_checksum_address(token_network_address),
        "source_address": to_checksum_address(source_address),
    }
    # The target filter is optional, so the WHERE clause is built dynamically
    # (the clause only references a named parameter — no SQL injection risk).
    where_clause = ""
    if target_address:
        where_clause = " AND target_address = :target_address"
        filters["target_address"] = to_checksum_address(target_address)

    sql = f"""
        SELECT source_address, target_address, route, estimated_fee, token_id
        FROM feedback
        WHERE token_network_address = :token_network_address
            AND source_address = :source_address
            {where_clause}
    """
    with self._cursor() as cursor:
        for row in cursor.execute(sql, filters):
            route = dict(zip(row.keys(), row))
            route["route"] = json.loads(route["route"])
            yield route
def get_feedback_token(
    self, token_id: UUID, token_network_address: TokenNetworkAddress, route: List[Address]
) -> Optional[FeedbackToken]:
    """Load the feedback token matching id, token network and the exact route.

    Returns ``None`` when no matching row exists. The route must match the
    stored JSON representation exactly (same addresses, same order).
    """
    hexed_route = [to_checksum_address(e) for e in route]
    with self._cursor() as cursor:
        token = cursor.execute(
            """SELECT * FROM feedback WHERE
                token_id = ? AND
                token_network_address = ? AND
                route = ?;
            """,
            [
                token_id.hex,
                to_checksum_address(token_network_address),
                json.dumps(hexed_route),
            ],
        ).fetchone()

    if token:
        return FeedbackToken(
            token_network_address=TokenNetworkAddress(
                to_canonical_address(token["token_network_address"])
            ),
            uuid=UUID(token["token_id"]),
            creation_time=token["creation_time"],
        )
    return None
def pop_waiting_messages( self, token_network_address: TokenNetworkAddress, channel_id: ChannelID) -> Iterator[DeferableMessage]: """Return all waiting messages for the given channel and delete them from the db""" # Return messages with self._cursor() as cursor: for row in cursor.execute( """ SELECT message FROM waiting_message WHERE token_network_address = ? AND channel_id = ? """, [ to_checksum_address(token_network_address), hex256(channel_id) ], ): yield JSONSerializer.deserialize(row["message"]) with self._cursor() as cursor: # Delete returned messages cursor.execute( "DELETE FROM waiting_message WHERE token_network_address = ? AND channel_id = ?", [ to_checksum_address(token_network_address), hex256(channel_id) ], )
def post(
    self, token_network_address: str  # pylint: disable=unused-argument
) -> Tuple[dict, int]:
    """Accept route feedback from a client.

    Returns ({}, 200) when the feedback was processed and ({}, 400) when
    the supplied feedback token is unknown or no longer valid.
    """
    token_network = self._validate_token_network_argument(token_network_address)
    feedback_request = self._parse_post(FeedbackRequest)
    feedback_token = self.pathfinding_service.database.get_feedback_token(
        token_id=feedback_request.token,
        token_network_address=token_network.address,
        route=feedback_request.path,
    )

    # The client doesn't need to know whether the feedback was accepted or not,
    # so in case the token is invalid we return HTTP 400 without further infos
    if not feedback_token or not feedback_token.is_valid():
        return {}, 400

    updated_rows = self.pathfinding_service.database.update_feedback(
        token=feedback_token, route=feedback_request.path, successful=feedback_request.success
    )

    # Only log when the feedback actually changed a row (first feedback for
    # this token/route).
    if updated_rows > 0:
        log.info(
            "Received feedback",
            token_network_address=to_checksum_address(feedback_token.token_network_address),
            feedback_token=feedback_request.token,
            feedback_route=[to_checksum_address(addr) for addr in feedback_request.path],
            was_success=feedback_request.success,
        )

    return {}, 200
def get(self) -> Tuple[dict, int]:
    """Return public info about this monitoring service as (dict, 200)."""
    info = {
        "price_info": str(self.api.monitoring_service.context.min_reward),
        "network_info": {
            "chain_id": self.monitoring_service.chain_id,
            "token_network_registry_address": to_checksum_address(
                self.monitoring_service.context.ms_state.blockchain_state.token_network_registry_address  # noqa
            ),
            "user_deposit_address": to_checksum_address(
                self.monitoring_service.context.user_deposit_contract.address
            ),
            "service_token_address": to_checksum_address(self.service_token_address),
            "confirmed_block": {
                "number": str(
                    self.monitoring_service.context.ms_state.blockchain_state.latest_committed_block  # noqa
                )
            },
        },
        "version": self.version,
        "contracts_version": self.contracts_version,
        "operator": self.api.operator,
        "message": self.api.info_message,
        "UTC": datetime.utcnow().isoformat(),
    }
    return info, 200
def check_path_request_errors(
    self,
    source: Address,
    target: Address,
    value: PaymentAmount,
    reachability_state: AddressReachabilityProtocol,
) -> Optional[str]:
    """Checks for basic problems with the path requests. Returns error message or `None`"""
    with opentracing.tracer.start_span("check_path_request_errors"):
        # Both endpoints must currently be reachable via the transport.
        if (
            reachability_state.get_address_reachability(source)
            is not AddressReachability.REACHABLE
        ):
            return "Source not online"
        if (
            reachability_state.get_address_reachability(target)
            is not AddressReachability.REACHABLE
        ):
            return "Target not online"

        # Both endpoints must have at least one channel in the graph ...
        if not any(self.G.edges(source)):
            return "No channel from source"
        if not any(self.G.edges(target)):
            return "No channel to target"

        # ... and at least one of those channels needs sufficient capacity.
        source_capacities = [
            view.capacity for _, _, view in self.G.out_edges(source, data="view")
        ]
        if max(source_capacities) < value:
            debug_capacities = [
                (to_checksum_address(a), to_checksum_address(b), view.capacity)
                for a, b, view in self.G.out_edges(source, data="view")
            ]
            log.debug("Insufficient capacities", capacities=debug_capacities)
            # Fix: the original message contained a stray leading space in the
            # last f-string fragment, producing a double space before {value}.
            message = (
                f"Source does not have a channel with sufficient capacity "
                f"(current capacities: {source_capacities} < requested amount: "
                f"{value})"
            )
            return message
        target_capacities = [
            view.capacity for _, _, view in self.G.in_edges(target, data="view")
        ]
        if max(target_capacities) < value:
            return "Target does not have a channel with sufficient capacity (%s < %s)" % (
                target_capacities,
                value,
            )

        # Finally, there must be at least one route between the endpoints.
        try:
            next(nx.shortest_simple_paths(G=self.G, source=source, target=target))
        except NetworkXNoPath:
            return "No route from source to target"

    return None
def packed_reward_proof_data(self, non_closing_signature: Signature) -> bytes:
    """Return reward proof data serialized to binary"""
    # Pre-compute the checksummed address arguments for readability.
    token_network = HexAddress(to_checksum_address(self.token_network_address))
    non_closing = HexAddress(to_checksum_address(self.non_closing_participant))
    return pack_reward_proof(
        monitoring_service_contract_address=to_checksum_address(self.msc_address),
        chain_id=self.chain_id,
        token_network_address=token_network,
        non_closing_participant=non_closing,
        non_closing_signature=non_closing_signature,
        reward_amount=self.reward_amount,
    )
def test_get_paths_via_debug_endpoint_with_debug_disabled(
    api_url: str, addresses: List[Address], token_network_model: TokenNetwork
):
    """With debugging disabled, the per-route debug endpoint must not exist."""
    network_hex = to_checksum_address(token_network_model.address)
    first_address_hex = to_checksum_address(addresses[0])
    url_debug = f"{api_url}/v1/_debug/routes/{network_hex}/{first_address_hex}"

    # now there must not be a debug endpoint for that specific route
    response_debug = requests.get(url_debug)
    assert response_debug.status_code == 404
def upsert_capacity_update(self, message: PFSCapacityUpdate) -> None:
    """Store the capacities reported by a PFSCapacityUpdate, replacing older data."""
    canonical = message.canonical_identifier
    self.upsert(
        "capacity_update",
        dict(
            updating_participant=to_checksum_address(message.updating_participant),
            token_network_address=to_checksum_address(canonical.token_network_address),
            channel_id=hex256(canonical.channel_identifier),
            updating_capacity=hex256(message.updating_capacity),
            other_capacity=hex256(message.other_capacity),
        ),
    )
def test_logging_processor():
    """format_to_hex must checksum byte-addresses, even nested inside events/messages."""
    # test if our logging processor changes bytes to checksum addresses
    # even if bytes-addresses are entangled into events
    logger = Mock()
    log_method = Mock()

    # Plain bytes address at the top level of the event dict.
    address = TokenAddress(b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd9")
    address_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(address=address)
    )
    assert to_checksum_address(address) == address_log["address"]

    # Addresses nested inside a blockchain event object.
    address2 = Address(b"\x7f[\xf6\xc9To\xa8\x185w\xe4\x9f\x15\xbc\xef@mr\xd5\xd1")
    event = ReceiveTokenNetworkCreatedEvent(
        token_address=address,
        token_network_address=TokenNetworkAddress(address2),
        settle_timeout=DEFAULT_TOKEN_NETWORK_SETTLE_TIMEOUT,
        block_number=BlockNumber(1),
    )
    event_log = format_to_hex(_logger=logger, _log_method=log_method, event_dict=dict(event=event))
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address) == event_log["event"]["token_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address2) == event_log["event"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        event_log["event"]["type_name"] == "ReceiveTokenNetworkCreatedEvent"
    )

    # Addresses nested two levels deep inside a message's canonical identifier.
    message = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(61),
            token_network_address=TokenNetworkAddress(address),
            channel_identifier=ChannelID(1),
        ),
        updating_participant=PARTICIPANT1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    message_log = format_to_hex(
        _logger=logger, _log_method=log_method, event_dict=dict(message=message)
    )
    assert (  # pylint: disable=unsubscriptable-object
        to_checksum_address(address)
        == message_log["message"]["canonical_identifier"]["token_network_address"]
    )
    assert (  # pylint: disable=unsubscriptable-object
        message_log["message"]["type_name"] == "PFSFeeUpdate"
    )
def prepare_feedback(
    self, token: FeedbackToken, route: List[Address], estimated_fee: FeeAmount
) -> None:
    """Store a newly issued feedback token together with the route it covers."""
    checksummed_route = [to_checksum_address(address) for address in route]
    # Source/target are derived from the route's endpoints for direct querying.
    row = dict(
        token_id=token.uuid.hex,
        creation_time=token.creation_time,
        token_network_address=to_checksum_address(token.token_network_address),
        route=json.dumps(checksummed_route),
        estimated_fee=hex256(estimated_fee),
        source_address=checksummed_route[0],
        target_address=checksummed_route[-1],
    )
    self.insert("feedback", row)
def test_get_paths(api_url: str, addresses: List[Address], token_network_model: TokenNetwork):
    """Happy path and error cases of the ``/paths`` endpoint."""
    hex_addrs = [to_checksum_address(addr) for addr in addresses]
    url = api_url + "/v1/" + to_checksum_address(token_network_model.address) + "/paths"

    data = {"from": hex_addrs[0], "to": hex_addrs[2], "value": 10, "max_paths": DEFAULT_MAX_PATHS}
    response = requests.post(url, json=data)
    assert response.status_code == 200
    paths = response.json()["result"]
    assert len(paths) == 2
    # Shorter (direct) route first, including metadata for every hop.
    assert paths[0] == {
        "path": [hex_addrs[0], hex_addrs[1], hex_addrs[2]],
        "address_metadata": {
            hex_addrs[0]: get_address_metadata(hex_addrs[0]),
            hex_addrs[1]: get_address_metadata(hex_addrs[1]),
            hex_addrs[2]: get_address_metadata(hex_addrs[2]),
        },
        "estimated_fee": 0,
    }
    assert paths[1] == {
        "path": [hex_addrs[0], hex_addrs[1], hex_addrs[4], hex_addrs[3], hex_addrs[2]],
        "address_metadata": {
            hex_addrs[0]: get_address_metadata(hex_addrs[0]),
            hex_addrs[1]: get_address_metadata(hex_addrs[1]),
            hex_addrs[2]: get_address_metadata(hex_addrs[2]),
            hex_addrs[3]: get_address_metadata(hex_addrs[3]),
            hex_addrs[4]: get_address_metadata(hex_addrs[4]),
        },
        "estimated_fee": 0,
    }

    # check default value for num_path
    data = {"from": hex_addrs[0], "to": hex_addrs[2], "value": 10}
    default_response = requests.post(url, json=data)
    assert default_response.json()["result"] == response.json()["result"]

    # impossible routes
    for source, dest in [
        (hex_addrs[0], hex_addrs[5]),  # no connection between 0 and 5
        ("0x" + "1" * 40, hex_addrs[5]),  # source not in graph
        (hex_addrs[0], "0x" + "1" * 40),  # dest not in graph
    ]:
        data = {"from": source, "to": dest, "value": 10, "max_paths": 3}
        response = requests.post(url, json=data)
        assert response.status_code == 404
        assert response.json()["error_code"] == exceptions.NoRouteFound.error_code
def test_get_paths_via_debug_endpoint_empty_routes(
    api_url: str, addresses: List[Address], token_network_model: TokenNetwork
):
    """Failed path requests must still be recorded by the debug endpoint."""
    # `last_failed_requests` is a module variable, so it might have entries
    # from tests that ran earlier.
    last_failed_requests.clear()
    hex_addrs = [to_checksum_address(addr) for addr in addresses]
    token_network_address = to_checksum_address(token_network_model.address)

    # Request a route between unconnected nodes — expected to fail.
    response = requests.post(
        api_url + f"/v1/{token_network_address}/paths",
        json={
            "from": hex_addrs[0],
            "to": hex_addrs[5],
            "value": 10,
            "max_paths": DEFAULT_MAX_PATHS,
        },
    )
    assert response.status_code == 404

    # test that requests with no routes found are returned as well
    url_debug_incl_impossible_route = (
        api_url + f"/v1/_debug/routes/{token_network_address}/{hex_addrs[0]}/{hex_addrs[5]}"
    )
    response_debug_incl_impossible_route = requests.get(url_debug_incl_impossible_route)
    assert response_debug_incl_impossible_route.status_code == 200
    request_count = response_debug_incl_impossible_route.json()["request_count"]
    assert request_count == 1

    # Request with a value too large for any channel — also expected to fail.
    response = requests.post(
        api_url + f"/v1/{token_network_address}/paths",
        json={
            "from": hex_addrs[0],
            "to": hex_addrs[6],
            "value": 1e10,
            "max_paths": DEFAULT_MAX_PATHS,
        },
    )
    assert response.status_code == 404

    # test that requests with no routes found are returned as well
    # regression test for https://github.com/raiden-network/raiden/issues/5421
    url_debug_incl_impossible_route = (
        api_url + f"/v1/_debug/routes/{token_network_address}/{hex_addrs[0]}/{hex_addrs[6]}"
    )
    response_debug_incl_impossible_route = requests.get(url_debug_incl_impossible_route)
    assert response_debug_incl_impossible_route.status_code == 200
    request_count = response_debug_incl_impossible_route.json()["request_count"]
    assert request_count == 1
def make_params(timestamp: str):
    """Build signed request parameters for the service endpoint under test.

    NOTE(review): relies on ``sender``, ``privkey`` and ``api_sut`` from the
    enclosing scope (not visible here). The signature covers the canonical
    sender and receiver addresses plus the UTF-8 encoded timestamp.
    """
    params = {
        "sender": to_checksum_address(sender),
        "receiver": to_checksum_address(api_sut.pathfinding_service.address),
        "timestamp": timestamp,
    }
    local_signer = LocalSigner(private_key=privkey)
    params["signature"] = encode_hex(
        local_signer.sign(
            to_canonical_address(params["sender"])
            + to_canonical_address(params["receiver"])
            + params["timestamp"].encode("utf8")
        )
    )
    return params
def test_get_ious_via_debug_endpoint(
    api_sut_with_debug: PFSApi, api_url: str, addresses: List[Address]
):
    """IOU debug endpoint: returns the stored IOU for a known sender, {} otherwise."""
    hex_addrs = [to_checksum_address(addr) for addr in addresses]
    iou = IOU(
        sender=addresses[0],
        receiver=addresses[4],
        amount=TokenAmount(111),
        claimable_until=7619644,
        signature=Signature(
            decode_hex("118a93e9fd0a3a1c3d6edbad194b5c9d95715c754881d80e23e985793b1e13de")
        ),
        claimed=False,
        chain_id=ChainID(61),
        one_to_n_address=api_sut_with_debug.one_to_n_address,
    )
    api_sut_with_debug.pathfinding_service.database.upsert_iou(iou)

    # now there must be an iou debug endpoint for a request of a sender in the database
    url_iou_debug = api_url + f"/v1/_debug/ious/{hex_addrs[0]}"
    response_debug = requests.get(url_iou_debug)
    assert response_debug.status_code == 200
    response_iou = response_debug.json()
    assert response_iou == {"sender": hex_addrs[0], "amount": 111, "claimable_until": 7619644}

    # but there is no iou debug endpoint for a request of a sender not in the database
    url_iou_debug = api_url + f"/v1/_debug/ious/{hex_addrs[1]}"
    response_debug = requests.get(url_iou_debug)
    assert response_debug.status_code == 200
    ious = response_debug.json()
    assert ious == {}
def upsert_iou(self, iou: IOU) -> None:
    """Insert the IOU or replace the stored row with its serialized fields."""
    # Serialize via the schema, dropping fields that are not stored.
    row = IOU.Schema(exclude=["receiver", "chain_id"]).dump(iou)
    row["one_to_n_address"] = to_checksum_address(row["one_to_n_address"])
    # Numeric fields are stored hex-encoded via hex256.
    row["amount"] = hex256(int(row["amount"]))
    row["claimable_until"] = hex256(int(row["claimable_until"]))
    self.upsert("iou", row)
def get_channel(
    self, token_network_address: TokenNetworkAddress, channel_id: ChannelID
) -> Optional[Channel]:
    """Load a single channel, or return ``None`` when it is not stored.

    Reverses the encodings used by ``upsert_channel`` and rebuilds the
    optional ``OnChainUpdateStatus`` from its two flattened columns.
    """
    row = self.conn.execute(
        """
        SELECT * FROM channel
        WHERE identifier = ? AND token_network_address = ?
        """,
        [hex256(channel_id), to_checksum_address(token_network_address)],
    ).fetchone()
    if row is None:
        return None

    # All non-update_status columns map directly onto Channel kwargs.
    kwargs = {
        key: val for key, val in zip(row.keys(), row) if not key.startswith("update_status")
    }
    # Hex-encoded columns back to bytes; tx hashes may be NULL.
    kwargs["token_network_address"] = decode_hex(kwargs["token_network_address"])
    kwargs["participant1"] = decode_hex(kwargs["participant1"])
    kwargs["participant2"] = decode_hex(kwargs["participant2"])
    if kwargs["monitor_tx_hash"] is not None:
        kwargs["monitor_tx_hash"] = decode_hex(kwargs["monitor_tx_hash"])
    if kwargs["claim_tx_hash"] is not None:
        kwargs["claim_tx_hash"] = decode_hex(kwargs["claim_tx_hash"])

    return Channel(
        # A NULL nonce means no update_status was stored (see upsert_channel).
        update_status=OnChainUpdateStatus(
            update_sender_address=to_canonical_address(row["update_status_sender"]),
            nonce=row["update_status_nonce"],
        )
        if row["update_status_nonce"] is not None
        else None,
        **kwargs,
    )
def monitoring_service_mock() -> Generator[MonitoringService, None, None]:
    """Fixture: a MonitoringService wired to a mocked web3 and mocked contracts."""
    web3_mock = Web3Mock()

    # User deposit contract mock: fixed effective balance and token address.
    mock_udc = Mock(address=bytes([8] * 20))
    mock_udc.functions.effectiveBalance.return_value.call.return_value = 10000
    mock_udc.functions.token.return_value.call.return_value = to_checksum_address(bytes([7] * 20))
    ms = MonitoringService(
        web3=web3_mock,
        private_key=PrivateKey(
            decode_hex("3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266")
        ),
        db_filename=":memory:",
        contracts={
            CONTRACT_TOKEN_NETWORK_REGISTRY: Mock(address=bytes([9] * 20)),
            CONTRACT_USER_DEPOSIT: mock_udc,
            CONTRACT_MONITORING_SERVICE: Mock(address=bytes([1] * 20)),
            CONTRACT_SERVICE_REGISTRY: Mock(address=bytes([2] * 20)),
        },
        sync_start_block=BlockNumber(0),
        required_confirmations=BlockTimeout(0),
        poll_interval=0,
    )
    yield ms
def make_bytes_readable(value: Any) -> Any:
    """Render 20-byte values as checksum addresses, other bytes as hex; pass anything else through."""
    if not isinstance(value, bytes):
        return value
    # 20 bytes is the length of an Ethereum address.
    return to_checksum_address(value) if len(value) == 20 else to_hex(value)
def test_routing_result_order(
    token_network_model: TokenNetwork,
    reachability_state: SimpleReachabilityContainer,
    addresses: List[Address],
):
    """Returned paths are ordered (direct route first) and capped by availability."""
    hex_addrs = [to_checksum_address(addr) for addr in addresses]
    paths = token_network_model.get_paths(
        source=addresses[0],
        target=addresses[2],
        value=PaymentAmount(10),
        max_paths=5,
        reachability_state=reachability_state,
    )
    # 5 paths requested, but only 2 are available
    assert len(paths) == 2
    assert paths[0].to_dict()["path"] == [hex_addrs[0], hex_addrs[1], hex_addrs[2]]
    assert paths[0].to_dict()["estimated_fee"] == 0
    assert paths[1].to_dict()["path"] == [
        hex_addrs[0],
        hex_addrs[1],
        hex_addrs[4],
        hex_addrs[3],
        hex_addrs[2],
    ]
    assert paths[1].to_dict()["estimated_fee"] == 0
def get_token_network_settle_timeout(
    self, token_network_address: TokenNetworkAddress
) -> Timestamp:
    """Look up the settle timeout stored for the given token network."""
    query = "SELECT settle_timeout FROM token_network WHERE address = ?"
    with self._cursor() as cursor:
        row = cursor.execute(query, [to_checksum_address(token_network_address)]).fetchone()
    return row[0]
def test_ignore_mr_for_closed_channel(
    request_collector, build_request_monitoring, ms_database, closing_block
):
    """MRs that come in >=10 blocks after the channel has been closed must be ignored."""
    request_monitoring = build_request_monitoring()
    # Pretend the chain is at block 100 and register the MR's token network.
    ms_database.conn.execute("UPDATE blockchain SET latest_committed_block = ?", [100])
    ms_database.conn.execute(
        "INSERT INTO token_network(address) VALUES (?)",
        [to_checksum_address(request_monitoring.balance_proof.token_network_address)],
    )
    ms_database.upsert_channel(
        Channel(
            identifier=request_monitoring.balance_proof.channel_identifier,
            token_network_address=request_monitoring.balance_proof.token_network_address,
            participant1=Address(b"1" * 20),
            participant2=Address(b"2" * 20),
            closing_block=closing_block if closing_block else None,
        )
    )
    request_collector.on_monitor_request(request_monitoring)

    # When the channel is not closed, or the closing is less than 10 blocks
    # before the current block (100), the MR must be saved.
    expected_mrs = 0 if closing_block == 100 - CHANNEL_CLOSE_MARGIN else 1
    assert ms_database.monitor_request_count() == expected_mrs
def pathfinding_service_mock_empty() -> Generator[PathfindingService, None, None]:
    """Fixture: a PathfindingService with mocked web3/contracts and an in-memory db."""
    # Patch out the Matrix transport so no real connection is attempted.
    with patch("pathfinding_service.service.MatrixListener", new=Mock):
        web3_mock = Web3Mock()

        # User deposit contract mock: fixed effective balance and token address.
        mock_udc = Mock(address=bytes([8] * 20))
        mock_udc.functions.effectiveBalance.return_value.call.return_value = 10000
        mock_udc.functions.token.return_value.call.return_value = to_checksum_address(
            bytes([7] * 20)
        )
        pathfinding_service = PathfindingService(
            web3=web3_mock,
            contracts={
                CONTRACT_TOKEN_NETWORK_REGISTRY: Mock(address=bytes([9] * 20)),
                CONTRACT_USER_DEPOSIT: mock_udc,
            },
            sync_start_block=BlockNumber(0),
            required_confirmations=BlockTimeout(0),
            poll_interval=0,
            private_key=PrivateKey(
                decode_hex("3a1076bf45ab87712ad64ccb3b10217737f7faacbf2872e88fdd9a537d8fe266")
            ),
            db_filename=":memory:",
        )

        yield pathfinding_service
        pathfinding_service.stop()
def test_payment_with_new_iou_rejected(  # pylint: disable=too-many-locals
    api_sut,
    api_url: str,
    addresses: List[Address],
    token_network_model: TokenNetwork,
    make_iou: Callable,
):
    """Regression test for https://github.com/raiden-network/raiden-services/issues/624"""
    initiator_address = to_checksum_address(addresses[0])
    target_address = to_checksum_address(addresses[1])
    url = api_url + "/v1/" + to_checksum_address(token_network_model.address) + "/paths"
    default_params = {"from": initiator_address, "to": target_address, "value": 5, "max_paths": 3}

    def request_path_with(status_code=400, **kwargs):
        # POST /paths with default_params overridden by kwargs; assert status.
        params = default_params.copy()
        params.update(kwargs)
        response = requests.post(url, json=params)
        assert response.status_code == status_code, response.json()
        return response

    # test with payment
    api_sut.service_fee = 100
    sender = PrivateKey(get_random_privkey())
    iou = make_iou(
        sender,
        api_sut.pathfinding_service.address,
        one_to_n_address=api_sut.one_to_n_address,
        amount=100,
        claimable_until=Timestamp(get_posix_utc_time_now() + 1_234_567),
    )
    first_iou_dict = iou.Schema().dump(iou)
    # Second IOU from the same sender with a different amount/deadline.
    second_iou = make_iou(
        sender,
        api_sut.pathfinding_service.address,
        one_to_n_address=api_sut.one_to_n_address,
        amount=200,
        claimable_until=Timestamp(get_posix_utc_time_now() + 1_234_568),
    )
    second_iou_dict = second_iou.Schema().dump(second_iou)

    # The first IOU is accepted ...
    response = request_path_with(status_code=200, iou=first_iou_dict)
    assert response.status_code == 200

    # ... but a brand-new second IOU must be rejected with UseThisIOU.
    response = request_path_with(iou=second_iou_dict)
    assert response.json()["error_code"] == exceptions.UseThisIOU.error_code
def test_waiting_messages(pathfinding_service_mock):
    """Waiting messages round-trip through the db and are deleted when popped."""
    participant1_privkey, participant1 = make_privkey_address()
    token_network_address = TokenNetworkAddress(b"1" * 20)
    channel_id = ChannelID(1)

    # register token network internally
    database = pathfinding_service_mock.database
    database.conn.execute(
        "INSERT INTO token_network(address) VALUES (?)",
        [to_checksum_address(token_network_address)],
    )

    fee_update = PFSFeeUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(61),
            token_network_address=token_network_address,
            channel_identifier=channel_id,
        ),
        updating_participant=participant1,
        fee_schedule=FeeScheduleState(),
        timestamp=datetime.utcnow(),
        signature=EMPTY_SIGNATURE,
    )
    fee_update.sign(LocalSigner(participant1_privkey))

    capacity_update = PFSCapacityUpdate(
        canonical_identifier=CanonicalIdentifier(
            chain_identifier=ChainID(61),
            token_network_address=token_network_address,
            channel_identifier=channel_id,
        ),
        updating_participant=make_address(),
        other_participant=make_address(),
        updating_nonce=Nonce(1),
        other_nonce=Nonce(1),
        updating_capacity=TokenAmount(100),
        other_capacity=TokenAmount(111),
        reveal_timeout=BlockTimeout(50),
        signature=EMPTY_SIGNATURE,
    )
    capacity_update.sign(LocalSigner(participant1_privkey))

    for message in (fee_update, capacity_update):
        # Each message type round-trips through the waiting_message table ...
        database.insert_waiting_message(message)

        recovered_messages = list(
            database.pop_waiting_messages(
                token_network_address=token_network_address, channel_id=channel_id
            )
        )
        assert len(recovered_messages) == 1
        assert message == recovered_messages[0]

        # ... and popping removed it, so a second pop yields nothing.
        recovered_messages2 = list(
            database.pop_waiting_messages(
                token_network_address=token_network_address, channel_id=channel_id
            )
        )
        assert len(recovered_messages2) == 0