def restore_or_init_snapshot(
    storage: SerializedSQLiteStorage, node_address: Address, initial_state: State
) -> Tuple[State, StateChangeID, int]:
    """Restore the latest snapshot.

    Returns the restored state, the ULID of the state change that is not yet
    applied to it, and the accumulated number of state changes applied to this
    snapshot so far. If there is no snapshot the state is primed with
    `initial_state`.
    """
    state_change_identifier = HIGH_STATECHANGE_ULID

    snapshot = storage.get_snapshot_before_state_change(
        state_change_identifier=state_change_identifier)

    if snapshot is not None:
        log.debug(
            "Snapshot found",
            from_state_change_id=snapshot.state_change_identifier,
            to_state_change_id=state_change_identifier,
            node=to_checksum_address(node_address),
        )
        return snapshot.data, snapshot.state_change_identifier, snapshot.state_change_qty
    else:
        log.debug(
            "No snapshot found, initializing the node state",
            to_state_change_id=state_change_identifier,
            node=to_checksum_address(node_address),
        )
        # The initial state must be saved to preserve the state of the PRNG
        storage.write_first_state_snapshot(initial_state)
        return initial_state, LOW_STATECHANGE_ULID, 0
def create_all_channels_for_network( app_channels: AppChannels, token_addresses: List[TokenAddress], channel_individual_deposit: TokenAmount, channel_settle_timeout: BlockTimeout, ) -> None: greenlets = set() for token_address in token_addresses: for app_pair in app_channels: greenlets.add( gevent.spawn( payment_channel_open_and_deposit, app_pair[0], app_pair[1], token_address, channel_individual_deposit, channel_settle_timeout, ) ) gevent.joinall(greenlets, raise_error=True) channels = [ { "app0": to_checksum_address(app0.raiden.address), "app1": to_checksum_address(app1.raiden.address), "deposit": channel_individual_deposit, "token_address": to_checksum_address(token_address), } for (app0, app1), token_address in product(app_channels, token_addresses) ] log.info("Test channels", channels=channels)
def log_open_channels( raiden: "RaidenService", registry_address: TokenNetworkRegistryAddress, token_address: TokenAddress, funds: TokenAmount, ) -> None: # pragma: no unittest chain_state = views.state_from_raiden(raiden) open_channels = views.get_channelstate_open( chain_state=chain_state, token_network_registry_address=registry_address, token_address=token_address, ) if open_channels: sum_deposits = views.get_our_deposits_for_token_network( views.state_from_raiden(raiden), registry_address, token_address) log.debug( "connect() called on an already joined token network", node=to_checksum_address(raiden.address), registry_address=to_checksum_address(registry_address), token_address=to_checksum_address(token_address), open_channels=len(open_channels), sum_deposits=sum_deposits, funds=funds, )
def update_iou( iou: IOU, privkey: bytes, added_amount: TokenAmount = ZERO_TOKENS, expiration_block: Optional[BlockNumber] = None, ) -> IOU: expected_signature = Signature( sign_one_to_n_iou( privatekey=to_hex(privkey), sender=to_checksum_address(iou.sender), receiver=to_checksum_address(iou.receiver), amount=iou.amount, expiration_block=iou.expiration_block, one_to_n_address=to_checksum_address(iou.one_to_n_address), chain_id=iou.chain_id, )) if iou.signature != expected_signature: raise ServiceRequestFailed( "Last IOU as given by the Pathfinding Service is invalid (signature does not match)" ) iou.amount = TokenAmount(iou.amount + added_amount) if expiration_block: iou.expiration_block = expiration_block iou.sign(privkey) return iou
def transfer_and_assert( server_from: APIServer, server_to: APIServer, token_address: TokenAddress, identifier: int, amount: TokenAmount, ) -> None: url = _url_for( server_from, "token_target_paymentresource", token_address=to_checksum_address(token_address), target_address=to_checksum_address(address_from_apiserver(server_to)), ) json = {"amount": amount, "identifier": identifier} log.debug("PAYMENT REQUEST", url=url, json=json) request = grequests.post(url, json=json) start = time.monotonic() response = request.send().response duration = time.monotonic() - start log.debug("PAYMENT RESPONSE", url=url, json=json, response=response, duration=duration) assert getattr(request, "exception", None) is None assert response is not None assert response.status_code == HTTPStatus.OK, f"Payment failed, reason: {response.content}" assert response.headers["Content-Type"] == "application/json"
def post_pfs_feedback(
    routing_mode: RoutingMode,
    pfs_config: PFSConfig,
    token_network_address: TokenNetworkAddress,
    route: List[Address],
    token: UUID,
    successful: bool,
) -> None:
    feedback_disabled = routing_mode == RoutingMode.PRIVATE or pfs_config is None
    if feedback_disabled:
        return

    hex_route = [to_checksum_address(address) for address in route]
    payload = dict(token=token.hex, path=hex_route, success=successful)

    log.info(
        "Sending routing feedback to Pathfinding Service",
        url=pfs_config.info.url,
        token_network_address=to_checksum_address(token_network_address),
        payload=payload,
    )

    try:
        # Use the module-level requests session; the tests patch `session.post`.
        session.post(
            f"{pfs_config.info.url}/api/v1/{to_checksum_address(token_network_address)}/feedback",
            json=payload,
            timeout=DEFAULT_HTTP_REQUEST_TIMEOUT,
        )
    except requests.RequestException as e:
        log.warning(
            "Could not send feedback to Pathfinding Service",
            exception_=str(e),
            payload=payload,
        )
def wait_for_contract(self, contract_address_hex: AddressHex,
                      timeout: Optional[int] = None) -> bool:
    """ Wait until a contract is mined

    Args:
        contract_address_hex: hex encoded address of the contract
        timeout: time in seconds to wait for the contract to get mined

    Returns:
        True if the contract got mined, False otherwise
    """
    contract_address = decode_hex(contract_address_hex)
    start_time = time.time()
    result = self._raiden.rpc_client.web3.eth.getCode(
        to_checksum_address(contract_address))

    current_time = time.time()
    while not result:
        # Give up only once the timeout has actually elapsed.
        if timeout and current_time > start_time + timeout:
            return False

        result = self._raiden.rpc_client.web3.eth.getCode(
            to_checksum_address(contract_address))
        gevent.sleep(0.5)
        current_time = time.time()

    return len(result) > 0
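# A minimal, standalone sketch (not part of the Raiden API) of the polling
# pattern used by `wait_for_contract` above: probe until data appears or the
# deadline passes.  `probe` is a hypothetical callable standing in for
# `web3.eth.getCode`.
import time
from typing import Callable, Optional


def poll_until(probe: Callable[[], bytes], timeout: Optional[float] = None,
               interval: float = 0.5) -> bool:
    deadline = None if timeout is None else time.monotonic() + timeout
    while not probe():
        # Give up only once the deadline has actually passed.
        if deadline is not None and time.monotonic() > deadline:
            return False
        time.sleep(interval)
    return True


assert poll_until(lambda: b"\x60\x60")  # contract code already present -> True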
def wait_for_token_network( raiden: "RaidenService", token_network_registry_address: TokenNetworkRegistryAddress, token_address: TokenAddress, retry_timeout: float, ) -> None: # pragma: no unittest """Wait until the token network is visible to the RaidenService. Note: This does not time out, use gevent.Timeout. """ token_network = views.get_token_network_by_token_address( views.state_from_raiden(raiden), token_network_registry_address, token_address) log_details = { "token_network_registry_address": to_checksum_address(token_network_registry_address), "token_address": to_checksum_address(token_address), } while token_network is None: assert raiden, ALARM_TASK_ERROR_MSG assert raiden.alarm, ALARM_TASK_ERROR_MSG log.debug("wait_for_token_network", **log_details) gevent.sleep(retry_timeout) token_network = views.get_token_network_by_token_address( views.state_from_raiden(raiden), token_network_registry_address, token_address)
def set_url(self, url: str) -> None:
    """Set the URL under which the caller's service can be reached via HTTP."""
    log_details: Dict[str, Any] = {
        "node": to_checksum_address(self.node_address),
        "contract": to_checksum_address(self.address),
        "url": url,
    }

    if not url.strip():
        msg = "Invalid empty URL"
        raise BrokenPreconditionError(msg)

    parsed_url = urlparse(url)
    if parsed_url.scheme not in ("http", "https"):
        msg = "URL provided to service registry must be a valid HTTP(S) endpoint."
        raise BrokenPreconditionError(msg)

    with log_transaction(log, "set_url", log_details):
        gas_limit = self.proxy.estimate_gas("latest", "setURL", url)
        if not gas_limit:
            msg = f"URL {url} is invalid"
            raise RaidenUnrecoverableError(msg)

        log_details["gas_limit"] = gas_limit
        transaction_hash = self.proxy.transact("setURL", gas_limit, url)

        receipt = self.client.poll(transaction_hash)
        failed_receipt = check_transaction_threw(receipt=receipt)
        if failed_receipt:
            msg = f"URL {url} is invalid"
            raise RaidenUnrecoverableError(msg)
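# Standalone sketch of the URL precondition checks performed in `set_url`:
# reject blank strings and non-HTTP(S) schemes before spending gas on the
# transaction.  This is only an illustration, not the proxy class above.
from urllib.parse import urlparse


def is_valid_service_url(url: str) -> bool:
    if not url.strip():
        return False
    return urlparse(url).scheme in ("http", "https")


assert is_valid_service_url("https://pfs.example.com")
assert not is_valid_service_url("ftp://pfs.example.com")
assert not is_valid_service_url("   ")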
def happy_path_fixture(chain_state, token_network_state, our_address): token_network_state, addresses, channel_states = create_square_network_topology( token_network_state=token_network_state, our_address=our_address) address1, address2, address3, address4 = addresses chain_state.nodeaddresses_to_networkstates = { address1: NetworkState.REACHABLE, address2: NetworkState.REACHABLE, address3: NetworkState.REACHABLE, address4: NetworkState.REACHABLE, } json_data = { "result": [{ "path": [ to_checksum_address(our_address), to_checksum_address(address2), to_checksum_address(address3), to_checksum_address(address4), ], "estimated_fee": 0, }], "feedback_token": DEFAULT_FEEDBACK_TOKEN.hex, } response = mocked_json_response(response_data=json_data) return addresses, chain_state, channel_states, response, token_network_state
def send_transaction(self, to: Address, startgas: int, value: int = 0, data: bytes = b"") -> TransactionHash: """ Locally sign the transaction and send it to the network. """ with self._sent_lock: if self._sent: raise RaidenUnrecoverableError( f"A transaction for this slot has been sent already! " f"Reusing the nonce is a synchronization problem.") if to == to_canonical_address(NULL_ADDRESS_HEX): warnings.warn( "For contract creation the empty string must be used.") gas_price = self._client.gas_price() transaction = { "data": data, "gas": startgas, "nonce": self.nonce, "value": value, "gasPrice": gas_price, } node_gas_price = self._client.web3.eth.gasPrice log.debug( "Calculated gas price for transaction", node=to_checksum_address(self._client.address), calculated_gas_price=gas_price, node_gas_price=node_gas_price, ) # add the to address if not deploying a contract if to != b"": transaction["to"] = to_checksum_address(to) signed_txn = self._client.web3.eth.account.signTransaction( transaction, self._client.privkey) log_details = { "node": to_checksum_address(self._client.address), "nonce": transaction["nonce"], "gasLimit": transaction["gas"], "gasPrice": transaction["gasPrice"], } log.debug("send_raw_transaction called", **log_details) tx_hash = self._client.web3.eth.sendRawTransaction( signed_txn.rawTransaction) log.debug("send_raw_transaction returned", tx_hash=encode_hex(tx_hash), **log_details) self._sent = True return TransactionHash(tx_hash)
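# Standalone sketch (not the Raiden transaction-slot class above) of the
# "send at most once per nonce slot" guard used in `send_transaction`: the
# lock plus the `_sent` flag guarantee a nonce is never reused.
from threading import Lock


class OneShotSlot:
    def __init__(self) -> None:
        self._lock = Lock()
        self._sent = False

    def mark_sent(self) -> None:
        with self._lock:
            if self._sent:
                raise RuntimeError("A transaction for this slot has been sent already!")
            self._sent = True


slot = OneShotSlot()
slot.mark_sent()
try:
    slot.mark_sent()
except RuntimeError:
    pass  # the second send is rejected, preventing nonce reuse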
def test_post_pfs_feedback(query_paths_args): """ Test POST feedback to PFS """ feedback_token = uuid4() token_network_address = factories.make_token_network_address() route = [factories.make_address(), factories.make_address()] with patch.object(session, "post", return_value=mocked_json_response()) as feedback: post_pfs_feedback( routing_mode=RoutingMode.PFS, pfs_config=query_paths_args["pfs_config"], token_network_address=token_network_address, route=route, token=feedback_token, successful=True, ) assert feedback.called assert feedback.call_args[0][0].find( to_checksum_address(token_network_address)) > 0 payload = feedback.call_args[1]["json"] assert payload["token"] == feedback_token.hex assert payload["success"] is True assert payload["path"] == [to_checksum_address(addr) for addr in route] with patch.object(session, "post", return_value=mocked_json_response()) as feedback: post_pfs_feedback( routing_mode=RoutingMode.PFS, pfs_config=query_paths_args["pfs_config"], token_network_address=token_network_address, route=route, token=feedback_token, successful=False, ) assert feedback.called assert feedback.call_args[0][0].find( to_checksum_address(token_network_address)) > 0 payload = feedback.call_args[1]["json"] assert payload["token"] == feedback_token.hex assert payload["success"] is False assert payload["path"] == [to_checksum_address(addr) for addr in route] with patch.object(session, "post", return_value=mocked_json_response()) as feedback: post_pfs_feedback( routing_mode=RoutingMode.PRIVATE, pfs_config=query_paths_args["pfs_config"], token_network_address=token_network_address, route=route, token=feedback_token, successful=False, ) assert not feedback.called
def init( self, monitoring_service_address: MonitoringServiceAddress, one_to_n_address: OneToNAddress, given_block_identifier: BlockSpecification, ) -> None: """ Initialize the UserDeposit contract with MS and OneToN addresses """ log_details = { "monitoring_service_address": to_checksum_address(monitoring_service_address), "one_to_n_address": to_checksum_address(one_to_n_address), } check_address_has_code( client=self.client, address=Address(monitoring_service_address), contract_name=CONTRACT_MONITORING_SERVICE, expected_code=decode_hex( self.contract_manager.get_runtime_hexcode( CONTRACT_MONITORING_SERVICE)), ) check_address_has_code( client=self.client, address=Address(one_to_n_address), contract_name=CONTRACT_ONE_TO_N, expected_code=decode_hex( self.contract_manager.get_runtime_hexcode(CONTRACT_ONE_TO_N)), ) try: existing_monitoring_service_address = self.monitoring_service_address( block_identifier=given_block_identifier) existing_one_to_n_address = self.one_to_n_address( block_identifier=given_block_identifier) except ValueError: pass except BadFunctionCallOutput: raise_on_call_returned_empty(given_block_identifier) else: if existing_monitoring_service_address != EMPTY_ADDRESS: msg = ( f"MonitoringService contract address is already set to " f"{to_checksum_address(existing_monitoring_service_address)}" ) raise BrokenPreconditionError(msg) if existing_one_to_n_address != EMPTY_ADDRESS: msg = (f"OneToN contract address is already set to " f"{to_checksum_address(existing_one_to_n_address)}") raise BrokenPreconditionError(msg) with log_transaction(log, "init", log_details): self._init( monitoring_service_address=monitoring_service_address, one_to_n_address=one_to_n_address, log_details=log_details, )
def __init__(
    self,
    web3: Web3,
    privkey: Optional[PrivateKey],
    gas_price_strategy: Callable = rpc_gas_price_strategy,
    gas_estimate_correction: Callable = lambda gas: gas,
    block_num_confirmations: int = 0,
) -> None:
    if privkey is None or len(privkey) != 32:
        raise ValueError("Invalid private key")

    if block_num_confirmations < 0:
        raise ValueError("Number of confirmations must not be negative")

    monkey_patch_web3(web3, gas_price_strategy)

    version = web3.version.node
    supported, eth_node, _ = is_supported_client(version)

    if not supported:
        raise EthNodeInterfaceError(f"Unsupported Ethereum client {version}")

    address = privatekey_to_address(privkey)
    address_checksummed = to_checksum_address(address)

    if eth_node is EthClient.PARITY:
        parity_assert_rpc_interfaces(web3)
        available_nonce = parity_discover_next_available_nonce(
            web3, address_checksummed)
    elif eth_node is EthClient.GETH:
        geth_assert_rpc_interfaces(web3)
        available_nonce = geth_discover_next_available_nonce(
            web3, address_checksummed)

    self.eth_node = eth_node
    self.privkey = privkey
    self.address = address
    self.web3 = web3
    self.default_block_num_confirmations = block_num_confirmations

    # Ask for the chain id only once and store it here
    self.chain_id = ChainID(int(self.web3.version.network))

    self._available_nonce = available_nonce
    self._nonce_lock = Semaphore()
    self._gas_estimate_correction = gas_estimate_correction

    log.debug(
        "JSONRPCClient created",
        node=to_checksum_address(self.address),
        available_nonce=available_nonce,
        client=version,
    )
def sign(self, privkey: bytes) -> None: self.signature = Signature( sign_one_to_n_iou( privatekey=encode_hex(privkey), sender=to_checksum_address(self.sender), receiver=to_checksum_address(self.receiver), amount=self.amount, expiration_block=self.expiration_block, one_to_n_address=to_checksum_address(self.one_to_n_address), chain_id=self.chain_id, ))
def flatten_transfer(transfer: LockedTransferType, role: TransferRole) -> Dict[str, Any]: return { "payment_identifier": str(transfer.payment_identifier), "token_address": to_checksum_address(transfer.token), "token_network_address": to_checksum_address(transfer.balance_proof.token_network_address), "channel_identifier": str(transfer.balance_proof.channel_identifier), "initiator": to_checksum_address(transfer.initiator), "target": to_checksum_address(transfer.target), "transferred_amount": str(transfer.balance_proof.transferred_amount), "locked_amount": str(transfer.balance_proof.locked_amount), "role": role.value, }
def test_no_iou_when_pfs_price_0(query_paths_args): """ Test that no IOU is sent when PFS is for free """ query_paths_args["pfs_config"] = PFSConfig( info=PFSInfo( url="abc", price=TokenAmount(0), chain_id=ChainID(42), token_network_registry_address=factories. make_token_network_registry_address(), user_deposit_address=factories.make_address(), payment_address=factories.make_address(), confirmed_block_number=dict(number=BlockNumber(1)), message="", operator="", version="", matrix_server="http://matrix.example.com", matrix_room_id="!room-id:matrix.example.com", ), maximum_fee=TokenAmount(100), iou_timeout=BlockNumber(100), max_paths=5, ) with patch("raiden.network.pathfinding.get_pfs_info") as mocked_pfs_info: mocked_pfs_info.return_value = PFS_CONFIG.info with patch.object(pathfinding, "post_pfs_paths", return_value=mocked_json_response()) as post_path: query_paths( pfs_config=query_paths_args["pfs_config"], our_address=query_paths_args["our_address"], privkey=query_paths_args["privkey"], current_block_number=query_paths_args["current_block_number"], token_network_address=query_paths_args[ "token_network_address"], one_to_n_address=query_paths_args["one_to_n_address"], chain_id=query_paths_args["chain_id"], route_from=query_paths_args["route_from"], route_to=query_paths_args["route_to"], value=query_paths_args["value"], pfs_wait_for_block=query_paths_args["pfs_wait_for_block"], ) assert post_path.call_args == call( payload={ "from": to_checksum_address(query_paths_args["route_from"]), "to": to_checksum_address(query_paths_args["route_to"]), "value": query_paths_args["value"], "max_paths": query_paths_args["pfs_config"].max_paths, }, token_network_address=query_paths_args["token_network_address"], url=query_paths_args["pfs_config"].info.url, )
def as_json(self) -> Dict[str, Any]: data = dict( sender=to_checksum_address(self.sender), receiver=to_checksum_address(self.receiver), one_to_n_address=to_checksum_address(self.one_to_n_address), amount=self.amount, expiration_block=self.expiration_block, chain_id=self.chain_id, ) if self.signature is not None: data["signature"] = to_hex(self.signature) return data
def restore_to_state_change( transition_function: Callable, storage: SerializedSQLiteStorage, state_change_identifier: StateChangeID, node_address: Address, ) -> Tuple[int, int, "WriteAheadLog"]: chain_state: Optional[State] from_identifier: StateChangeID snapshot = storage.get_snapshot_before_state_change( state_change_identifier=state_change_identifier) if snapshot is not None: log.debug( "Restoring from snapshot", from_state_change_id=snapshot.state_change_identifier, to_state_change_id=state_change_identifier, node=to_checksum_address(node_address), ) from_identifier = snapshot.state_change_identifier chain_state = snapshot.data state_change_qty = snapshot.state_change_qty else: log.debug( "No snapshot found, replaying all state changes", to_state_change_id=state_change_identifier, node=to_checksum_address(node_address), ) from_identifier = LOW_STATECHANGE_ULID chain_state = None state_change_qty = 0 state_manager = StateManager(transition_function, chain_state) wal = WriteAheadLog(state_manager, storage) unapplied_state_changes = storage.get_statechanges_by_range( Range(from_identifier, state_change_identifier)) if unapplied_state_changes: log.debug( "Replaying state changes", replayed_state_changes=[ redact_secret(DictSerializer.serialize(state_change)) for state_change in unapplied_state_changes ], node=to_checksum_address(node_address), ) for state_change in unapplied_state_changes: wal.state_manager.dispatch(state_change) return state_change_qty, len(unapplied_state_changes), wal
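# Illustrative, self-contained sketch (not Raiden's WriteAheadLog) of the
# snapshot-plus-replay idea behind `restore_to_state_change`: start from the
# newest snapshot taken at or before the target state change, then re-apply
# only the events that follow it.  All names here are made up for the example;
# the "state" is just a running total and the "events" are integers.
from typing import Dict, List, Tuple


def replay_from_snapshot(
    snapshots: Dict[int, int], events: List[Tuple[int, int]], target_id: int
) -> int:
    # snapshots: event id -> state recorded after applying that event
    # events:    ordered (event_id, value) pairs; applying an event adds value
    usable = [event_id for event_id in snapshots if event_id <= target_id]
    from_id = max(usable) if usable else 0
    state = snapshots.get(from_id, 0)
    for event_id, value in events:
        if from_id < event_id <= target_id:
            state += value
    return state


events = [(1, 10), (2, 5), (3, 7), (4, 1)]
snapshots = {2: 15}  # state after events 1 and 2
assert replay_from_snapshot(snapshots, events, target_id=4) == 23
assert replay_from_snapshot({}, events, target_id=3) == 22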
def _maybe_run_callbacks(self, latest_block: BlockData) -> None:
    """Run the callbacks if there is at least one new block.

    The callbacks are executed only if there is a new block, otherwise the
    filters may try to poll for a nonexistent block number and the Ethereum
    client can return a JSON-RPC error.
    """
    latest_block_number = latest_block["number"]

    # First run, set the block and run the callbacks
    if self.known_block_number is None:
        self.known_block_number = latest_block_number
        missed_blocks = 1
    else:
        missed_blocks = latest_block_number - self.known_block_number

    if missed_blocks < 0:
        log.critical(
            "Block number decreased",
            chain_id=self.rpc_client.chain_id,
            known_block_number=self.known_block_number,
            old_block_number=latest_block["number"],
            old_gas_limit=latest_block["gasLimit"],
            old_block_hash=to_hex(latest_block["hash"]),
            node=to_checksum_address(self.rpc_client.address),
        )
    elif missed_blocks > 0:
        log_details = dict(
            known_block_number=self.known_block_number,
            latest_block_number=latest_block_number,
            latest_block_hash=to_hex(latest_block["hash"]),
            latest_block_gas_limit=latest_block["gasLimit"],
            node=to_checksum_address(self.rpc_client.address),
        )
        if missed_blocks > 1:
            log_details["num_missed_blocks"] = missed_blocks - 1

        log.debug("Received new block", **log_details)

        remove = list()
        for callback in self.callbacks:
            result = callback(latest_block)
            if result is REMOVE_CALLBACK:
                remove.append(callback)

        for callback in remove:
            self.callbacks.remove(callback)

        self.known_block_number = latest_block_number
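# Tiny standalone illustration (not the Raiden class above) of the
# missed-block arithmetic used in `_maybe_run_callbacks`: callbacks fire only
# when the chain head moved forward, and a decreasing head is flagged.
from typing import Optional


def classify_head_update(known: Optional[int], latest: int) -> str:
    missed = 1 if known is None else latest - known
    if missed < 0:
        return "block-number-decreased"  # logged as critical above
    if missed == 0:
        return "no-new-block"            # nothing to do, avoid polling a stale head
    if missed > 1:
        return f"run-callbacks ({missed - 1} block(s) skipped)"
    return "run-callbacks"


assert classify_head_update(None, 100) == "run-callbacks"
assert classify_head_update(100, 100) == "no-new-block"
assert classify_head_update(100, 103) == "run-callbacks (2 block(s) skipped)"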
def test_call_inexisting_address(deploy_client):
    """ A JSON RPC call to a nonexistent address returns empty data. """

    inexisting_address = b"\x01\x02\x03\x04\x05" * 4

    assert len(
        deploy_client.web3.eth.getCode(
            to_checksum_address(inexisting_address))) == 0

    transaction = {
        "from": to_checksum_address(deploy_client.address),
        "to": to_checksum_address(inexisting_address),
        "data": b"",
        "value": 0,
    }
    assert deploy_client.web3.eth.call(transaction) == b""
def check_address_has_code(
    client: "JSONRPCClient",
    address: Address,
    contract_name: str = "",
    expected_code: Optional[bytes] = None,
) -> None:
    """ Checks that the given address contains code. """
    result = client.web3.eth.getCode(to_checksum_address(address), "latest")

    if not result:
        raise AddressWithoutCode("[{}]Address {} does not contain code".format(
            contract_name, to_checksum_address(address)))

    if expected_code is not None and result != expected_code:
        raise ContractCodeMismatch(
            f"[{contract_name}]Address {to_checksum_address(address)} has wrong code."
        )
def handle_paymentsentsuccess( raiden: "RaidenService", payment_sent_success_event: EventPaymentSentSuccess ) -> None: # pragma: no unittest feedback_token = raiden.route_to_feedback_token.get( tuple(payment_sent_success_event.route)) pfs_config = raiden.config.pfs_config if feedback_token and pfs_config: log.debug( "Received payment success event", route=[ to_checksum_address(node) for node in payment_sent_success_event.route ], feedback_token=feedback_token, ) post_pfs_feedback( routing_mode=raiden.routing_mode, pfs_config=pfs_config, token_network_address=payment_sent_success_event. token_network_address, route=payment_sent_success_event.route, token=feedback_token, successful=True, )
def restore_state( transition_function: Callable, storage: SerializedSQLiteStorage, state_change_identifier: StateChangeID, node_address: Address, ) -> Optional[State]: snapshot = storage.get_snapshot_before_state_change( state_change_identifier=state_change_identifier) if snapshot is None: return None log.debug( "Snapshot found", from_state_change_id=snapshot.state_change_identifier, to_state_change_id=state_change_identifier, node=to_checksum_address(node_address), ) state, _ = replay_state_changes( node_address=node_address, state=snapshot.data, state_change_range=Range(snapshot.state_change_identifier, state_change_identifier), storage=storage, transition_function=transition_function, ) return state
def wait_for_network_state(
    raiden: "RaidenService",
    node_address: Address,
    network_state: NetworkState,
    retry_timeout: float,
) -> None:  # pragma: no unittest
    """Wait until the network state of `node_address` matches `network_state`.

    Note:
        This does not time out, use gevent.Timeout.
    """
    network_statuses = views.get_networkstatuses(
        views.state_from_raiden(raiden))
    current = network_statuses.get(node_address)

    log_details = {
        "node_address": to_checksum_address(node_address),
        "target_network_state": network_state,
    }

    while current != network_state:
        assert raiden, TRANSPORT_ERROR_MSG
        assert raiden.transport, TRANSPORT_ERROR_MSG

        log.debug("wait_for_network_state", current_network_state=current, **log_details)

        gevent.sleep(retry_timeout)

        network_statuses = views.get_networkstatuses(
            views.state_from_raiden(raiden))
        current = network_statuses.get(node_address)
def _find_new_partners(self) -> List[Address]: """ Search the token network for potential channel partners. """ open_channels = views.get_channelstate_open( chain_state=views.state_from_raiden(self.raiden), token_network_registry_address=self.registry_address, token_address=self.token_address, ) known = set(channel_state.partner_state.address for channel_state in open_channels) known.add(self.BOOTSTRAP_ADDR) known.add(self.raiden.address) participants_addresses = views.get_participants_addresses( views.state_from_raiden(self.raiden), self.registry_address, self.token_address) available_addresses = list(participants_addresses - known) shuffle(available_addresses) new_partners = available_addresses log.debug( "Found partners", node=to_checksum_address(self.raiden.address), number_of_partners=len(available_addresses), ) return new_partners
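# Minimal standalone sketch (made-up names, not the ConnectionManager API) of
# the partner selection in `_find_new_partners`: drop already-known addresses,
# then shuffle so different nodes do not all pick the same partners.
from random import shuffle
from typing import List, Set


def pick_new_partners(participants: Set[str], known: Set[str]) -> List[str]:
    candidates = list(participants - known)
    shuffle(candidates)
    return candidates


print(pick_new_partners({"0xA", "0xB", "0xC"}, {"0xB"}))  # e.g. ['0xC', '0xA']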
def main(keystore_file, password, rpc_url, eth_amount, targets_file) -> None:
    web3 = Web3(HTTPProvider(rpc_url))
    with open(keystore_file, "r") as keystore:
        account = Account(json.load(keystore), password, keystore_file)
        assert account.privkey, "Could not decode keystore file: wrong password"
        assert account.address, "Could not decode keystore file: no 'address' field found"
        print("Using account:", to_checksum_address(account.address))
    client = JSONRPCClient(
        web3, account.privkey,
        block_num_confirmations=DEFAULT_NUMBER_OF_BLOCK_CONFIRMATIONS)

    # Ignore empty lines in the targets file
    targets = [t.strip() for t in targets_file if t.strip()]
    balance = client.balance(client.address)

    balance_needed = len(targets) * eth_amount
    if balance_needed * WEI_TO_ETH > balance:
        print(
            "Not enough balance to fund {} accounts with {} eth each. Need {}, have {}"
            .format(len(targets), eth_amount, balance_needed, balance / WEI_TO_ETH))
        # Abort instead of sending transfers that are bound to fail
        return

    print("Sending {} eth to:".format(eth_amount))
    for target in targets:
        print(" - {}".format(target))
        gas_price = web3.eth.gasPrice  # pylint: disable=no-member
        client.transact(
            EthTransfer(to_address=target,
                        value=eth_amount * WEI_TO_ETH,
                        gas_price=gas_price))
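# Standalone sketch of the funding-budget check performed in `main` above.
# WEI_TO_ETH is assumed to be 10 ** 18 (wei per ether), matching how the
# script multiplies ether amounts into wei.
WEI_TO_ETH = 10 ** 18


def enough_balance(balance_wei: int, num_targets: int, eth_each: int) -> bool:
    needed_wei = num_targets * eth_each * WEI_TO_ETH
    return balance_wei >= needed_wei


assert enough_balance(5 * WEI_TO_ETH, 4, 1)
assert not enough_balance(3 * WEI_TO_ETH, 4, 1)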
def handle_routefailed( raiden: "RaidenService", route_failed_event: EventRouteFailed ) -> None: # pragma: no unittest feedback_token = raiden.route_to_feedback_token.get( tuple(route_failed_event.route)) pfs_config = raiden.config.pfs_config if feedback_token and pfs_config: log.debug( "Received event for failed route", route=[ to_checksum_address(node) for node in route_failed_event.route ], secrethash=encode_hex(route_failed_event.secrethash), feedback_token=feedback_token, ) post_pfs_feedback( routing_mode=raiden.routing_mode, pfs_config=pfs_config, token_network_address=route_failed_event.token_network_address, route=route_failed_event.route, token=feedback_token, successful=False, )
def test_resending_mined_transaction_raises(deploy_client: JSONRPCClient) -> None: """ If a mined transaction is re-sent the exception `EthereumNonceTooLow` is raised. """ # Use a _fixed_ gas price strategy so that both transactions are identical. deploy_client.web3.eth.setGasPriceStrategy(make_fixed_gas_price_strategy(GasPrice(2000000000))) contract_proxy, _ = deploy_rpc_test_contract(deploy_client, "RpcTest") address = contract_proxy.contract_address assert len(deploy_client.web3.eth.getCode(to_checksum_address(address))) > 0 # Create a new instance of the JSONRPCClient, this will store the current available nonce client_invalid_nonce = JSONRPCClient(deploy_client.web3, deploy_client.privkey) check_block = deploy_client.get_checking_block() gas_estimate = contract_proxy.estimate_gas(check_block, "ret") assert gas_estimate, "Gas estimation should not fail here" startgas = safe_gas_limit(gas_estimate) txhash = contract_proxy.transact("ret", startgas) deploy_client.poll(txhash) # At this point `client_invalid_nonce` has a nonce that is `1` too low, # since a transaction was sent using `deploy_client` above and these two # instances share the same underlying private key. # # Note that the same function is called in this test. with pytest.raises(EthereumNonceTooLow): client_invalid_nonce.new_contract_proxy( abi=contract_proxy.contract.abi, contract_address=contract_proxy.contract_address ).transact("ret", startgas)
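# Standalone illustration (no Ethereum client involved) of why the test above
# raises: two client instances sharing one account each cache their own "next
# nonce"; once one of them gets a transaction mined, the other's cached nonce
# is stale and any transaction reusing it is rejected as "nonce too low".
def accepts(pool_next_nonce: int, tx_nonce: int) -> bool:
    return tx_nonce >= pool_next_nonce


next_nonce = 5                     # both clients start here
assert accepts(next_nonce, 5)      # first transaction with nonce 5 is accepted
next_nonce += 1                    # it gets mined, the pool now expects nonce 6
assert not accepts(next_nonce, 5)  # the second client still tries nonce 5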
def get_filter_args_for_specific_event_from_channel(
    token_network_address: TokenNetworkAddress,
    channel_identifier: ChannelID,
    event_name: str,
    contract_manager: ContractManager,
    from_block: BlockIdentifier = GENESIS_BLOCK_NUMBER,
    to_block: BlockIdentifier = BLOCK_ID_LATEST,
) -> FilterParams:
    """ Return the filter params for a specific event of a given channel. """
    event_abi = contract_manager.get_event_abi(CONTRACT_TOKEN_NETWORK, event_name)

    # Here the topics for a specific event are created
    # The first entry of the topics list is the event signature hash (topic0), then the
    # first indexed parameter is encoded; in the case of a token network, the first
    # parameter is always the channel identifier
    _, event_filter_params = construct_event_filter_params(
        event_abi=event_abi,
        abi_codec=ABI_CODEC,
        contract_address=to_checksum_address(token_network_address),
        argument_filters={"channel_identifier": channel_identifier},
        fromBlock=from_block,
        toBlock=to_block,
    )

    return event_filter_params
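# Standalone illustration of what the "first entry of the topics list" means in
# the filter built above: topic0 is the keccak-256 hash of the event signature.
# The well-known ERC-20 Transfer event is used here purely as an example; it is
# not the TokenNetwork event filtered by the function above.  Requires the
# `eth_utils` package.
from eth_utils import keccak

topic0 = "0x" + keccak(text="Transfer(address,address,uint256)").hex()
assert topic0 == "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"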