# Timeouts TEST_SETTLE_TIMEOUT_MIN = 5 TEST_SETTLE_TIMEOUT_MAX = 100000 DEPLOY_SETTLE_TIMEOUT_MIN = 500 # ~ 2 hours DEPLOY_SETTLE_TIMEOUT_MAX = 555428 # ~ 3 months # Temporary deposit limits for the Red Eyes release in WEI MAX_ETH_CHANNEL_PARTICIPANT = int(0.075 * 10**18) MAX_ETH_TOKEN_NETWORK = int(250 * 10**18) # Special hashes LOCKSROOT_OF_NO_LOCKS = keccak(b"") EMPTY_ADDRESS = HexAddress( HexStr("0x0000000000000000000000000000000000000000")) # Event names # TokenNetworkRegistry EVENT_TOKEN_NETWORK_CREATED = "TokenNetworkCreated" # TokenNetwork EVENT_DEPRECATION_SWITCH = "DeprecationSwitch" # SecretRegistry EVENT_SECRET_REVEALED = "SecretRevealed" # EndpointRegistry EVENT_ADDRESS_REGISTERED = "AddressRegistered" # ServiceRegistry
def _ens_lookup(
        self,
        web3: Optional[Web3],
        name: str,
        blockchain: SupportedBlockchain = SupportedBlockchain.ETHEREUM,
) -> Optional[Union[ChecksumEthAddress, HexStr]]:
    """Performs an ENS lookup and returns address if found else None

    TODO: currently web3.py 5.15.0 does not support multichain ENS domains
    (EIP-2304), therefore requesting a non-Ethereum address won't use the
    web3 ens library and will require to extend the library resolver ABI.
    An issue in their repo (#1839) reporting the lack of support has been
    created. This function will require refactoring once they include
    support for EIP-2304.
    https://github.com/ethereum/web3.py/issues/1839

    May raise:
    - RemoteError if Etherscan is used and there is a problem querying it or
    parsing its response
    - InputError if the given name is not a valid ENS name
    """
    try:
        normal_name = normalize_name(name)
    except InvalidName as e:
        raise InputError(str(e)) from e

    # The namehash is needed both for the resolver query and for the address
    # query below, so compute it only once.
    normal_name_hash = normal_name_to_hash(normal_name)
    resolver_addr = self._call_contract(
        web3=web3,
        contract_address=ENS_MAINNET_ADDR,
        abi=ENS_ABI,
        method_name='resolver',
        arguments=[normal_name_hash],
    )
    if is_none_or_zero_address(resolver_addr):
        return None

    ens_resolver_abi = ENS_RESOLVER_ABI.copy()
    arguments = [normal_name_hash]
    if blockchain != SupportedBlockchain.ETHEREUM:
        # Non-Ethereum chains need the multichain addr(node, coinType)
        # resolver method (EIP-2304), which the stock ABI lacks.
        ens_resolver_abi.extend(ENS_RESOLVER_ABI_MULTICHAIN_ADDRESS)
        arguments.append(blockchain.ens_coin_type())

    try:
        deserialized_resolver_addr = deserialize_ethereum_address(resolver_addr)
    except DeserializationError:
        # Fixed: the two f-string fragments previously joined without a
        # space, producing "while doingens lookup" in the logs.
        log.error(
            f'Error deserializing address {resolver_addr} while doing '
            f'ens lookup',
        )
        return None

    address = self._call_contract(
        web3=web3,
        contract_address=deserialized_resolver_addr,
        abi=ens_resolver_abi,
        method_name='addr',
        arguments=arguments,
    )
    if is_none_or_zero_address(address):
        return None

    if blockchain != SupportedBlockchain.ETHEREUM:
        # Multichain lookups return raw bytes; hand back the hex encoding.
        return HexStr(address.hex())
    try:
        return deserialize_ethereum_address(address)
    except DeserializationError:
        log.error(f'Error deserializing address {address}')
        return None
def trim_hex(hexstr: HexStr) -> HexStr:
    """Return *hexstr* with redundant leading zeros stripped.

    A value that collapses to bare ``'0x'`` is normalized to ``'0x0'``.
    """
    trimmed = hexstr
    if trimmed.startswith('0x0'):
        trimmed = HexStr(re.sub('^0x0+', '0x', trimmed))
    return HexStr('0x0') if trimmed == '0x' else trimmed
def node_address_from_userid(user_id: Optional[str]) -> Optional[AddressHex]:
    """Extract the hex node address from a Matrix-style ``@<address>:server`` id.

    Returns ``None`` for a falsy (empty or missing) user id.
    """
    if not user_id:
        return None
    localpart = user_id.split(":", 1)[0]
    # Drop the leading sigil (e.g. '@') from the localpart.
    return AddressHex(HexStr(localpart[1:]))
async def process(self) -> None:
    """Process validators registration."""
    params = await get_voting_parameters(self.network)
    target_block = params["latest_block_number"]

    pool_balance = params["pool_balance"]
    if self.network == GNOSIS_CHAIN:
        # apply GNO <-> mGNO exchange rate
        pool_balance = Wei(int(pool_balance * MGNO_RATE // WAD))

    if pool_balance < self.validator_deposit:
        # not enough balance to register next validator
        return

    # wait until the node has processed the snapshot block
    while not (await has_synced_block(self.network, target_block)):
        await asyncio.sleep(5)

    # select next validator
    # TODO: implement scoring system based on the operators performance
    deposit_data = await select_validator(self.network, target_block)
    if deposit_data is None:
        logger.warning(
            f"[{self.network}] Failed to find the next validator to register")
        return

    validators_deposit_root = await get_validators_deposit_root(
        self.network, target_block)
    public_key = deposit_data["public_key"]
    already_voted = (
        self.last_vote_validators_deposit_root == validators_deposit_root
        and self.last_vote_public_key == public_key)
    if already_voted:
        # already voted for the validator
        return

    # submit vote
    current_nonce = params["validators_nonce"]
    operator = deposit_data["operator"]
    encoded_data: bytes = w3.codec.encode_abi(
        ["uint256", "bytes", "address", "bytes32"],
        [current_nonce, public_key, operator, validators_deposit_root],
    )
    vote = ValidatorVote(
        signature=HexStr(""),
        nonce=current_nonce,
        validators_deposit_root=validators_deposit_root,
        **deposit_data,
    )
    logger.info(
        f"[{self.network}] Voting for the next validator: operator={operator}, public key={public_key}"
    )
    submit_vote(
        network=self.network,
        oracle=self.oracle,
        encoded_data=encoded_data,
        vote=vote,
        name=VALIDATOR_VOTE_FILENAME,
    )
    logger.info(f"[{self.network}] Submitted validator registration vote")

    # skip voting for the same validator and validators deposit root in the next check
    self.last_vote_public_key = public_key
    self.last_vote_validators_deposit_root = validators_deposit_root
def add_0x_prefix(value: HexStr) -> HexStr:
    """Return *value* guaranteed to carry a ``0x`` prefix."""
    return value if is_0x_prefixed(value) else HexStr("0x" + value)
def eth_get_transaction_status(tx_hash: str) -> SubMonitorFuncResponse:
    """Return the on-chain status of a withdrawal transaction.

    Translated from the original notes:
    First fetch the transaction by its hash; if it cannot be fetched the
    transaction is no longer in the transaction pool.
    (1) Not finding it via ``getTransaction`` does not necessarily mean the
        transaction failed — it may have been moved to the future queue.
    (2) If ``getTransaction`` finds it and ``getTransactionReceipt`` returns a
        receipt whose status is 0, then the ERC20 transaction failed.

    :param tx_hash: transaction hash to look up
    :return: a ``SubMonitorFuncResponse`` with status, confirmations and block info
    """
    myweb3 = Web3(provider=HTTPProvider(endpoint_uri=ETH_FULL_NODE_RPC_URL))

    # Defaults: a broadcast transaction starts out as PENDING.
    rsp = SubMonitorFuncResponse()
    rsp.transaction_status = WithdrawStatus.transaction_status.PENDING
    rsp.confirmations = 0
    rsp.block_height = 0
    rsp.tx_hash = HexStr(tx_hash)
    rsp.block_time = datetime.now()
    try:
        # Single-iteration loop used as a breakable block: ``break`` jumps to
        # the final logging/return while keeping the defaults set above.
        for _ in range(1):
            # If blockHash/blockNumber are null the transaction is still
            # pending; a dropped transaction raises TransactionNotFound,
            # handled below.
            tx_data = myweb3.eth.getTransaction(rsp.tx_hash)
            # Explicit checks instead of ``assert`` — asserts are stripped
            # under ``python -O``.
            if not isinstance(tx_data, AttributeDict):
                raise TypeError('tx_data is not AttributeDict')
            if tx_data['blockNumber'] is None:
                logger.info(f"tx_hash:{tx_hash} is pending")
                break

            receipt = myweb3.eth.getTransactionReceipt(
                transaction_hash=rsp.tx_hash)
            logger.info(receipt)
            if receipt is None:
                raise ValueError('receipt is None')

            # Keep waiting while the withdrawal has fewer than
            # MIN_CONFIRMATIONS block confirmations.
            latest_block_number = myweb3.eth.blockNumber
            if isinstance(latest_block_number, int):
                MIN_CONFIRMATIONS = 3
                if latest_block_number - receipt[
                        'blockNumber'] < MIN_CONFIRMATIONS:
                    logger.info(
                        f'tx_hash:{tx_hash} is confirmations is less than {MIN_CONFIRMATIONS}, '
                        f'so keep waiting for more confirmations ')
                    break
            else:
                logger.error('get latest blocknumber failed')
                raise Exception('get latest blocknumber failed')

            if receipt['status'] == 1:
                rsp.transaction_status = WithdrawStatus.transaction_status.SUCCESS
            else:
                rsp.transaction_status = WithdrawStatus.transaction_status.FAIL
            rsp.block_height = receipt['blockNumber']

            # On the Rinkeby testnet (chain id 4) the POA middleware is
            # required to parse blocks.
            if not g_IS_MAINNET and ETH_CHAIN_ID == 4:
                from web3.middleware import geth_poa_middleware
                myweb3.middleware_onion.inject(element=geth_poa_middleware,
                                               layer=0)

            block_data = myweb3.eth.getBlock(
                block_identifier=BlockNumber(rsp.block_height),
                full_transactions=False)
            if not isinstance(block_data, AttributeDict):
                raise TypeError('block_data is not AttributeDict')
            rsp.block_time = timestamp_to_datatime(block_data.timestamp)
            rsp.confirmations = latest_block_number - rsp.block_height
    except TransactionNotFound:
        # getTransaction returned null: the transaction was removed from the
        # transaction pool, so treat the withdrawal as failed.
        rsp.transaction_status = WithdrawStatus.transaction_status.FAIL
    except TimeExhausted as e:
        # Timed out fetching the transaction/receipt; leave status PENDING.
        logger.info(f'{e}')
    except Exception as e:
        logger.error(f'{e}')
    logger.info(f'eth_get_transaction_status finished, rsp:{rsp}')
    return rsp
    is_string,
)
from hexbytes import (
    HexBytes,
)
from web3.exceptions import (
    BlockNotFound,
    InvalidAddress,
    TransactionNotFound,
)
from web3.types import (  # noqa: F401
    BlockData,
    FilterParams,
    LogReceipt,
    Nonce,
    SyncStatus,
    TxParams,
    Wei,
)

# Sentinel values used by the test-suite: an address/hash that should not
# exist on the chain under test.
UNKNOWN_ADDRESS = ChecksumAddress(
    HexAddress(HexStr('0xdEADBEeF00000000000000000000000000000000')))
UNKNOWN_HASH = HexStr(
    '0xdeadbeef00000000000000000000000000000000000000000000000000000000')

if TYPE_CHECKING:
    from web3 import Web3  # noqa: F401
    from web3.contract import Contract  # noqa: F401


class EthModuleTest:
    def test_eth_protocolVersion(self, web3: "Web3") -> None:
        # The protocol version is reported as a decimal string by the node.
        protocol_version = web3.eth.protocolVersion

        assert is_string(protocol_version)
        assert protocol_version.isdigit()
def test_eth_submitHashrate(self, web3: "Web3") -> None:
    """Submitting a hashrate for a node id must be acknowledged with True."""
    # node_id from EIP 1474: https://github.com/ethereum/EIPs/pull/1474/files
    node_id = HexStr(
        '59daa26581d0acd1fce254fb7e85952f4c09d0915afd33d3886cd914bc7d283c')
    assert web3.eth.submitHashrate(5000, node_id) is True
class ConnectionManager:  # pragma: no unittest
    """The ConnectionManager provides a high level abstraction for connecting
    to a Token network.

    Note: It is initialized with 0 funds; a connection to the token network
    will be only established _after_ calling `connect(funds)`
    """

    # XXX Hack: for bootstrapping, the first node on a network opens a channel
    # with this address to become visible.
    BOOTSTRAP_ADDR_HEX = AddressHex(HexStr("0x" + "2" * 40))
    BOOTSTRAP_ADDR = to_canonical_address(BOOTSTRAP_ADDR_HEX)

    def __init__(self, raiden: "RaidenService",
                 token_network_address: TokenNetworkAddress):
        self.raiden = raiden
        chain_state = views.state_from_raiden(raiden)
        token_network_state = views.get_token_network_by_address(
            chain_state, token_network_address)
        token_network_registry = views.get_token_network_registry_by_token_network_address(
            chain_state, token_network_address)

        msg = f"Token network for address {to_checksum_address(token_network_address)} not found."
        assert token_network_state, msg

        msg = (f"Token network registry for token network address "
               f"{to_checksum_address(token_network_address)} not found.")
        assert token_network_registry, msg

        # TODO:
        # - Add timeout for transaction polling, used to overwrite the RaidenAPI
        #   defaults
        # - Add a proper selection strategy (#576)
        self.funds = 0
        self.initial_channel_target = 0
        self.joinable_funds_target = 0.0

        self.raiden = raiden
        self.registry_address = token_network_registry.address
        self.token_network_address = token_network_address
        self.token_address = token_network_state.token_address

        # gevent Semaphore; also guards joinable_funds_target mutations below
        self.lock = Semaphore()  #: protects self.funds and self.initial_channel_target
        self._retry_greenlet: Optional[Greenlet] = None

        self.api = RaidenAPI(raiden)

    def connect(
        self,
        funds: typing.TokenAmount,
        initial_channel_target: int = 3,
        joinable_funds_target: float = 0.4,
    ) -> None:
        """Connect to the network.

        Subsequent calls to `connect` are allowed, but will only affect the
        spendable funds and the connection strategy parameters for the future.
        `connect` will not close any channels.

        Note: the ConnectionManager does not discriminate manually opened
        channels from automatically opened ones. If the user manually opened
        channels, those deposit amounts will affect the funding per channel
        and the number of new channels opened.

        Args:
            funds: Target amount of tokens spendable to join the network.
            initial_channel_target: Target number of channels to open.
            joinable_funds_target: Amount of funds not initially assigned.
        """
        confirmed_block_identifier = views.get_confirmed_blockhash(self.raiden)
        token = self.raiden.proxy_manager.token(self.token_address,
                                                confirmed_block_identifier)
        token_balance = token.balance_of(self.raiden.address)

        if token_balance < funds:
            raise InvalidAmount(
                f"Insufficient balance for token {to_checksum_address(self.token_address)}"
            )

        if funds <= 0:
            raise InvalidAmount(
                "The funds to use in the connection need to be a positive integer"
            )

        if joinable_funds_target < 0 or joinable_funds_target > 1:
            raise InvalidAmount(
                f"joinable_funds_target should be between 0 and 1. Given: {joinable_funds_target}"
            )

        with self.lock:
            self.funds = funds
            self.initial_channel_target = initial_channel_target
            self.joinable_funds_target = joinable_funds_target

            log_open_channels(self.raiden, self.registry_address,
                              self.token_address, funds)

            have_online_partners, potential_partners = self._have_online_channels_to_connect_to(
            )

            if not have_online_partners:
                # No reachable partner: open a channel with the bootstrap
                # address (or a random known-but-offline partner) so this
                # node becomes visible on the network.
                bootstrap_address = (self.BOOTSTRAP_ADDR
                                     if len(potential_partners) == 0 else
                                     random.choice(potential_partners))
                log.info(
                    "Bootstrapping token network.",
                    node=to_checksum_address(self.raiden.address),
                    network_id=to_checksum_address(self.registry_address),
                    token_id=to_checksum_address(self.token_address),
                    bootstrap_address=to_checksum_address(bootstrap_address),
                )
                try:
                    self.api.channel_open(
                        registry_address=self.registry_address,
                        token_address=self.token_address,
                        partner_address=bootstrap_address,
                    )
                except DuplicatedChannelError:
                    # If we have none else to connect to and connect got called twice
                    # then it's possible to already have channel with the bootstrap node.
                    # In that case do nothing
                    pass
            else:
                self._open_channels()

    def leave(
        self, registry_address: TokenNetworkRegistryAddress
    ) -> List[NettingChannelState]:
        """ Leave the token network.

        This implies closing all channels and waiting for all channels to be
        settled.
        """
        with self.lock:
            # Setting the target to 0 flags the manager as "leaving" (see
            # _leaving_state) so concurrent retry/join calls become noops.
            self.initial_channel_target = 0

            channels_to_close = views.get_channelstate_open(
                chain_state=views.state_from_raiden(self.raiden),
                token_network_registry_address=registry_address,
                token_address=self.token_address,
            )

            partner_addresses = [
                channel_state.partner_state.address
                for channel_state in channels_to_close
            ]
            self.api.channel_batch_close(registry_address, self.token_address,
                                         partner_addresses)

            channel_ids = [
                channel_state.identifier
                for channel_state in channels_to_close
            ]

            waiting.wait_for_settle(
                self.raiden,
                registry_address,
                self.token_address,
                channel_ids,
                self.raiden.alarm.sleep_time,
            )

            return channels_to_close

    def spawn_retry(self) -> Optional[Greenlet]:
        """This makes sure, there is only one retry greenlet running at a time."""
        if not self._retry_greenlet:
            self._retry_greenlet = spawn_named("cm-retry_connect",
                                               self.retry_connect)
            return self._retry_greenlet
        return None

    def join_channel(self, partner_address: Address,
                     partner_deposit: TokenAmount) -> None:
        """Will be called, when we were selected as channel partner by another
        node.

        It will fund the channel with up to the partners deposit, but not more
        than remaining funds or the initial funding per channel.

        If the connection manager has no funds, this is a noop.
        """
        # Consider this race condition:
        #
        # - Partner opens the channel and starts the deposit.
        # - This nodes learns about the new channel, starts ConnectionManager's
        #   retry_connect, which will start a deposit for this half of the
        #   channel.
        # - This node learns about the partner's deposit before its own.
        #   join_channel is called which will try to deposit again.
        #
        # To fix this race, first the node must wait for the pending operations
        # to finish, because in them could be a deposit, and then deposit must
        # be called only if the channel is still not funded.
        confirmed_block_identifier = views.get_confirmed_blockhash(self.raiden)
        token_network_proxy = self.raiden.proxy_manager.token_network(
            self.token_network_address,
            block_identifier=confirmed_block_identifier)

        # Wait for any pending operation in the channel to complete, before
        # deciding on the deposit
        with self.lock, token_network_proxy.channel_operations_lock[
                partner_address]:
            channel_state = views.get_channelstate_for(
                chain_state=views.state_from_raiden(self.raiden),
                token_network_registry_address=self.registry_address,
                token_address=self.token_address,
                partner_address=partner_address,
            )

            if not channel_state:
                return

            joining_funds = min(partner_deposit, self._funds_remaining,
                                self._initial_funding_per_partner)
            if joining_funds <= 0 or self._leaving_state:
                return

            # Already funded at least as much as we would deposit — nothing
            # to do.
            if joining_funds <= channel_state.our_state.contract_balance:
                return

            try:
                self.api.set_total_channel_deposit(self.registry_address,
                                                   self.token_address,
                                                   partner_address,
                                                   joining_funds)
            except RaidenRecoverableError:
                log.info("Channel not in opened state",
                         node=to_checksum_address(self.raiden.address))
            except InvalidDBData:
                raise
            except RaidenUnrecoverableError as e:
                should_crash = (
                    self.raiden.config.environment_type != Environment.PRODUCTION
                    or self.raiden.config.unrecoverable_error_should_crash)
                if should_crash:
                    raise

                log.critical(str(e),
                             node=to_checksum_address(self.raiden.address))
            else:
                log.info(
                    "Joined a channel",
                    node=to_checksum_address(self.raiden.address),
                    partner=to_checksum_address(partner_address),
                    funds=joining_funds,
                )

    def retry_connect(self) -> None:
        """Will be called when new channels in the token network are detected.

        If the minimum number of channels was not yet established, it will try
        to open new channels.

        If the connection manager has no funds, this is a noop.
        """
        with self.lock:
            if self._funds_remaining > 0 and not self._leaving_state:
                self._open_channels()

    def _have_online_channels_to_connect_to(
            self) -> Tuple[bool, List[Address]]:
        """Returns whether there are any possible new online channel partners
        to connect to

        If there are channels online the first element of the returned tuple
        is True
        The second element is the list of all potential addresses to connect
        to(online and offline)
        """
        potential_addresses = self._find_new_partners()
        have_online_channels = False
        for address in potential_addresses:
            reachability = self.raiden.transport.force_check_address_reachability(
                address)
            if reachability == AddressReachability.REACHABLE:
                have_online_channels = True
                break

        return have_online_channels, potential_addresses

    def _find_new_partners(self) -> List[Address]:
        """ Search the token network for potential channel partners. """
        open_channels = views.get_channelstate_open(
            chain_state=views.state_from_raiden(self.raiden),
            token_network_registry_address=self.registry_address,
            token_address=self.token_address,
        )
        # Exclude addresses we already have a channel with, the bootstrap
        # placeholder address and ourselves.
        known = set(channel_state.partner_state.address
                    for channel_state in open_channels)
        known.add(self.BOOTSTRAP_ADDR)
        known.add(self.raiden.address)

        participants_addresses = views.get_participants_addresses(
            views.state_from_raiden(self.raiden), self.registry_address,
            self.token_address)

        available_addresses = list(participants_addresses - known)
        shuffle(available_addresses)
        new_partners = available_addresses

        log.debug(
            "Found partners",
            node=to_checksum_address(self.raiden.address),
            number_of_partners=len(available_addresses),
        )

        return new_partners

    def _join_partner(self, partner: Address) -> None:
        """ Ensure a channel exists with partner and is funded in our side """
        log.info(
            "Trying to join or fund channel with partner further",
            node=to_checksum_address(self.raiden.address),
            partner=to_checksum_address(partner),
        )
        try:
            self.api.channel_open(self.registry_address, self.token_address,
                                  partner)
        except DuplicatedChannelError:
            # If channel already exists (either because partner created it,
            # or it's nonfunded channel), continue to ensure it's funded
            pass

        total_deposit = self._initial_funding_per_partner
        if total_deposit == 0:
            return

        try:
            self.api.set_total_channel_deposit(
                registry_address=self.registry_address,
                token_address=self.token_address,
                partner_address=partner,
                total_deposit=total_deposit,
            )
        except InvalidDBData:
            raise
        except RECOVERABLE_ERRORS:
            log.info(
                "Deposit failed",
                node=to_checksum_address(self.raiden.address),
                partner=to_checksum_address(partner),
            )
        except RaidenUnrecoverableError:
            should_crash = (
                self.raiden.config.environment_type != Environment.PRODUCTION
                or self.raiden.config.unrecoverable_error_should_crash)
            if should_crash:
                raise

            log.critical(
                "Deposit failed",
                node=to_checksum_address(self.raiden.address),
                partner=to_checksum_address(partner),
            )

    def _open_channels(self) -> bool:
        """ Open channels until there are `self.initial_channel_target`
        channels open. Do nothing if there are enough channels open already.

        Note:
            - This method must be called with the lock held.

        Return:
            - False if no channels could be opened
        """
        open_channels = views.get_channelstate_open(
            chain_state=views.state_from_raiden(self.raiden),
            token_network_registry_address=self.registry_address,
            token_address=self.token_address,
        )
        open_channels = [
            channel_state for channel_state in open_channels
            if channel_state.partner_state.address != self.BOOTSTRAP_ADDR
        ]
        funded_channels = [
            channel_state for channel_state in open_channels
            if channel_state.our_state.contract_balance >=
            self._initial_funding_per_partner
        ]
        nonfunded_channels = [
            channel_state for channel_state in open_channels
            if channel_state not in funded_channels
        ]
        possible_new_partners = self._find_new_partners()

        # if we already met our target, break
        if len(funded_channels) >= self.initial_channel_target:
            return False
        # if we didn't, but there's no nonfunded channels and no available partners
        # it means the network is smaller than our target, so we should also break
        if len(nonfunded_channels) == 0 and len(possible_new_partners) == 0:
            return False

        n_to_join = self.initial_channel_target - len(funded_channels)
        nonfunded_partners = [
            channel_state.partner_state.address
            for channel_state in nonfunded_channels
        ]
        # first, fund nonfunded channels, then open and fund with possible_new_partners,
        # until initial_channel_target of funded channels is met
        possible_partners = nonfunded_partners + possible_new_partners
        join_partners: List[Address] = []
        # Also filter the possible partners by excluding offline addresses
        for possible_partner in possible_partners:
            if len(join_partners) == n_to_join:
                break
            reachability = self.raiden.transport.force_check_address_reachability(
                possible_partner)
            if reachability == AddressReachability.REACHABLE:
                join_partners.append(possible_partner)

        log.debug(
            "Spawning greenlets to join partners",
            node=to_checksum_address(self.raiden.address),
            num_greenlets=len(join_partners),
        )

        greenlets = set(
            spawn_named(f"cm-join_partner-{to_checksum_address(partner)}",
                        self._join_partner, partner)
            for partner in join_partners)

        gevent.joinall(greenlets, raise_error=True)
        return True

    @property
    def _initial_funding_per_partner(self) -> TokenAmount:
        """The calculated funding per partner depending on configuration and
        overall funding of the ConnectionManager.

        Note:
            - This attribute must be accessed with the lock held.
        """
        if self.initial_channel_target:
            return TokenAmount(
                int(self.funds * (1 - self.joinable_funds_target) /
                    self.initial_channel_target))

        return TokenAmount(0)

    @property
    def _funds_remaining(self) -> TokenAmount:
        """The remaining funds after subtracting the already deposited amounts.

        Note:
            - This attribute must be accessed with the lock held.
        """
        if self.funds > 0:
            confirmed_block_identifier = views.get_confirmed_blockhash(
                self.raiden)
            token = self.raiden.proxy_manager.token(
                self.token_address, confirmed_block_identifier)
            token_balance = token.balance_of(self.raiden.address)
            sum_deposits = views.get_our_deposits_for_token_network(
                views.state_from_raiden(self.raiden), self.registry_address,
                self.token_address)

            return TokenAmount(min(self.funds - sum_deposits, token_balance))

        return TokenAmount(0)

    @property
    def _leaving_state(self) -> bool:
        """True if the node is leaving the token network.

        Note:
            - This attribute must be accessed with the lock held.
        """
        return self.initial_channel_target < 1

    def __repr__(self) -> str:
        if self.raiden.wal is None:
            return (
                f"{self.__class__.__name__}(target={self.initial_channel_target} "
                "WAL not initialized)")
        open_channels = views.get_channelstate_open(
            chain_state=views.state_from_raiden(self.raiden),
            token_network_registry_address=self.registry_address,
            token_address=self.token_address,
        )
        return (
            f"{self.__class__.__name__}(target={self.initial_channel_target} "
            + f"open_channels={len(open_channels)}:{open_channels!r})")
from dataclasses import asdict

from eth_typing import HexAddress, HexStr

from rotkehlchen.assets.asset import EthereumToken
from rotkehlchen.assets.unknown_asset import UnknownEthereumToken
from rotkehlchen.typing import ChecksumEthAddress

# Fixture data: the Shuffle.Monster V3 token, used as a representative
# "unknown" Ethereum token in these tests.
SHUF_ETHEREUM_ADDRESS = ChecksumEthAddress(
    HexAddress(HexStr('0x3A9FfF453d50D4Ac52A6890647b823379ba36B9E')),
)
SHUF_SYMBOL = 'SHUF'
SHUF_NAME = 'Shuffle.Monster V3'
SHUF_DECIMALS = 18


# Test initialization
def test_init_default():
    """Only address and symbol are required; name and decimals default to None."""
    ue_token = UnknownEthereumToken(
        ethereum_address=SHUF_ETHEREUM_ADDRESS,
        symbol=SHUF_SYMBOL,
    )
    assert ue_token.ethereum_address == SHUF_ETHEREUM_ADDRESS
    assert ue_token.symbol == SHUF_SYMBOL
    assert ue_token.name is None
    assert ue_token.decimals is None


# Test operators
def test_eq():
    ue_token_1 = UnknownEthereumToken(
        ethereum_address=SHUF_ETHEREUM_ADDRESS,
def is_block_hash(value: str) -> bool:
    """True iff *value* is a hex string encoding exactly 32 bytes (a block hash)."""
    if not is_hex(value):
        return False
    return len(remove_0x_prefix(HexStr(value))) == 64
def setup_testchain(eth_client: EthClient, free_port_generator: Iterator[Port],
                    base_datadir: str,
                    base_logdir: str) -> Iterator[Dict[str, Any]]:
    """Start a single-node private test chain and yield its connection details.

    Yields a dict with the client kind, data dir, RPC endpoint, keystore path,
    node executors and a connected Web3 instance; the node is torn down when
    the generator is closed (context-manager exit).
    """
    # This mapping exists to facilitate the transition from parity to
    # openethereum. When all traces of parity are remove, just use
    # ``eth_client.value`` again.
    eth_client_to_executable = {
        EthClient.GETH: "geth",
        EthClient.PARITY: "openethereum",
    }
    ensure_executable(eth_client_to_executable[eth_client])

    rpc_port = next(free_port_generator)
    p2p_port = next(free_port_generator)

    eth_rpc_endpoint = URI(f"http://127.0.0.1:{rpc_port}")
    web3 = Web3(HTTPProvider(endpoint_uri=eth_rpc_endpoint))
    web3.middleware_onion.inject(make_sane_poa_middleware, layer=0)

    eth_nodes = [
        EthNodeDescription(
            private_key=TEST_PRIVKEY,
            rpc_port=rpc_port,
            p2p_port=p2p_port,
            miner=True,  # single node, so it must mine
            extra_config={},
            blockchain_type=eth_client.value,
        )
    ]

    # Random marker makes this chain's genesis unique per test run.
    random_marker = remove_0x_prefix(HexStr(hex(random.getrandbits(100))))
    genesis_description = GenesisDescription(
        prefunded_accounts=[
            AccountDescription(TEST_ACCOUNT_ADDRESS, DEFAULT_BALANCE)
        ],
        random_marker=random_marker,
        chain_id=CHAINNAME_TO_ID["smoketest"],
    )

    datadir = eth_node_to_datadir(privatekey_to_address(TEST_PRIVKEY),
                                  base_datadir)
    # Keystore layout differs between clients.
    if eth_client is EthClient.GETH:
        keystore = geth_keystore(datadir)
    elif eth_client is EthClient.PARITY:
        keystore = parity_keystore(datadir)

    eth_node_runner = run_private_blockchain(
        web3=web3,
        eth_nodes=eth_nodes,
        base_datadir=base_datadir,
        log_dir=base_logdir,
        verbosity="info",
        genesis_description=genesis_description,
    )
    with eth_node_runner as node_executors:
        yield dict(
            eth_client=eth_client,
            base_datadir=base_datadir,
            eth_rpc_endpoint=eth_rpc_endpoint,
            keystore=keystore,
            node_executors=node_executors,
            web3=web3,
        )
from eth_typing import (
    ChecksumAddress,
    HexAddress,
    HexStr,
)
from hexbytes import (
    HexBytes,
)

ACCEPTABLE_STALE_HOURS = 48

# Gas estimates for starting an auction — presumably constant + per-unit
# margin; verify against the registrar contract docs.
AUCTION_START_GAS_CONSTANT = 25000
AUCTION_START_GAS_MARGINAL = 39000

EMPTY_SHA3_BYTES = HexBytes(b'\0' * 32)  # 32 zero bytes
EMPTY_ADDR_HEX = HexAddress(HexStr('0x' + '00' * 20))  # zero address

REVERSE_REGISTRAR_DOMAIN = 'addr.reverse'

# The ENS registry contract address on Ethereum mainnet
ENS_MAINNET_ADDR = ChecksumAddress(
    HexAddress(HexStr('0x00000000000C2E074eC69A0dFb2997BA6C7d2e1e')))
async def process(self, voting_params: DistributorVotingParameters) -> None:
    """Submits vote for the new merkle root and merkle proofs to the IPFS."""
    from_block = voting_params["from_block"]
    to_block = voting_params["to_block"]
    last_updated_at_block = voting_params["last_updated_at_block"]
    current_nonce = voting_params["rewards_nonce"]

    # skip submitting vote if too early or vote has been already submitted
    if (to_block <= last_updated_at_block or self.last_to_block == to_block
            or from_block >= to_block):
        return

    logger.info(
        f"[{self.network}] Voting for Merkle Distributor rewards: from block={from_block}, to block={to_block}"
    )

    # fetch active periodic allocations
    active_allocations = await get_periodic_allocations(
        network=self.network, from_block=from_block, to_block=to_block)
    uniswap_v3_pools = await get_uniswap_v3_pools(network=self.network,
                                                  block_number=to_block)
    # fetch uni v3 distributions
    all_distributions = await get_uniswap_v3_distributions(
        pools=uniswap_v3_pools,
        active_allocations=active_allocations,
        from_block=from_block,
        to_block=to_block,
    )
    # fetch disabled stakers distributions
    disabled_stakers_distributions = (
        await get_disabled_stakers_reward_token_distributions(
            network=self.network,
            distributor_reward=voting_params["distributor_reward"],
            from_block=from_block,
            to_block=to_block,
        ))
    all_distributions.extend(disabled_stakers_distributions)

    last_merkle_root = voting_params["last_merkle_root"]
    last_merkle_proofs = voting_params["last_merkle_proofs"]
    if (last_merkle_root is not None and w3.toInt(hexstr=last_merkle_root)
            and last_merkle_proofs):
        # fetch accounts that have claimed since last merkle root update
        claimed_accounts = await get_distributor_claimed_accounts(
            network=self.network, merkle_root=last_merkle_root)

        # calculate unclaimed rewards
        unclaimed_rewards = await get_unclaimed_balances(
            claimed_accounts=claimed_accounts,
            merkle_proofs=last_merkle_proofs,
        )
    else:
        # no previous root (or zero root): nothing carried over
        unclaimed_rewards = {}

    # calculate reward distributions with coroutines
    tasks = []
    distributor_tokens = await get_distributor_tokens(self.network,
                                                      from_block)
    distributor_redirects = await get_distributor_redirects(
        self.network, from_block)
    for dist in all_distributions:
        distributor_rewards = DistributorRewards(
            network=self.network,
            uniswap_v3_pools=uniswap_v3_pools,
            from_block=dist["from_block"],
            to_block=dist["to_block"],
            distributor_tokens=distributor_tokens,
            distributor_redirects=distributor_redirects,
            reward_token=dist["reward_token"],
            uni_v3_token=dist["uni_v3_token"],
        )
        task = distributor_rewards.get_rewards(
            contract_address=dist["contract"], reward=dist["reward"])
        tasks.append(task)

    # process one time rewards
    tasks.append(
        get_one_time_rewards(network=self.network,
                             from_block=from_block,
                             to_block=to_block))

    # merge results
    results = await asyncio.gather(*tasks)
    final_rewards: Rewards = {}
    for rewards in results:
        final_rewards = DistributorRewards.merge_rewards(
            final_rewards, rewards)

    protocol_reward = voting_params["protocol_reward"]
    operators_rewards, left_reward = await get_operators_rewards(
        network=self.network,
        from_block=from_block,
        to_block=to_block,
        total_reward=protocol_reward,
    )
    partners_rewards, left_reward = await get_partners_rewards(
        network=self.network,
        from_block=from_block,
        to_block=to_block,
        total_reward=left_reward,
    )
    if left_reward > 0:
        # anything not consumed by operators/partners goes to the fallback
        # address
        fallback_rewards: Rewards = {
            self.distributor_fallback_address: {
                self.reward_token_contract_address: str(left_reward)
            }
        }
        final_rewards = DistributorRewards.merge_rewards(
            rewards1=final_rewards,
            rewards2=fallback_rewards,
        )
    for rewards in [operators_rewards, partners_rewards]:
        final_rewards = DistributorRewards.merge_rewards(
            final_rewards, rewards)

    # merge final rewards with unclaimed rewards
    if unclaimed_rewards:
        final_rewards = DistributorRewards.merge_rewards(
            final_rewards, unclaimed_rewards)

    if not final_rewards:
        logger.info(f"[{self.network}] No rewards to distribute")
        return

    # calculate merkle root
    merkle_root, claims = calculate_merkle_root(final_rewards)
    logger.info(
        f"[{self.network}] Generated new merkle root: {merkle_root}")

    claims_link = await upload_claims(claims)
    logger.info(f"[{self.network}] Claims uploaded to: {claims_link}")

    # submit vote
    encoded_data: bytes = w3.codec.encode_abi(
        ["uint256", "string", "bytes32"],
        [current_nonce, claims_link, merkle_root],
    )
    vote = DistributorVote(
        signature=HexStr(""),
        nonce=current_nonce,
        merkle_root=merkle_root,
        merkle_proofs=claims_link,
    )
    submit_vote(
        network=self.network,
        oracle=self.oracle,
        encoded_data=encoded_data,
        vote=vote,
        name=DISTRIBUTOR_VOTE_FILENAME,
    )
    logger.info(
        f"[{self.network}] Distributor vote has been successfully submitted"
    )

    # skip voting for the same block range in the next check
    self.last_to_block = to_block
def test_eth_getTransactionCount_invalid_address(self, web3: "Web3") -> None:
    """A non-checksummed (lowercased) address must be rejected."""
    lowercased = ChecksumAddress(HexAddress(HexStr(web3.eth.coinbase.lower())))
    with pytest.raises(InvalidAddress):
        web3.eth.getTransactionCount(lowercased)
def remove_0x_prefix(value: HexStr) -> HexStr:
    """Return *value* with a leading ``0x`` stripped, if present."""
    return HexStr(value[2:]) if is_0x_prefixed(value) else value
def test_eth_getCode_invalid_address(self, web3: "Web3",
                                     math_contract: "Contract") -> None:
    """getCode must reject an address whose EIP-55 checksum casing is wrong."""
    bad_casing = ChecksumAddress(
        HexAddress(HexStr(math_contract.address.lower())))
    with pytest.raises(InvalidAddress):
        web3.eth.getCode(bad_casing)
from dataclasses import dataclass
from enum import Enum
from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union

from eth_typing import ChecksumAddress, HexStr

from rotkehlchen.accounting.structures import Balance
from rotkehlchen.assets.asset import EthereumToken
from rotkehlchen.fval import FVal
from rotkehlchen.typing import Timestamp

# Pools data
# Identifier of the AdEx "Tom" staking pool.
TOM_POOL_ID = HexStr(
    '0x2ce0c96383fb229d9776f33846e983a956a7d95844fac57b180ed0071d93bb28')
# Maps known pool ids to their human-readable names.
POOL_ID_POOL_NAME = {
    TOM_POOL_ID: 'Tom',
}

# Column layout of an AdEx event row as persisted in the DB (order matters).
AdexEventDBTuple = (
    Tuple[str,  # tx_hash
          str,  # address
          str,  # identity_address
          int,  # timestamp
          str,  # type
          str,  # pool_id
          str,  # amount
          str,  # usd_value
          Optional[str],  # bond_id
          Optional[int],  # nonce
          Optional[int],  # slashed_at
          Optional[int],  # unlock_at
from enum import IntEnum
from typing import NamedTuple

from eth_typing import HexAddress, HexStr
from eth_utils.units import units

from raiden_contracts.utils.signature import private_key_to_address

# Largest value representable in a Solidity uint256.
UINT256_MAX = 2**256 - 1
# Deliberately malformed address string for negative tests.
NOT_ADDRESS = "0xaaa"
# Arbitrary but well-formed address used as a stand-in in tests.
FAKE_ADDRESS = HexAddress(HexStr("0x00112233445566778899aabbccddeeff00112233"))
EMPTY_HEXADDRESS = "0x0000000000000000000000000000000000000000"
# Zero-filled placeholders sized like the real on-chain values.
EMPTY_BALANCE_HASH = b"\x00" * 32
EMPTY_ADDITIONAL_HASH = b"\x00" * 32
EMPTY_SIGNATURE = b"\x00" * 65
# Keystore passphrase used for test accounts.
passphrase = "0"
# Well-known private key funding the test faucet account (test-only, not a secret).
FAUCET_PRIVATE_KEY = "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
FAUCET_ADDRESS = private_key_to_address(FAUCET_PRIVATE_KEY)
FAUCET_ALLOWANCE = 100 * int(units["ether"])  # 100 ether expressed in wei
DEPLOYER_ADDRESS = FAUCET_ADDRESS
NONEXISTENT_LOCKSROOT = b"\x00" * 32
SECONDS_PER_DAY = 60 * 60 * 24

# Constants for ServiceRegistry testing
SERVICE_DEPOSIT = 5000 * (10**18)
DEFAULT_BUMP_NUMERATOR = 6
DEFAULT_BUMP_DENOMINATOR = 5
DEFAULT_DECAY_CONSTANT = 200 * SECONDS_PER_DAY
DEFAULT_REGISTRATION_DURATION = 180 * SECONDS_PER_DAY
DEFAULT_MIN_PRICE = 1000
def is_hex_node_id(value: Any) -> bool:
    """Return True iff *value* is a hex string encoding exactly 32 bytes."""
    if not isinstance(value, str):
        return False
    return is_hex(value) and len(remove_0x_prefix(HexStr(value))) == 64
def select_withdraw_address_balance():  # query withdrawal-address balances
    """Refresh the on-chain balance of every configured withdrawal address.

    Walks every project in ``tb_project``, then every (token, address) pair
    in ``tb_withdraw_config`` for that project, queries the live balance
    per token — HTDF via the node's REST ``/bank/balances`` endpoint, BTC
    via ``BTCProxy``, ETH via a web3 full node, USDT via the ERC-20
    contract, BTU via the Cosmos HRC-20 RPC — and upserts the result into
    ``UserTokenBalances``.  Failures for a single config are printed and
    swallowed so one bad entry does not stop the whole sweep.

    NOTE(review): relies on a module-level ``cursor`` and on several
    host/port/contract constants not visible in this chunk — confirm they
    are initialised before this function runs.
    """
    print(f'into signal user: admin , model: withdraw_address_balance_user , action: select')
    # All project ids.
    sql = "select pro_id from tb_project"
    cursor.execute(sql)
    select_user = cursor.fetchall()
    for k in select_user:
        k = k[0]  # unpack the single-column row into the project id
        user = Project.objects.get(pro_id=k)
        # Token/address pairs configured for withdrawals of this project.
        sql = "select token_name, address from tb_withdraw_config where pro_id=%d"%(k)
        cursor.execute(sql)
        select_withdrawconfig = cursor.fetchall()
        for j in select_withdrawconfig:
            # j[0] is the token symbol, j[1] the withdrawal address.
            try:
                if j[0] == "HTDF":
                    # host = 'htdf2020-test01.orientwalt.cn'
                    # port = 1317
                    from_addr = j[1]
                    HTDF_RPC_HOST = f'{HTDF_NODE_RPC_HOST}:{HTDF_NODE_RPC_PORT}'
                    url = 'http://%s/bank/balances/%s' % (HTDF_RPC_HOST.strip(), from_addr.strip())
                    print(f'url:{url}')
                    rsp = requests.get(url=url, timeout=10)
                    amount = 0
                    if rsp.status_code != 200:
                        print(f'get account info error: {rsp.status_code}')
                    else:
                        # The endpoint returns a list of {denom, amount} coins;
                        # keep the amount of the last matching denom.
                        retList = rsp.json()
                        for item in retList:
                            if item["denom"] == "usdp" or item["denom"] == "htdf" or item["denom"] == "het":
                                amount = str(item["amount"])
                    # if amount == '0':
                    #     print(f'{j.token_name} amount: {amount} -- if amount = 0 continue')
                    #     continue
                    update_UserTokenBalances = UserTokenBalances.objects.filter(pro_id=k, token_name=j[0], withdraw_address=from_addr).first()
                    if update_UserTokenBalances:
                        update_UserTokenBalances.withdraw_balance = amount
                        update_UserTokenBalances.update_time = datetime.now()
                        update_UserTokenBalances.save()
                        print(f'token_name : HTDF address : {from_addr} action: update success')
                    else:
                        UserTokenBalances.objects.create(pro_id=user,token_name=j[0], all_balance=0, withdraw_address=from_addr, withdraw_balance=amount, update_time=datetime.now())
                        print(f'token_name : HTDF address : {from_addr} action: creat success')
                elif j[0] == "BTC":
                    from_addr = j[1]
                    print(f'token_name : BTC address : {from_addr}')
                    proxy = BTCProxy(host=BTC_API_HOST, port=BTC_API_PORT)
                    balance_in_satoshi = proxy.get_balance(address=from_addr, mem_spent=True, mem_recv=True)
                    # Satoshi -> BTC (1 BTC == 10**8 satoshi), rounded down.
                    balance_in_btc = round_down(Decimal(balance_in_satoshi)
                                                / Decimal(10 ** 8))
                    update_UserTokenBalances = UserTokenBalances.objects.filter(pro_id=k, token_name=j[0], withdraw_address=from_addr).first()
                    if update_UserTokenBalances:
                        update_UserTokenBalances.withdraw_balance = balance_in_btc
                        update_UserTokenBalances.update_time = datetime.now()
                        update_UserTokenBalances.save()
                        print(f'token_name : BTC address : {from_addr} action: update success')
                    else:
                        UserTokenBalances.objects.create(pro_id=user, token_name=j[0], all_balance=0, withdraw_address=from_addr, withdraw_balance=balance_in_btc, update_time=datetime.now())
                        print(f'token_name : BTC address : {from_addr} action: creat success')
                    pass
                elif j[0] == "ETH":
                    # host = '192.168.10.199'
                    # port = 28545
                    from_addr = j[1]
                    ETH_FULL_NODE_RPC_URL = 'http://{}:{}'.format(ETH_FULL_NODE_HOST, ETH_FULL_NODE_PORT)
                    print(f'url : {ETH_FULL_NODE_RPC_URL}')
                    block_identifier = HexStr('latest')  # cannot use 'pending'
                    myweb3 = Web3(provider=HTTPProvider(endpoint_uri=URI(ETH_FULL_NODE_RPC_URL)))
                    nbalance = myweb3.eth.getBalance(account=to_checksum_address(from_addr), block_identifier=block_identifier)
                    ether_balance = str(myweb3.fromWei(nbalance, 'ether'))  # ETH balance
                    # decim_eth_balance = round_down(ether_balance)
                    # if ether_balance == '0':
                    #     print(f'{j.token_name} amount: {ether_balance} -- if amount = 0 continue')
                    #     continue
                    update_UserTokenBalances = UserTokenBalances.objects.filter(pro_id=k, token_name=j[0], withdraw_address=from_addr).first()
                    if update_UserTokenBalances:
                        update_UserTokenBalances.withdraw_balance = ether_balance
                        update_UserTokenBalances.update_time = datetime.now()
                        update_UserTokenBalances.save()
                        print(f'token_name : ETH address : {from_addr} action: update success')
                    else:
                        UserTokenBalances.objects.create(pro_id=user,token_name=j[0], all_balance=0, withdraw_address=from_addr, withdraw_balance=ether_balance, update_time=datetime.now())
                        print(f'token_name : ETH address : {from_addr} action: creat success')
                    pass
                elif j[0] == "USDT":
                    chksum_contract_addr = to_checksum_address(ERC20_USDT_CONTRACT_ADDRESS)
                    from_addr = j[1]
                    ETH_FULL_NODE_RPC_URL = 'http://{}:{}'.format(ETH_FULL_NODE_HOST, ETH_FULL_NODE_PORT)
                    print(f'url : {ETH_FULL_NODE_RPC_URL}')
                    myweb3 = Web3(provider=HTTPProvider(endpoint_uri=URI(ETH_FULL_NODE_RPC_URL)))
                    contract = myweb3.eth.contract(address=chksum_contract_addr, abi=EIP20_ABI)
                    erc20_token_balance_int = contract.functions.balanceOf(to_checksum_address(from_addr)).call()
                    # if erc20_token_balance_int == 0:
                    #     print(f'{j.token_name} amount: {erc20_token_balance_int} -- if amount = 0 continue')
                    #     continue
                    # 'mwei' divides by 10**6 — presumably matching USDT's 6 decimals.
                    erc20_token_balance_decimal = str(myweb3.fromWei(erc20_token_balance_int, unit='mwei'))
                    # if erc20_token_balance_decimal == '0':
                    #     print(f'{j.token_name} amount: {erc20_token_balance_decimal} -- if amount = 0 continue')
                    #     continue
                    update_UserTokenBalances = UserTokenBalances.objects.filter(pro_id=k, token_name=j[0], withdraw_address=from_addr).first()
                    if update_UserTokenBalances:
                        update_UserTokenBalances.withdraw_balance = erc20_token_balance_decimal
                        update_UserTokenBalances.update_time = datetime.now()
                        update_UserTokenBalances.save()
                        print(f'token_name : USDT address : {from_addr} action: update success')
                    else:
                        UserTokenBalances.objects.create(pro_id=user, token_name=j[0], all_balance=0, withdraw_address=from_addr, withdraw_balance=erc20_token_balance_decimal, update_time=datetime.now())
                        print(f'token_name : USDT address : {from_addr} action: creat success')
                elif j[0] == "BTU":
                    from_addr = j[1]
                    # Resolve the HRC-20 contract address for this symbol.
                    hrc20_contract = ''
                    hrc20_decimals = 18
                    for con_addr, sym_info in HRC20_CONTRACT_MAP.items():
                        if sym_info['symbol'] == j[0]:
                            hrc20_contract = con_addr
                            hrc20_decimals = sym_info['decimal']
                    assert len(hrc20_contract) == 43, 'hrc20_contract is illegal'
                    assert hrc20_decimals == 18, 'hrc20_deciaml not equal 18'
                    rpc = CosmosProxy(host=HTDF_NODE_RPC_HOST, port=HTDF_NODE_RPC_PORT, cointype=j[0])
                    strbalance = rpc.getHRC20TokenBalance(contract_addr=hrc20_contract, address=from_addr)
                    token_balance = round_down(Decimal(strbalance))
                    update_UserTokenBalances = UserTokenBalances.objects.filter(pro_id=k, token_name=j[0], withdraw_address=from_addr).first()
                    if update_UserTokenBalances:
                        update_UserTokenBalances.withdraw_balance = token_balance
                        update_UserTokenBalances.update_time = datetime.now()
                        update_UserTokenBalances.save()
                        print(f'token_name : BTU address : {from_addr} action: update success')
                    else:
                        UserTokenBalances.objects.create(pro_id=user, token_name=j[0], all_balance=0, withdraw_address=from_addr, withdraw_balance=token_balance, update_time=datetime.now())
                        print(f'token_name : BTU address : {from_addr} action: creat success')
            except Exception as e:
                # Best-effort sweep: log and continue with the next config.
                print(f'select withdraw_address_balance error: {e}')
normal_name_to_hash, normalize_name, raw_name_to_hash, ) if TYPE_CHECKING: from web3 import Web3 # noqa: F401 from web3.contract import ( # noqa: F401 Contract, ) from web3.providers import ( # noqa: F401 BaseProvider, ) from web3.types import ( # noqa: F401 TxParams, ) ENS_MAINNET_ADDR = ChecksumAddress( HexAddress(HexStr('0x314159265dD8dbb310642f98f50C066173C1259b'))) class ENS: """ Quick access to common Ethereum Name Service functions, like getting the address for a name. Unless otherwise specified, all addresses are assumed to be a `str` in `checksum format <https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md>`_, like: ``"0x314159265dD8dbb310642f98f50C066173C1259b"`` """ labelhash = staticmethod(label_to_hash) namehash = staticmethod(raw_name_to_hash) nameprep = staticmethod(normalize_name)
def test_trace_transaction(self, web3: "Web3",
                           parity_fixture_data: Dict[str, str]) -> None:
    """The first trace action's sender must equal the fixture coinbase."""
    txn_hash = HexStr(parity_fixture_data['mined_txn_hash'])
    trace = web3.parity.traceTransaction(txn_hash)
    expected_sender = add_0x_prefix(HexStr(parity_fixture_data['coinbase']))
    assert trace[0]['action']['from'] == expected_sender
def address_to_reverse_domain(address: ChecksumAddress) -> str:
    """Build the reverse-registrar ENS name for *address*.

    The address is normalized to lowercase hex, stripped of its ``0x``
    prefix, and joined with the reverse registrar domain.
    """
    hex_part = remove_0x_prefix(HexStr(to_normalized_address(address)))
    return '.'.join((hex_part, REVERSE_REGISTRAR_DOMAIN))
def deprecation_test(
    ctx: click.Context,
    private_key: str,
    rpc_provider: URI,
    wait: int,
    gas_price: int,
    gas_limit: int,
) -> None:
    """Turn on the deprecation switch and see channel opening fails

    Deploys the Raiden contracts with a registered token network, verifies a
    channel can be opened and funded, flips the TokenNetwork deprecation
    switch, and then verifies that further channel opening/deposits fail.
    """
    setup_ctx(
        ctx=ctx,
        private_key=private_key,
        password_file=None,
        rpc_provider=rpc_provider,
        wait=wait,
        gas_price=gas_price,
        gas_limit=gas_limit,
    )
    deployer = ctx.obj["deployer"]

    # We deploy the Raiden Network contracts and register a token network
    token_amount = MAX_ETH_CHANNEL_PARTICIPANT * 6
    channel_participant_deposit_limit = MAX_ETH_CHANNEL_PARTICIPANT
    token_network_deposit_limit = MAX_ETH_TOKEN_NETWORK
    (_, token_network, _) = deprecation_test_setup(
        deployer=deployer,
        token_amount=token_amount,
        channel_participant_deposit_limit=channel_participant_deposit_limit,
        token_network_deposit_limit=token_network_deposit_limit,
    )

    log.info("Checking that channels can be opened and deposits can be made.")

    # Check that we can open channels and deposit on behalf of A and B
    # Some arbitrary Ethereum addresses
    A = HexAddress(HexStr("0x6AA63296FA94975017244769F00F0c64DB7d7115"))
    B = HexAddress(HexStr("0xc9a4fad99B6d7D3e48D18d2585470cd8f27FA61e"))
    channel_identifier = open_and_deposit(A=A, B=B, token_network=token_network, deployer=deployer)

    # Fixed typo in log message: "Seding" -> "Sending".
    log.info("Sending transaction to activate the deprecation switch.")

    # Activate deprecation switch and confirm the flag actually flipped.
    assert token_network.functions.safety_deprecation_switch().call() is False
    txhash = deployer.transact(token_network.functions.deprecate())
    log.debug(f"Deprecation txHash={encode_hex(txhash)}")
    assert token_network.functions.safety_deprecation_switch().call() is True

    log.info("Checking that channels cannot be opened anymore and no more deposits are allowed.")

    # Check that we cannot open more channels or deposit
    C = HexAddress(HexStr("0x5a23cedB607684118ccf7906dF3e24Efd2964719"))
    D = HexAddress(HexStr("0x3827B9cDc68f061aa614F1b97E23664ef3b9220A"))
    open_and_deposit(
        A=C,
        B=D,
        token_network=token_network,
        deployer=deployer,
        channel_identifier=channel_identifier,
        txn_success_status=False,
    )
    log.info("Deprecation switch test OK.")
def string_to_ethereum_address(value: str) -> ChecksumEthAddress:
    """This is a conversion without any checks of a string to ethereum address

    Is only used for typing.
    """
    hex_address = HexAddress(HexStr(value))
    return ChecksumEthAddress(hex_address)
def _get_logs_in_block_range(
    self,
    address: Union[AddressType, List[AddressType]],
    abi: Union[List[EventABI], EventABI],
    start_block: Optional[int] = None,
    stop_block: Optional[int] = None,
    block_page_size: Optional[int] = None,
    event_parameters: Optional[Dict] = None,
):
    """Yield decoded logs for the given event ABI(s) in a block range.

    Args:
        address: one contract address or a list of them.
        abi: one ``EventABI`` or a list; logs are fetched per ABI.
        start_block: first block (defaults to 0).
        stop_block: last block (defaults to ``start_block + block_page_size``).
        block_page_size: range width used when ``stop_block`` is None
            (defaults to 100).
        event_parameters: indexed-argument filters, or an explicit
            ``"topics"`` entry that is used verbatim for every ABI.

    Raises:
        DecodingError: if a filter name is not an indexed topic of the event.

    Fixes vs. the previous version: the explicit ``"topics"`` entry was
    popped inside the per-ABI loop, so only the first ABI honored it and
    subsequent ABIs silently fell back to signature-derived topics; it is
    now popped once and applied to every ABI.  The unused ``abi_types``
    accumulator was removed and the loop variable no longer shadows the
    ``abi`` parameter.
    """
    start_block = start_block or 0
    abis = abi if isinstance(abi, (list, tuple)) else [abi]
    block_page_size = block_page_size or 100
    if stop_block is None:
        stop_block = start_block + block_page_size
    event_parameters = event_parameters or {}

    # Address normalization is loop-invariant; do it once, not per ABI.
    if not isinstance(address, (list, tuple)):
        address = [address]
    addresses = [
        self.conversion_manager.convert(a, AddressType) for a in address
    ]

    # Pop explicit topics exactly once so every ABI sees the same filter
    # (preserves the original's mutation of the caller-supplied dict).
    has_explicit_topics = "topics" in event_parameters
    explicit_topics = event_parameters.pop("topics", None)

    for event_abi in abis:
        log_filter: Dict = {
            "address": addresses,
            "fromBlock": start_block,
            "toBlock": stop_block,
            "topics": [],
        }
        if has_explicit_topics:
            log_filter["topics"] = explicit_topics
        else:
            # topic0 is the keccak hash of the event signature.
            event_signature_hash = add_0x_prefix(
                HexStr(keccak(text=event_abi.selector).hex()))
            log_filter["topics"] = [event_signature_hash]
            search_topics = []
            indexed_inputs = LogInputABICollection(event_abi, [
                abi_input for abi_input in event_abi.inputs
                if abi_input.indexed
            ])
            for name, arg in event_parameters.items():
                if hasattr(arg, "address"):
                    arg = self.conversion_manager.convert(arg, AddressType)
                abi_type = None
                for argument in indexed_inputs.values:
                    if argument.name == name:
                        abi_type = argument.type
                if not abi_type:
                    raise DecodingError(
                        f"'{name}' is not an indexed topic for event '{event_abi.name}'."
                    )
                search_topics.append(arg)
            encoded_topic_data = [
                encode_single(topic_type, topic_data).hex()  # type: ignore
                for topic_type, topic_data in zip(indexed_inputs.types, search_topics)
            ]
            log_filter["topics"].extend(encoded_topic_data)

        log_result = [
            dict(log) for log in self.web3.eth.get_logs(log_filter)
        ]  # type: ignore
        yield from self.network.ecosystem.decode_logs(event_abi, log_result)
def default(self, obj: Any) -> Union[Dict[Any, Any], HexStr]:
    """Serialize web3 ``AttributeDict`` and ``HexBytes`` values for JSON."""
    if isinstance(obj, AttributeDict):
        return dict(obj.items())
    if isinstance(obj, HexBytes):
        return HexStr(obj.hex())
    # Defer to the base encoder, which raises TypeError for unknown types.
    return json.JSONEncoder.default(self, obj)
def deserialize_ethereum_address(symbol: str) -> ChecksumEthAddress:
    """Cast a plain string to ``ChecksumEthAddress`` without validation."""
    as_hex = HexStr(symbol)
    return ChecksumEthAddress(HexAddress(as_hex))