async def faucet(self, to, value, *, from_private_key=FAUCET_PRIVATE_KEY,
                 startgas=None, gasprice=DEFAULT_GASPRICE, nonce=None,
                 data=b"", wait_on_confirmation=True):
    """Send `value` wei to `to` from the faucet (or `from_private_key`) account.

    A zero-length `to` address deploys `data` as a contract.  When
    `wait_on_confirmation` is true, polls the node until the transaction
    appears in a block before returning.  Returns the transaction hash
    (hex string).
    """
    if isinstance(from_private_key, str):
        from_private_key = data_decoder(from_private_key)
    from_address = private_key_to_address(from_private_key)
    ethclient = JsonRPCClient(config['ethereum']['url'])
    to = data_decoder(to)
    # 20 bytes is a normal address; 0 bytes means contract creation
    if len(to) not in (20, 0):
        raise Exception(
            'Addresses must be 20 or 0 bytes long (len was {})'.format(
                len(to)))
    if nonce is None:
        nonce = await ethclient.eth_getTransactionCount(from_address)
    balance = await ethclient.eth_getBalance(from_address)
    if startgas is None:
        startgas = await ethclient.eth_estimateGas(
            from_address, to, data=data, nonce=nonce, value=value,
            gasprice=gasprice)
    tx = Transaction(nonce, gasprice, startgas, to, value, data, 0, 0, 0)
    # make sure the sender can cover value plus the maximum possible gas cost
    if balance < (tx.value + (tx.startgas * tx.gasprice)):
        raise Exception("Faucet doesn't have enough funds")
    tx.sign(from_private_key)
    tx_encoded = data_encoder(rlp.encode(tx, Transaction))
    tx_hash = await ethclient.eth_sendRawTransaction(tx_encoded)
    # poll until the transaction is included in a block
    while wait_on_confirmation:
        resp = await ethclient.eth_getTransactionByHash(tx_hash)
        if resp is None or resp['blockNumber'] is None:
            await asyncio.sleep(0.1)
        else:
            break
    if to == b'':
        # contract deployment: report the newly created contract's address
        print("contract address: {}".format(data_encoder(tx.creates)))
    return tx_hash
async def _get_token_details(self, contract_address):
    """Get token details from the contract's metadata endpoints.

    Issues a bulk eth_call for balanceOf(user), name(), symbol() and
    decimals() against `contract_address` and returns a
    (balance, name, symbol, decimals) tuple, with None for any field
    that could not be fetched or decoded.  Returns None outright if the
    bulk call itself fails.
    """
    bulk = self.eth.bulk()
    balanceof_future = bulk.eth_call(
        to_address=contract_address,
        data="{}000000000000000000000000{}".format(
            ERC20_BALANCEOF_CALL_DATA,
            self.user_toshi_id[2:] if self.user_toshi_id is not None
            else "0000000000000000000000000000000000000000"))
    name_future = bulk.eth_call(to_address=contract_address,
                                data=ERC20_NAME_CALL_DATA)
    sym_future = bulk.eth_call(to_address=contract_address,
                               data=ERC20_SYMBOL_CALL_DATA)
    decimals_future = bulk.eth_call(to_address=contract_address,
                                    data=ERC20_DECIMALS_CALL_DATA)
    try:
        await bulk.execute()
    # FIX: was a bare `except:`, which in a coroutine also swallows
    # asyncio.CancelledError — catch Exception only
    except Exception:
        log.exception("failed getting token details")
        return None

    def _decode_result(future, abi_type, error_message, *, utf8=False):
        # Unwrap one bulk-call result and abi-decode it, returning None
        # (after logging) on empty or malformed data.
        raw = data_decoder(unwrap_or(future, "0x"))
        # NOTE: raw is bytes so the original `!= "0x"` comparison could
        # never match; an empty result decodes to b"" which is falsy.
        if not raw:
            return None
        try:
            value = decode_abi([abi_type], raw)[0]
            return value.decode('utf-8') if utf8 else value
        except Exception:
            log.exception(error_message.format(raw))
            return None

    balance = _decode_result(balanceof_future, 'uint256',
                             "Invalid erc20.balanceOf() result: {}")
    name = _decode_result(name_future, 'string',
                          "Invalid erc20.name() data: {}", utf8=True)
    symbol = _decode_result(sym_future, 'string',
                            "Invalid erc20.symbol() data: {}", utf8=True)
    decimals = _decode_result(decimals_future, 'uint256',
                              "Invalid erc20.decimals() data: {}")
    return balance, name, symbol, decimals
def test_personal_sign(self):
    """Signing a message must yield the known signature and recover cleanly."""
    message = "Hello world!"
    produced = personal_sign(TEST_PRIVATE_KEY, message)
    raw = data_decoder(produced)
    # the recovery byte of an ethereum signature is always 27 or 28
    self.assertTrue(raw[-1] in (27, 28),
                    "signature must be an ethereum signature")
    self.assertEqual("0x9ab94a7f9455231eabc3d8cb4e343e87d34d820dec276f4c89c56eb4c965cc855d63c7208cd72054e6f9bf792493debf8e03a80a511d508c4c2d3f8dff05655b1b", produced)
    self.assertTrue(personal_ecrecover(message, produced, TEST_ADDRESS))
def test_valid_recovery_unicode(self):
    """ecrecover must succeed for a payload containing non-ascii characters."""
    payload = '{"custom":{"about":"æ","location":""},"timestamp":1483964545,"username":"******"}'
    signature = data_decoder(
        '0xb3c61812e1e73f1a75cc9a2f5e748099378b7af2dd8bc3c1b4f0c067e6e9a4012d0c411b77bab63708b350742d41de574add6b06a3d06a5ae10fc9c63c18405301'
    )
    signer = '0x5249dc212cd9c16f107c50b6c893952d617c011e'
    self.assertTrue(ecrecover(payload, signature, signer))
def test_valid_recovery(self):
    """ecrecover must succeed for a plain ascii payload."""
    payload = '{"custom":{"about":"about ","location":"location "},"timestamp":1483968938,"username":"******"}'
    signature = data_decoder(
        '0xbd5c9009cc87c6d4ebb3ef8223fc036726bc311678890890619c787aa914d3b636aee82d885c6fb668233b5cc70ab09eea7051648f989e758ee09234f5340d9100'
    )
    signer = '0x5249dc212cd9c16f107c50b6c893952d617c011e'
    self.assertTrue(ecrecover(payload, signature, signer))
def __init__(self, name, contract, *, from_key=None, constant=None, return_raw_tx=False):
    """Wrap contract function `name` on `contract`.

    `from_key` may be a hex string or raw bytes private key; `constant`
    overrides the constantness reported by the contract translator;
    `return_raw_tx` makes calls return the raw transaction instead of
    sending it.
    """
    self.name = name
    self.contract = contract
    # TODO: forcing const seems to do nothing, since eth_call
    # will just return a tx_hash (on parity at least)
    if constant is None:
        self.is_constant = self.contract.translator.function_data[name][
            'is_constant']
    else:
        # force constantness of this function
        self.is_constant = constant
    if from_key:
        if isinstance(from_key, str):
            self.from_key = data_decoder(from_key)
        else:
            self.from_key = from_key
        # BUGFIX: derive the address from the *decoded* key; previously the
        # raw `from_key` (possibly still a hex string) was passed through,
        # unlike everywhere else in this file which decodes first.
        self.from_address = private_key_to_address(self.from_key)
    else:
        self.from_key = None
        self.from_address = None
    self.return_raw_tx = return_raw_tx
def test_add_signature_to_transaction_with_netowrk_id(self):
    """Signature attachment must round-trip for each tested network id.

    BUGFIX: the body previously reassigned `network_id = 1` inside the
    loop, so networks 2, 66 and 100 were never actually exercised.
    """
    for network_id in [1, 2, 66, 100]:
        sender_private_key = "0x0164f7c7399f4bb1eafeaae699ebbb12050bc6a50b2836b9ca766068a9d000c0"
        sender_address = "0xde3d2d9dd52ea80f7799ef4791063a5458d13913"
        to_address = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf"
        value = 10000000000
        nonce = 1048576
        data = b''
        gasprice = DEFAULT_GASPRICE
        startgas = DEFAULT_STARTGAS
        tx1 = Transaction(nonce, gasprice, startgas, to_address, value, data, network_id, 0, 0)
        tx = encode_transaction(tx1)
        tx1.sign(data_decoder(sender_private_key), network_id=network_id)
        expected_signed_tx = encode_transaction(tx1)
        sig = data_encoder(signature_from_transaction(tx1))
        # adding the signature to the encoded tx must match signing directly
        signed_tx = add_signature_to_transaction(tx, sig)
        self.assertEqual(signed_tx, expected_signed_tx)
        # and the same must hold when adding it to a decoded tx object
        tx_obj = decode_transaction(tx)
        add_signature_to_transaction(tx_obj, sig)
        self.assertEqual(tx_obj.network_id, network_id)
        self.assertEqual(data_encoder(tx_obj.sender), sender_address)
        self.assertEqual(encode_transaction(tx_obj), expected_signed_tx)
async def deploy_contract(self, bytecode, *, from_private_key=FAUCET_PRIVATE_KEY,
                          startgas=None, gasprice=DEFAULT_GASPRICE,
                          wait_on_confirmation=True):
    """Deploy `bytecode` as a contract from the faucet (or given) account.

    Estimates gas up front; if `startgas` is supplied and smaller than the
    estimate the deploy is aborted.  When `wait_on_confirmation` is true,
    polls until the transaction is mined and verifies code exists at the
    contract address.  Returns (tx_hash, contract_address).
    """
    if isinstance(from_private_key, str):
        from_private_key = data_decoder(from_private_key)
    from_address = private_key_to_address(from_private_key)
    ethclient = JsonRPCClient(config['ethereum']['url'])
    nonce = await ethclient.eth_getTransactionCount(from_address)
    balance = await ethclient.eth_getBalance(from_address)
    gasestimate = await ethclient.eth_estimateGas(
        from_address, '', data=bytecode, nonce=nonce, value=0,
        gasprice=gasprice)
    if startgas is None:
        startgas = gasestimate
    elif gasestimate > startgas:
        raise Exception(
            "Estimated gas usage is larger than the provided gas")
    # empty `to` address means contract creation
    tx = Transaction(nonce, gasprice, startgas, '', 0, bytecode, 0, 0, 0)
    if balance < (tx.value + (tx.startgas * tx.gasprice)):
        raise Exception("Faucet doesn't have enough funds")
    tx.sign(from_private_key)
    tx_encoded = data_encoder(rlp.encode(tx, Transaction))
    tx_hash = await ethclient.eth_sendRawTransaction(tx_encoded)
    contract_address = data_encoder(tx.creates)
    # poll until mined, then confirm the deploy actually left code behind
    while wait_on_confirmation:
        resp = await ethclient.eth_getTransactionByHash(tx_hash)
        if resp is None or resp['blockNumber'] is None:
            await asyncio.sleep(0.1)
        else:
            code = await ethclient.eth_getCode(contract_address)
            if code == '0x':
                raise Exception("Failed to deploy contract")
            break
    return tx_hash, contract_address
def test_encode_decode_transaction(self):
    """Round-trip a transaction through rlp encode/decode and verify hashes."""
    priv_key = "0x0164f7c7399f4bb1eafeaae699ebbb12050bc6a50b2836b9ca766068a9d000c0"
    expected_sender = "0xde3d2d9dd52ea80f7799ef4791063a5458d13913"
    recipient = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf"
    amount = 10000000000
    tx_nonce = 1048576
    payload = b''
    price = 20000000000
    gas_limit = DEFAULT_STARTGAS
    expected_tx_hash = "0x2f321aa116146a9bc62b61c76508295f708f42d56340c9e613ebfc27e33f240c"

    # baseline: signing directly yields the known hash
    signed = Transaction(tx_nonce, price, gas_limit, recipient, amount, payload)
    signed.sign(data_decoder(priv_key))
    self.assertEqual(data_encoder(signed.hash), expected_tx_hash)

    # round-trip an unsigned transaction through rlp, then sign the result
    unsigned = Transaction(tx_nonce, price, gas_limit, recipient, amount, payload)
    encoded = rlp.encode(unsigned, UnsignedTransaction)
    decoded = rlp.decode(encoded, UnsignedTransaction)
    decoded.sign(data_decoder(priv_key))
    rebuilt = Transaction(decoded.nonce, decoded.gasprice, decoded.startgas,
                          decoded.to, decoded.value, decoded.data)
    rebuilt.sign(data_decoder(priv_key))
    self.assertEqual(data_encoder(rebuilt.sender), expected_sender)
    self.assertEqual(data_encoder(rebuilt.hash), expected_tx_hash)
    self.assertEqual(data_encoder(decoded.sender), expected_sender)
    # NOTE: `decoded` still believes it is an unsigned tx, so its hash
    # excludes the signature fields; if this assertion suddenly starts
    # failing, the library's behaviour has changed.
    self.assertNotEqual(data_encoder(decoded.hash), expected_tx_hash)
def database_transaction_to_rlp_transaction(transaction):
    """Build an rlp transaction object from a database transaction row."""
    return create_transaction(
        nonce=transaction['nonce'],
        gasprice=parse_int(transaction['gas_price']),
        startgas=parse_int(transaction['gas']),
        to=transaction['to_address'],
        value=parse_int(transaction['value']),
        data=data_decoder(transaction['data']),
        v=parse_int(transaction['v']),
        r=parse_int(transaction['r']),
        s=parse_int(transaction['s']))
async def test_create_transaction_with_large_data(self):
    """A /tx/skel request with a large data payload must succeed and the
    returned skeleton must be signable."""
    body = {
        "from": "0x0004DE837Ea93edbE51c093f45212AB22b4B35fc",
        "to": "0xa0c4d49fe1a00eb5ee3d85dc7a287d84d8c66699",
        "value": 0,
        "data": "0x94d9cf8f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000003c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
    }
    resp = await self.fetch("/tx/skel", method="POST", body=body)
    self.assertEqual(resp.code, 200)
    tx = sign_transaction(json_decode(resp.body)['tx'], FAUCET_PRIVATE_KEY)
    # NOTE(review): the prints below look like leftover debugging output;
    # they make no assertions — consider removing them
    print(", ".join(["0x{:02x}".format(b) for b in data_decoder(tx)]))
    print(FAUCET_ADDRESS)
    print(", ".join(["0x{:02x}".format(b) for b in decode_transaction(tx).hash]))
async def create_transaction_skeleton(self, *, to_address, from_address, value=0, nonce=None, gas=None, gas_price=None, data=None):
    """Build an unsigned transaction from the given parameters.

    Validates addresses (including EIP-55 checksums for mixed-case input),
    fills in nonce / gas / gas price from the node, redis cache or config
    defaults when not supplied, and returns a dict with the encoded tx and
    the hex-encoded gas, gas_price, nonce and value used.

    Raises JsonRPCInvalidParamsError for any invalid or un-estimatable
    input.
    """
    if not validate_address(from_address):
        raise JsonRPCInvalidParamsError(data={'id': 'invalid_from_address', 'message': 'Invalid From Address'})
    if to_address is not None and not validate_address(to_address):
        raise JsonRPCInvalidParamsError(data={'id': 'invalid_to_address', 'message': 'Invalid To Address'})
    # a mixed-case address must pass the checksum test
    if from_address != from_address.lower() and not checksum_validate_address(from_address):
        raise JsonRPCInvalidParamsError(data={'id': 'invalid_from_address', 'message': 'Invalid From Address Checksum'})
    if to_address is not None and to_address != to_address.lower() and not checksum_validate_address(to_address):
        raise JsonRPCInvalidParamsError(data={'id': 'invalid_to_address', 'message': 'Invalid To Address Checksum'})
    if value:
        value = parse_int(value)
        if value is None or value < 0:
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_value', 'message': 'Invalid Value'})
    # check optional arguments

    # check if we should ignore the given gasprice
    # NOTE: only meant to be here while cryptokitty fever is pushing
    # up gas prices... this shouldn't be permanent
    # anytime the nonce is also set, use the provided gas (this is to
    # support easier overwriting of transactions)
    if gas_price is not None and nonce is None:
        async with self.db:
            whitelisted = await self.db.fetchrow("SELECT 1 FROM from_address_gas_price_whitelist WHERE address = $1", from_address)
            if not whitelisted:
                whitelisted = await self.db.fetchrow("SELECT 1 FROM to_address_gas_price_whitelist WHERE address = $1", to_address)
        if not whitelisted:
            gas_price = None
    if nonce is None:
        # check cache for nonce
        nonce = await self.get_transaction_count(from_address)
    else:
        nonce = parse_int(nonce)
        if nonce is None:
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_nonce', 'message': 'Invalid Nonce'})
    if data is not None:
        if isinstance(data, int):
            data = hex(data)
        if isinstance(data, str):
            try:
                data = data_decoder(data)
            except binascii.Error:
                pass
        if not isinstance(data, bytes):
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_data', 'message': 'Invalid Data field'})
    else:
        data = b''
    if gas is None:
        try:
            gas = await self.eth.eth_estimateGas(from_address, to_address, data=data, value=value)
        except JsonRPCError:
            # this can occur if sending a transaction to a contract that doesn't match a valid method
            # and the contract has no default method implemented
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_data', 'message': 'Unable to estimate gas for contract call'})
    else:
        gas = parse_int(gas)
        if gas is None:
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_gas', 'message': 'Invalid Gas'})
    if gas_price is None:
        # try and use cached gas station gas price
        gas_station_gas_price = self.redis.get('gas_station_standard_gas_price')
        if gas_station_gas_price:
            gas_price = parse_int(gas_station_gas_price)
        if gas_price is None:
            gas_price = self.application.config['ethereum'].getint('default_gasprice', DEFAULT_GASPRICE)
    else:
        gas_price = parse_int(gas_price)
        if gas_price is None:
            raise JsonRPCInvalidParamsError(data={'id': 'invalid_gas_price',
                                                  'message': 'Invalid Gas Price'})
    try:
        tx = create_transaction(nonce=nonce, gasprice=gas_price, startgas=gas,
                                to=to_address, value=value, data=data,
                                network_id=self.network_id)
    except InvalidTransaction as e:
        raise JsonRPCInvalidParamsError(data={'id': 'invalid_transaction', 'message': str(e)})
    # reject transactions whose gas cannot even cover the intrinsic cost
    if tx.intrinsic_gas_used > gas:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'.format(
                tx.intrinsic_gas_used, gas)})
    transaction = encode_transaction(tx)
    return {"tx": transaction, "gas": hex(gas), "gas_price": hex(gas_price), "nonce": hex(nonce), "value": hex(value)}
async def create_transaction_skeleton(self, *, to_address, from_address, value=0,
                                      nonce=None, gas=None, gas_price=None,
                                      data=None, token_address=None):
    """Build an unsigned transaction skeleton.

    Like the plain variant, but `token_address` turns the request into an
    erc20 transfer: `value` (or the string "max") is interpreted in token
    units, the calldata becomes an erc20 transfer() call, and `to_address`
    is replaced by the token contract.  Without a token, value "max" sends
    the whole ether balance minus the gas cost (probing contract fallback
    functions for a stable gas estimate).  Returns a dict with the encoded
    tx and the hex gas, gas_price, nonce and value (token units when a
    token is used).
    """
    if not validate_address(from_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_from_address',
            'message': 'Invalid From Address'
        })
    if to_address is not None and not validate_address(to_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_to_address',
            'message': 'Invalid To Address'
        })
    # a mixed-case address must pass the EIP-55 checksum test
    if from_address != from_address.lower(
    ) and not checksum_validate_address(from_address):
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_from_address',
                'message': 'Invalid From Address Checksum'
            })
    if to_address is not None and to_address != to_address.lower(
    ) and not checksum_validate_address(to_address):
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_to_address',
                'message': 'Invalid To Address Checksum'
            })
    # check if we should ignore the given gasprice
    # NOTE: only meant to be here while cryptokitty fever is pushing
    # up gas prices... this shouldn't be permanent
    # anytime the nonce is also set, use the provided gas (this is to
    # support easier overwriting of transactions)
    if gas_price is not None and nonce is None:
        async with self.db:
            whitelisted = await self.db.fetchrow(
                "SELECT 1 FROM from_address_gas_price_whitelist WHERE address = $1",
                from_address)
            if not whitelisted:
                whitelisted = await self.db.fetchrow(
                    "SELECT 1 FROM to_address_gas_price_whitelist WHERE address = $1",
                    to_address)
        if not whitelisted:
            gas_price = None
    if gas_price is None:
        # try and use cached gas station gas price
        gas_station_gas_price = await self.redis.get(
            'gas_station_standard_gas_price')
        if gas_station_gas_price:
            gas_price = parse_int(gas_station_gas_price)
        if gas_price is None:
            gas_price = config['ethereum'].getint('default_gasprice',
                                                  DEFAULT_GASPRICE)
    else:
        gas_price = parse_int(gas_price)
        if gas_price is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas_price',
                'message': 'Invalid Gas Price'
            })
    if gas is not None:
        gas = parse_int(gas)
        if gas is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas',
                'message': 'Invalid Gas'
            })
    if nonce is None:
        # check cache for nonce
        nonce = await self.get_transaction_count(from_address)
    else:
        nonce = parse_int(nonce)
        if nonce is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Invalid Nonce'
            })
    if data is not None:
        if isinstance(data, int):
            data = hex(data)
        if isinstance(data, str):
            try:
                data = data_decoder(data)
            except binascii.Error:
                pass
        if not isinstance(data, bytes):
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_data',
                'message': 'Invalid Data field'
            })
    else:
        data = b''
    # flag to force arguments into an erc20 token transfer
    if token_address is not None:
        if not validate_address(token_address):
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_token_address',
                    'message': 'Invalid Token Address'
                })
        if data != b'':
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'bad_arguments',
                    'message': 'Cannot include both data and token_address'
                })
        if isinstance(value, str) and value.lower() == "max":
            # get the balance in the database
            async with self.db:
                value = await self.db.fetchval(
                    "SELECT value FROM token_balances "
                    "WHERE contract_address = $1 AND eth_address = $2",
                    token_address, from_address)
            if value is None:
                # get the value from the ethereum node
                # (erc20 balanceOf(address) selector + padded address)
                data = "0x70a08231000000000000000000000000" + from_address[
                    2:].lower()
                value = await self.eth.eth_call(to_address=token_address,
                                                data=data)
        value = parse_int(value)
        if value is None or value < 0:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_value',
                'message': 'Invalid Value'
            })
        # erc20 transfer(address,uint256) selector + padded args
        data = data_decoder(
            "0xa9059cbb000000000000000000000000{}{:064x}".format(
                to_address[2:].lower(), value))
        token_value = value
        value = 0
        to_address = token_address
    elif value:
        # NOTE(review): this branch compares `value == "max"` case-sensitively,
        # while the token branch above accepts any case — confirm intended
        if value == "max":
            network_balance, balance, _, _ = await self.get_balances(
                from_address)
            if gas is None:
                code = await self.eth.eth_getCode(to_address)
                if code:
                    # we might have to do some work
                    try:
                        gas = await self.eth.eth_estimateGas(from_address,
                                                             to_address,
                                                             data=data,
                                                             value=0)
                    except JsonRPCError:
                        # no fallback function implemented in the contract means no ether can be sent to it
                        raise JsonRPCInvalidParamsError(
                            data={
                                'id': 'invalid_to_address',
                                'message': 'Cannot send payments to that address'
                            })
                    attempts = 0
                    # because the default function could do different things based on the eth sent, we make sure
                    # the value is suitable. if we get different values 3 times abort
                    while True:
                        if attempts > 2:
                            log.warning(
                                "Hit max attempts trying to get max value to send to contract '{}'"
                                .format(to_address))
                            raise JsonRPCInvalidParamsError(
                                data={
                                    'id': 'invalid_to_address',
                                    'message': 'Cannot send payments to that address'
                                })
                        value = balance - (gas_price * gas)
                        try:
                            gas_with_value = await self.eth.eth_estimateGas(
                                from_address, to_address, data=data,
                                value=value)
                        except JsonRPCError:
                            # no fallback function implemented in the contract means no ether can be sent to it
                            raise JsonRPCInvalidParamsError(
                                data={
                                    'id': 'invalid_to_address',
                                    'message': 'Cannot send payments to that address'
                                })
                        if gas_with_value != gas:
                            gas = gas_with_value
                            attempts += 1
                            continue
                        else:
                            break
                else:
                    # normal address, 21000 gas per transaction
                    gas = 21000
                    value = balance - (gas_price * gas)
            else:
                # preset gas, run with it!
                value = balance - (gas_price * gas)
        else:
            value = parse_int(value)
            if value is None or value < 0:
                raise JsonRPCInvalidParamsError(data={
                    'id': 'invalid_value',
                    'message': 'Invalid Value'
                })
    if gas is None:
        try:
            gas = await self.eth.eth_estimateGas(from_address, to_address,
                                                 data=data, value=value)
        except JsonRPCError:
            # this can occur if sending a transaction to a contract that doesn't match a valid method
            # and the contract has no default method implemented.
            # this can also happen if the current state of the blockchain means that submitting the
            # transaction would fail (abort).
            if token_address is not None:
                # when dealing with erc20, this usually means the user's balance for that token isn't
                # high enough, check that and throw an error if it's the case, and if not fall
                # back to the standard invalid_data error
                async with self.db:
                    bal = await self.db.fetchval(
                        "SELECT value FROM token_balances "
                        "WHERE contract_address = $1 AND eth_address = $2",
                        token_address, from_address)
                if bal is not None:
                    bal = parse_int(bal)
                    if bal < token_value:
                        raise JsonRPCInsufficientFundsError(
                            data={
                                'id': 'insufficient_funds',
                                'message': 'Insufficient Funds'
                            })
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_data',
                    'message': 'Unable to estimate gas for contract call'
                })
    # if data is present, buffer gas estimate by 20%
    if len(data) > 0:
        gas = int(gas * 1.2)
    try:
        tx = create_transaction(nonce=nonce, gasprice=gas_price,
                                startgas=gas, to=to_address, value=value,
                                data=data, network_id=self.network_id)
    except InvalidTransaction as e:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': str(e)
        })
    # reject transactions whose gas cannot even cover the intrinsic cost
    if tx.intrinsic_gas_used > gas:
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_transaction',
                'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'
                .format(tx.intrinsic_gas_used, gas)
            })
    transaction = encode_transaction(tx)
    return {
        "tx": transaction,
        "gas": hex(gas),
        "gas_price": hex(gas_price),
        "nonce": hex(nonce),
        # report the token amount when this was an erc20 transfer
        "value": hex(token_value) if token_address else hex(value)
    }
async def send_transaction(self, *, tx, signature=None):
    """Validate, record and queue a signed transaction for sending.

    `tx` is an encoded transaction (signed or unsigned); an unsigned tx
    requires `signature`.  Performs signature, network id, nonce, balance
    and intrinsic-gas checks, supports overwriting a pending tx with a
    higher gas price, records erc20 transfers, and hands the transaction
    to the queue processor.  Returns the transaction hash.
    """
    try:
        tx = decode_transaction(tx)
    except:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': 'Invalid Transaction'
        })
    if is_transaction_signed(tx):
        tx_sig = data_encoder(signature_from_transaction(tx))
        if signature:
            # a signature supplied alongside a signed tx must match it
            if tx_sig != signature:
                raise JsonRPCInvalidParamsError(
                    data={
                        'id': 'invalid_signature',
                        'message': 'Invalid Signature: Signature in payload and signature of transaction do not match'
                    })
        else:
            signature = tx_sig
    else:
        if signature is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'missing_signature',
                'message': 'Missing Signature'
            })
        if not validate_signature(signature):
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_signature',
                    'message':
                    'Invalid Signature: {}'.format('Invalid length' if len(
                        signature) != 132 else 'Invalid hex value')
                })
        try:
            sig = data_decoder(signature)
        except Exception:
            log.exception(
                "Unexpected error decoding valid signature: {}".format(
                    signature))
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_signature',
                'message': 'Invalid Signature'
            })
        add_signature_to_transaction(tx, sig)
    # validate network id, if it's not for "all networks"
    if tx.network_id is not None and self.network_id != tx.network_id:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_network_id',
            'message': 'Invalid Network ID'
        })
    from_address = data_encoder(tx.sender)
    to_address = data_encoder(tx.to)
    # prevent spamming of transactions with the same nonce from the same sender
    async with RedisLock("{}:{}".format(from_address, tx.nonce),
                         raise_when_locked=partial(
                             JsonRPCInvalidParamsError,
                             data={
                                 'id': 'invalid_nonce',
                                 'message': 'Nonce already used'
                             }),
                         ex=5):
        # check for transaction overwriting
        async with self.db:
            existing = await self.db.fetchrow(
                "SELECT * FROM transactions WHERE "
                "from_address = $1 AND nonce = $2 AND "
                "(status != 'error' or status is NULL)", from_address,
                tx.nonce)
        # disallow transaction overwriting when the gas is lower or the
        # transaction is confirmed
        if existing and (parse_int(existing['gas_price']) >= tx.gasprice
                         or existing['status'] == 'confirmed'):
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Nonce already used'
            })
        # make sure the account has enough funds for the transaction
        network_balance, balance, _, _ = await self.get_balances(
            from_address)
        if existing:
            # an overwritten tx frees up the funds it had reserved
            balance += parse_int(existing['value']) + parse_int(
                existing['gas']) * parse_int(existing['gas_price'])
        if balance < (tx.value + (tx.startgas * tx.gasprice)):
            raise JsonRPCInsufficientFundsError(
                data={
                    'id': 'insufficient_funds',
                    'message': 'Insufficient Funds'
                })
        # validate the nonce (only necessary if tx doesn't already exist)
        if not existing:
            c_nonce = await self.get_transaction_count(from_address)
            if tx.nonce < c_nonce:
                raise JsonRPCInvalidParamsError(
                    data={
                        'id': 'invalid_nonce',
                        'message': 'Provided nonce is too low'
                    })
            if tx.nonce > c_nonce:
                raise JsonRPCInvalidParamsError(
                    data={
                        'id': 'invalid_nonce',
                        'message': 'Provided nonce is too high'
                    })
        if tx.intrinsic_gas_used > tx.startgas:
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_transaction',
                    'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'
                    .format(tx.intrinsic_gas_used, tx.startgas)
                })
        # now this tx fits enough of the criteria to allow it
        # onto the transaction queue
        tx_hash = calculate_transaction_hash(tx)
        if existing:
            log.info(
                "Setting tx '{}' to error due to forced overwrite".format(
                    existing['hash']))
            manager_dispatcher.update_transaction(
                existing['transaction_id'], 'error')
        data = data_encoder(tx.data)
        # erc20 transfer (0xa9059cbb) or transferFrom (0x23b872dd) calldata?
        if data and \
           ((data.startswith("0xa9059cbb") and len(data) == 138) or \
            (data.startswith("0x23b872dd") and len(data) == 202)):
            # check if the token is a known erc20 token
            async with self.db:
                erc20_token = await self.db.fetchrow(
                    "SELECT * FROM tokens WHERE contract_address = $1",
                    to_address)
        else:
            erc20_token = False
        # add tx to database
        async with self.db:
            db_tx = await self.db.fetchrow(
                "INSERT INTO transactions "
                "(hash, from_address, to_address, nonce, "
                "value, gas, gas_price, "
                "data, v, r, s, "
                "sender_toshi_id) "
                "VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) "
                "RETURNING transaction_id", tx_hash, from_address,
                to_address, tx.nonce, hex(tx.value), hex(tx.startgas),
                hex(tx.gasprice), data_encoder(tx.data), hex(tx.v),
                hex(tx.r), hex(tx.s), self.user_toshi_id)
            if erc20_token:
                token_value = int(data[-64:], 16)
                if data.startswith("0x23b872dd"):
                    # transferFrom: from and to are both in the calldata
                    erc20_from_address = "0x" + data[34:74]
                    erc20_to_address = "0x" + data[98:138]
                else:
                    # transfer: sender is the tx sender
                    erc20_from_address = from_address
                    erc20_to_address = "0x" + data[34:74]
                await self.db.execute(
                    "INSERT INTO token_transactions "
                    "(transaction_id, transaction_log_index, contract_address, from_address, to_address, value) "
                    "VALUES ($1, $2, $3, $4, $5, $6)",
                    db_tx['transaction_id'], 0,
                    erc20_token['contract_address'], erc20_from_address,
                    erc20_to_address, hex(token_value))
            await self.db.commit()
        # trigger processing the transaction queue
        manager_dispatcher.process_transaction_queue(from_address)
        # analytics
        # use notification registrations to try find toshi ids for users
        if self.user_toshi_id:
            sender_toshi_id = self.user_toshi_id
        else:
            async with self.db:
                sender_toshi_id = await self.db.fetchval(
                    "SELECT toshi_id FROM notification_registrations WHERE "
                    "eth_address = $1", from_address)
        async with self.db:
            receiver_toshi_id = await self.db.fetchval(
                "SELECT toshi_id FROM notification_registrations WHERE "
                "eth_address = $1", to_address)
        self.track(sender_toshi_id, "Sent transaction")
        # it doesn't make sense to add user agent here as we
        # don't know the receiver's user agent
        self.track(receiver_toshi_id, "Received transaction",
                   add_user_agent=False)
    return tx_hash
from tornado.escape import json_decode from tornado.testing import gen_test from tornado.platform.asyncio import to_asyncio_future from tornado.ioloop import IOLoop from toshiid.app import urls from toshiid.handlers import AVATAR_URL_HASH_LENGTH from toshi.test.moto_server import requires_moto, BotoTestMixin from toshi.analytics import encode_id from toshi.test.database import requires_database from toshi.test.base import AsyncHandlerTest from toshi.ethereum.utils import data_decoder from PIL import Image TEST_PRIVATE_KEY = data_decoder( "0xe8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35") TEST_ADDRESS = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf" TEST_PAYMENT_ADDRESS = "0x444433335555ffffaaaa222211119999ffff7777" TEST_ADDRESS_2 = "0x056db290f8ba3250ca64a45d16284d04bc000000" def body_producer(boundary, files): buf = BytesIO() write = buf.write for (filename, data) in files: write('--{}\r\n'.format(boundary).encode('utf-8')) write('Content-Disposition: form-data; name="{}"; filename="{}"\r\n'. format(filename, filename).encode('utf-8'))
async def process_block_for_contract(self, collectible_address):
    """Process the next chunk (up to 1000 blocks) of transfer events for the
    given collectible contract and update token ownership records.

    Re-schedules itself until processing catches up with the latest block.
    Uses `self._processing` as a re-entrancy guard per collectible.
    """
    if collectible_address in self._processing:
        return
    self._processing[collectible_address] = True
    async with self.pool.acquire() as con:
        latest_block_number = await con.fetchval(
            "SELECT blocknumber FROM last_blocknumber")
        collectible = await con.fetchrow(
            "SELECT * FROM collectibles WHERE contract_address = $1",
            collectible_address)
        if collectible['type'] == 1:
            # custom event definitions stored in the database
            events = await con.fetch("SELECT * FROM collectible_transfer_events "
                                     "WHERE collectible_address = $1",
                                     collectible_address)
        elif collectible['type'] == 721:
            # use default erc721 event
            # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
            events = [{
                'collectible_address': collectible_address,
                'contract_address': collectible_address,
                'name': 'Transfer',
                'topic_hash': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                'arguments': ['address', 'address', 'uint256'],
                'indexed_arguments': [True, True, False],
                'to_address_offset': 1,
                'token_id_offset': 2
            }]
        # NOTE(review): any other `type` leaves `events` unbound and raises
        # NameError below, leaving the _processing flag stuck — confirm all
        # rows are type 1 or 721
    from_block_number = collectible['last_block'] + 1
    if latest_block_number < from_block_number:
        # nothing new to process yet
        del self._processing[collectible_address]
        return
    # process at most 1000 blocks per pass
    to_block_number = min(from_block_number + 1000, latest_block_number)
    updates = {}
    for event in events:
        contract_address = event['contract_address']
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=[[event['topic_hash']]],
                    address=contract_address)
                break
            # FIX: was a bare `except:`, which also caught
            # asyncio.CancelledError and made this retry loop un-cancellable
            except Exception:
                log.exception("error getting logs for block")
                continue
        if len(logs):
            for _log in logs:
                indexed_data = _log['topics'][1:]
                data_types = [t for t, i in zip(event['arguments'],
                                                event['indexed_arguments'])
                              if i is False]
                try:
                    data = decode_abi(data_types, data_decoder(_log['data']))
                except Exception:
                    log.exception("Error decoding log data: {} {}".format(
                        data_types, _log['data']))
                    del self._processing[collectible_address]
                    return
                # rebuild the full argument list, pulling indexed values
                # from the topics and the rest from the decoded data
                arguments = []
                try:
                    for t, i in zip(event['arguments'],
                                    event['indexed_arguments']):
                        if i is True:
                            arguments.append(decode_single(
                                process_type(t),
                                data_decoder(indexed_data.pop(0))))
                        else:
                            arguments.append(data.pop(0))
                except Exception:
                    log.exception("Error compiling event data")
                to_address = arguments[event['to_address_offset']]
                token_id = parse_int(arguments[event['token_id_offset']])
                log.debug("{} #{} -> {} -> {}".format(
                    collectible['name'], token_id, event['name'], to_address))
                token_image = config['collectibles']['image_format'].format(
                    contract_address=collectible_address, token_id=token_id)
                # keyed by token id so only the latest transfer in this
                # block range wins
                updates[hex(token_id)] = (collectible_address, hex(token_id),
                                          to_address, token_image)
    if len(updates) > 0:
        async with self.pool.acquire() as con:
            await con.executemany(
                "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, image) "
                "VALUES ($1, $2, $3, $4) "
                "ON CONFLICT (contract_address, token_id) DO UPDATE "
                "SET owner_address = EXCLUDED.owner_address",
                list(updates.values()))
    ready = collectible['ready'] or to_block_number == latest_block_number
    self.last_block = to_block_number
    async with self.pool.acquire() as con:
        await con.execute(
            "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
            to_block_number, ready, collectible_address)
    del self._processing[collectible_address]
    #log.info("Processed blocks #{} -> #{} for {} in {} seconds".format(
    #    from_block_number, to_block_number, collectible['name'], time.time() - starttime))
    if to_block_number < latest_block_number:
        # BUGFIX: previously rescheduled with `contract_address` (the last
        # event's contract — possibly unbound or a sub-contract for type 1
        # collectibles) instead of the collectible being processed
        asyncio.ensure_future(self.process_block_for_contract(collectible_address))
def verify_request(self):
    """Verifies that the signature and the payload match the expected address
    raising a JSONHTTPError (400) if something is wrong with the request.

    The signer address, signature and timestamp are each resolved from, in
    priority order: the Toshi-* request header, the toshi query argument,
    the Token-* request header, or the token query argument (the Token-*
    variants are presumably legacy fallbacks — TODO confirm).

    Returns the verified signer address on success.
    """
    # resolve the address the request claims to be signed by
    if TOSHI_ID_ADDRESS_HEADER in self.request.headers:
        expected_address = self.request.headers[TOSHI_ID_ADDRESS_HEADER]
    elif self.get_argument(TOSHI_ID_ADDRESS_QUERY_ARG, None):
        expected_address = self.get_argument(TOSHI_ID_ADDRESS_QUERY_ARG)
    elif TOKEN_ID_ADDRESS_HEADER in self.request.headers:
        expected_address = self.request.headers[TOKEN_ID_ADDRESS_HEADER]
    elif self.get_argument(TOKEN_ID_ADDRESS_QUERY_ARG, None):
        expected_address = self.get_argument(TOKEN_ID_ADDRESS_QUERY_ARG)
    else:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'bad_arguments',
                'message': 'Missing Toshi-ID-Address'
            }]
        })

    # resolve the request signature
    if TOSHI_SIGNATURE_HEADER in self.request.headers:
        signature = self.request.headers[TOSHI_SIGNATURE_HEADER]
    elif self.get_argument(TOSHI_SIGNATURE_QUERY_ARG, None):
        signature = self.get_argument(TOSHI_SIGNATURE_QUERY_ARG)
    elif TOKEN_SIGNATURE_HEADER in self.request.headers:
        signature = self.request.headers[TOKEN_SIGNATURE_HEADER]
    elif self.get_argument(TOKEN_SIGNATURE_QUERY_ARG, None):
        signature = self.get_argument(TOKEN_SIGNATURE_QUERY_ARG)
    else:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'bad_arguments',
                'message': 'Missing Toshi-Signature'
            }]
        })

    # resolve the timestamp the signature was generated at
    if TOSHI_TIMESTAMP_HEADER in self.request.headers:
        timestamp = self.request.headers[TOSHI_TIMESTAMP_HEADER]
    elif self.get_argument(TOSHI_TIMESTAMP_QUERY_ARG, None):
        timestamp = self.get_argument(TOSHI_TIMESTAMP_QUERY_ARG)
    elif TOKEN_TIMESTAMP_HEADER in self.request.headers:
        timestamp = self.request.headers[TOKEN_TIMESTAMP_HEADER]
    elif self.get_argument(TOKEN_TIMESTAMP_QUERY_ARG, None):
        timestamp = self.get_argument(TOKEN_TIMESTAMP_QUERY_ARG)
    else:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'bad_arguments',
                'message': 'Missing Toshi-Timestamp'
            }]
        })

    # the timestamp must parse as an integer
    timestamp = parse_int(timestamp)
    if timestamp is None:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_timestamp',
                'message': 'Given Toshi-Timestamp is invalid'
            }]
        })

    if not validate_address(expected_address):
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_id_address',
                'message': 'Invalid Toshi-ID-Address'
            }]
        })

    # validate the signature's shape before attempting to decode it
    if not validate_signature(signature):
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_signature',
                'message': 'Invalid Toshi-Signature'
            }]
        })

    try:
        signature = data_decoder(signature)
    except Exception:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_signature',
                'message': 'Invalid Toshi-Signature'
            }]
        })

    # rebuild the exact string the client was expected to sign
    verb = self.request.method
    uri = self.request.path
    if self.request.body:
        datahash = self.request.body
    else:
        datahash = ""
    data_string = generate_request_signature_data_string(verb, uri, timestamp, datahash)

    # recover the signer from the signature and compare with the claimed address
    if not ecrecover(data_string, signature, expected_address):
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_signature',
                'message': 'Invalid Toshi-Signature'
            }]
        })

    # reject requests whose timestamp is too far from the current time
    # (replay-protection window of TIMESTAMP_EXPIRY seconds)
    if abs(int(time.time()) - timestamp) > TIMESTAMP_EXPIRY:
        raise JSONHTTPError(400, body={
            'errors': [{
                'id': 'invalid_timestamp',
                'message': 'The difference between the timestamp and the current time is too large'
            }]
        })

    return expected_address
async def send_transaction(self, *, tx, signature=None):
    """Validate a user-submitted transaction and queue it for sending.

    The transaction may arrive already signed, or unsigned with a separate
    ``signature`` argument. After validating the signature, network id,
    sender balance and nonce, the transaction is stored in the ``transactions``
    table and the per-sender transaction queue is triggered.

    :param tx: rlp/hex encoded transaction (decoded via ``decode_transaction``).
    :param signature: optional hex signature; required when ``tx`` is unsigned.
    :returns: the transaction hash of the queued transaction.
    :raises JsonRPCInvalidParamsError: on any validation failure.
    :raises JsonRPCInsufficientFundsError: when the sender cannot cover value + gas.
    """
    try:
        tx = decode_transaction(tx)
    except:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': 'Invalid Transaction'
        })

    if is_transaction_signed(tx):
        # tx carries its own signature; if one was also supplied separately,
        # the two must agree
        tx_sig = data_encoder(signature_from_transaction(tx))
        if signature:
            if tx_sig != signature:
                raise JsonRPCInvalidParamsError(
                    data={
                        'id': 'invalid_signature',
                        'message': 'Invalid Signature: Signature in payload and signature of transaction do not match'
                    })
        else:
            signature = tx_sig
    else:
        # unsigned tx: the signature argument is mandatory and is attached
        # to the transaction after validation
        if signature is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'missing_signature',
                'message': 'Missing Signature'
            })
        if not validate_signature(signature):
            raise JsonRPCInvalidParamsError(
                data={
                    'id': 'invalid_signature',
                    'message': 'Invalid Signature: {}'.format('Invalid length' if len(
                        signature) != 132 else 'Invalid hex value')
                })
        try:
            sig = data_decoder(signature)
        except Exception:
            # validate_signature passed but decoding failed; should not happen
            log.exception(
                "Unexpected error decoding valid signature: {}".format(
                    signature))
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_signature',
                'message': 'Invalid Signature'
            })
        add_signature_to_transaction(tx, sig)

    # validate network id, if it's not for "all networks"
    if tx.network_id is not None and self.network_id != tx.network_id:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_network_id',
            'message': 'Invalid Network ID'
        })

    from_address = data_encoder(tx.sender)
    to_address = data_encoder(tx.to)

    # prevent spamming of transactions with the same nonce from the same sender
    with RedisLock(self.redis,
                   "{}:{}".format(from_address, tx.nonce),
                   raise_when_locked=partial(JsonRPCInvalidParamsError,
                                             data={
                                                 'id': 'invalid_nonce',
                                                 'message': 'Nonce already used'
                                             }),
                   ex=5):

        # disallow transaction overwriting for known transactions
        async with self.db:
            existing = await self.db.fetchrow(
                "SELECT * FROM transactions WHERE "
                "from_address = $1 AND nonce = $2 AND status != $3",
                from_address, tx.nonce, 'error')
        if existing:
            # debugging checks
            existing_tx = await self.eth.eth_getTransactionByHash(existing['hash'])
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Nonce already used'
            })

        # make sure the account has enough funds for the transaction
        # (balance here includes unconfirmed spends — see get_balances)
        network_balance, balance, _, _ = await self.get_balances(from_address)

        #log.info("Attempting to send transaction\nHash: {}\n{} -> {}\nValue: {} + {} (gas) * {} (startgas) = {}\nSender's Balance {} ({} unconfirmed)".format(
        #    calculate_transaction_hash(tx), from_address, to_address, tx.value, tx.startgas, tx.gasprice, tx.value + (tx.startgas * tx.gasprice), network_balance, balance))

        if balance < (tx.value + (tx.startgas * tx.gasprice)):
            raise JsonRPCInsufficientFundsError(data={
                'id': 'insufficient_funds',
                'message': 'Insufficient Funds'
            })

        # validate the nonce: it must match the expected next nonce exactly
        c_nonce = await self.get_transaction_count(from_address)
        if tx.nonce < c_nonce:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Provided nonce is too low'
            })
        if tx.nonce > c_nonce:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Provided nonce is too high'
            })

        # make sure the supplied gas at least covers the intrinsic tx cost
        if tx.intrinsic_gas_used > tx.startgas:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_transaction',
                'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'.format(tx.intrinsic_gas_used, tx.startgas)
            })

        # now this tx fits enough of the criteria to allow it
        # onto the transaction queue
        tx_hash = calculate_transaction_hash(tx)

        # add tx to database
        async with self.db:
            await self.db.execute(
                "INSERT INTO transactions "
                "(hash, from_address, to_address, nonce, "
                "value, gas, gas_price, "
                "data, v, r, s, "
                "sender_toshi_id) "
                "VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)",
                tx_hash, from_address, to_address, tx.nonce,
                hex(tx.value), hex(tx.startgas), hex(tx.gasprice),
                data_encoder(tx.data), hex(tx.v), hex(tx.r), hex(tx.s),
                self.user_toshi_id)
            await self.db.commit()

        # trigger processing the transaction queue
        self.tasks.process_transaction_queue(from_address)

        # analytics
        # use notification registrations to try find toshi ids for users
        if self.user_toshi_id:
            sender_toshi_id = self.user_toshi_id
        else:
            async with self.db:
                sender_toshi_id = await self.db.fetchval(
                    "SELECT toshi_id FROM notification_registrations WHERE "
                    "eth_address = $1", from_address)
        async with self.db:
            receiver_toshi_id = await self.db.fetchval(
                "SELECT toshi_id FROM notification_registrations WHERE "
                "eth_address = $1", to_address)
        self.track(sender_toshi_id, "Sent transaction")
        # it doesn't make sense to add user agent here as we
        # don't know the receiver's user agent
        self.track(receiver_toshi_id, "Received transaction", add_user_agent=False)

    return tx_hash
import asyncio import os from tornado.escape import json_decode from tornado.testing import gen_test from toshieth.test.base import EthServiceBaseTest, requires_full_stack from toshi.test.ethereum.parity import FAUCET_PRIVATE_KEY, FAUCET_ADDRESS, ParityServer from toshi.test.ethereum.ethminer import EthMiner from toshi.test.ethereum.faucet import FaucetMixin from toshi.ethereum.utils import data_decoder, data_encoder, private_key_to_address from toshi.ethereum.tx import decode_transaction, sign_transaction, DEFAULT_STARTGAS, DEFAULT_GASPRICE from toshi.utils import parse_int from toshi.jsonrpc.client import JsonRPCClient TEST_PRIVATE_KEY_1 = data_decoder("0xe8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35") TEST_ADDRESS_1 = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf" TEST_PRIVATE_KEY_2 = data_decoder("0x0ffdb88a7a0a40831ca0b19bd31f3f6085764ef8b7db1bd6b57072e5eaea24ff") TEST_ADDRESS_2 = "0x35351b44e03ec8515664a955146bf9c6e503a381" TEST_PRIVATE_KEY_3 = data_decoder("0xbc91668394b936efc05aecbf30213746ce7967374918ba75d0bc751a30463cd5") TEST_ADDRESS_3 = "0x4aaddc8923dfbe6053291c8ba785af5bda49d905" TEST_PRIVATE_KEY_4 = data_decoder("0x3d60b77d9bd1e63775e4666baeda12d1d236265a85a5a4b620bcbe47ca7bcde4") TEST_ADDRESS_4 = "0xc4867418f9950f1c32f79fee9098f9058c06823c" TEST_PRIVATE_KEY_5 = data_decoder("0x40ba61d699250a190ec87e78be117007c7acbacb2c18c88f649c31dfc28bfb2a") TEST_ADDRESS_5 = "0x3fa28bbf036821db8342f66b7ce8ff583e0ebfd5" TEST_PRIVATE_KEY_6 = data_decoder("0x006876936f5bf4f404a080e474f94aa3fcd7c413009533fcde51e951abb7f724") TEST_ADDRESS_6 = "0xb33bcd070ed7c5effa861051a1ec25b818fea111"
async def process_block_for_contract(self, collectible_address):
    """Process the next batch of blocks (at most 1000) for one collectible.

    Fetches the collectible's Transfer events from the ethereum node for the
    block range [last_block + 1, last_block + 1000]. Tokens seen for the
    first time have their ERC721 metadata (tokenURI, name, description,
    image) fetched and inserted; already-known tokens just have their owner
    updated. Finally advances the collectible's ``last_block`` marker and
    re-schedules itself if more blocks remain.

    :param collectible_address: contract address of the collectible to process.
    """
    # re-entrancy guard: only one processing task per collectible at a time
    if collectible_address in self._processing:
        log.warning("Already processing {}".format(collectible_address))
        return
    self._processing[collectible_address] = True

    async with self.pool.acquire() as con:
        latest_block_number = await con.fetchval(
            "SELECT blocknumber FROM last_blocknumber")
        collectible = await con.fetchrow(
            "SELECT * FROM collectibles WHERE contract_address = $1",
            collectible_address)
        if collectible is None:
            log.error(
                "Unable to find collectible with contract_address {}".
                format(collectible_address))
            del self._processing[collectible_address]
            return
        if collectible['type'] == 1:
            events = await con.fetch(
                "SELECT * FROM collectible_transfer_events "
                "WHERE collectible_address = $1",
                collectible_address)
        elif collectible['type'] == 721:
            # use default erc721 event
            # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
            events = [{
                'collectible_address': collectible_address,
                'contract_address': collectible_address,
                'name': 'Transfer',
                'topic_hash': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                'arguments': ['address', 'address', 'uint256'],
                'indexed_arguments': [True, True, False],
                'to_address_offset': 1,
                'token_id_offset': 2
            }]

    from_block_number = collectible['last_block'] + 1
    if latest_block_number < from_block_number:
        # already caught up with the chain head
        del self._processing[collectible_address]
        log.info(
            "Aborting {} because latest block number < collectible's next block"
            .format(collectible_address))
        return
    # cap the batch size at 1000 blocks per run
    to_block_number = min(from_block_number + 1000, latest_block_number)

    # token_id (hex) -> (contract, token_id, new_owner); keyed so multiple
    # transfers of the same token in one batch keep only the latest owner
    updates = {}
    for event in events:
        contract_address = event['contract_address']
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=[[event['topic_hash']]],
                    address=contract_address)
                break
            except:
                # retry indefinitely; the node may be temporarily unavailable
                log.exception("error getting logs for block")
                continue
        if len(logs):
            for _log in logs:
                # topics[0] is the event signature; the rest are indexed args
                indexed_data = _log['topics'][1:]
                data_types = [
                    t for t, i in zip(event['arguments'], event['indexed_arguments']) if i is False
                ]
                try:
                    data = decode_abi(data_types, data_decoder(_log['data']))
                except:
                    log.exception("Error decoding log data: {} {}".format(
                        data_types, _log['data']))
                    del self._processing[collectible_address]
                    return
                # rebuild the full argument list in declaration order,
                # interleaving indexed topics and non-indexed data
                arguments = []
                try:
                    for t, i in zip(event['arguments'], event['indexed_arguments']):
                        if i is True:
                            arguments.append(
                                decode_single(
                                    process_type(t),
                                    data_decoder(indexed_data.pop(0))))
                        else:
                            arguments.append(data.pop(0))
                except:
                    log.exception("Error compiling event data")
                    log.info("EVENT: {}".format(event))
                    log.info("LOG: {}".format(_log))
                    del self._processing[collectible_address]
                    return
                to_address = arguments[event['to_address_offset']]
                token_id = parse_int(arguments[event['token_id_offset']])
                log.debug("{} #{} -> {} -> {}".format(
                    collectible['name'], token_id, event['name'],
                    to_address))
                updates[hex(token_id)] = (collectible_address, hex(token_id), to_address)

    if len(updates) > 0:
        # tokens not yet in the database need their metadata fetched; they are
        # moved out of `updates` into `new_tokens` so the final upsert only
        # touches already-known tokens
        new_tokens = []
        for token_id in list(updates.keys()):
            async with self.pool.acquire() as con:
                token = await con.fetchrow(
                    "SELECT * FROM collectible_tokens WHERE contract_address = $1 AND token_id = $2",
                    collectible_address, token_id)
            if token is None:
                # get token details
                token_uri = None
                token_uri_data = await self.eth.eth_call(
                    to_address=collectible_address,
                    data="{}{:064x}".format(TOKEN_URI_CALL_DATA, int(token_id, 16)))
                if token_uri_data and token_uri_data != "0x":
                    try:
                        token_uri = decode_abi(
                            ['string'],
                            data_decoder(token_uri_data))[0].decode(
                                'utf-8', errors='replace')
                    except:
                        log.exception("Error decoding tokenURI data")

                token_image = None
                token_name = None
                token_description = None

                # if token_uri points to a valid url check if it points to json (for the erc721 metadata)
                parsed_uri = urlparse(token_uri)
                if token_uri and parsed_uri.netloc and parsed_uri.scheme in ['http', 'https']:
                    try:
                        resp = await AsyncHTTPClient(max_clients=100).fetch(parsed_uri.geturl())
                        metadata = json_decode(resp.body)
                        # some metadata documents nest the fields under
                        # "properties", with values wrapped in
                        # {"description": ...} objects; handle both shapes
                        if "properties" in metadata:
                            metadata = metadata['properties']
                        if 'name' in metadata:
                            if type(metadata['name']) == dict and 'description' in metadata['name']:
                                token_name = metadata['name']['description']
                            elif type(metadata['name']) == str:
                                token_name = metadata['name']
                        if 'description' in metadata:
                            if type(metadata['description']) == dict and 'description' in metadata['description']:
                                token_description = metadata['description']['description']
                            elif type(metadata['description']) == str:
                                token_description = metadata['description']
                        if 'image' in metadata:
                            if type(metadata['image']) == dict and 'description' in metadata['image']:
                                token_image = metadata['image']['description']
                            elif type(metadata['image']) == str:
                                token_image = metadata['image']
                    except:
                        # metadata fetch is best-effort; fall through to the
                        # default image below
                        log.exception(
                            "Error getting token metadata for {}:{} from {}"
                            .format(collectible_address, token_id, token_uri))
                        pass
                if not token_image:
                    # fall back to a templated image url when the metadata
                    # did not supply one
                    if collectible['image_url_format_string'] is not None:
                        image_format_string = collectible['image_url_format_string']
                    else:
                        image_format_string = config['collectibles']['image_format']
                    token_image = image_format_string.format(
                        contract_address=collectible_address,
                        token_id_hex=token_id,
                        token_id_int=int(token_id, 16),
                        token_uri=token_uri)

                new_token = updates.pop(token_id, ()) + (
                    token_uri, token_name, token_description, token_image)
                new_tokens.append(new_token)

        async with self.pool.acquire() as con:
            if len(new_tokens) > 0:
                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, token_uri, name, description, image) "
                    "VALUES ($1, $2, $3, $4, $5, $6, $7)",
                    new_tokens)
            await con.executemany(
                "INSERT INTO collectible_tokens (contract_address, token_id, owner_address) "
                "VALUES ($1, $2, $3) "
                "ON CONFLICT (contract_address, token_id) DO UPDATE "
                "SET owner_address = EXCLUDED.owner_address",
                list(updates.values()))

    # the collectible is "ready" once its scan has reached the chain head
    ready = collectible['ready'] or to_block_number == latest_block_number
    self.last_block = to_block_number
    async with self.pool.acquire() as con:
        await con.execute(
            "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
            to_block_number, ready, collectible_address)
    del self._processing[collectible_address]
    if to_block_number < latest_block_number:
        # more blocks remain: schedule the next batch for this collectible
        asyncio.get_event_loop().create_task(
            self.process_block_for_contract(collectible_address))
import asyncio import os from tornado.testing import gen_test from tornado.escape import json_decode from toshieth.test.base import EthServiceBaseTest, requires_full_stack from toshi.ethereum.utils import private_key_to_address, data_decoder from toshi.utils import parse_int from toshi.test.ethereum.faucet import FAUCET_PRIVATE_KEY from toshi.ethereum.contract import Contract TEST_PRIVATE_KEY = data_decoder( "0xe8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35") TEST_ADDRESS = private_key_to_address(TEST_PRIVATE_KEY) TEST_PRIVATE_KEY_2 = data_decoder( "0x0ffdb88a7a0a40831ca0b19bd31f3f6085764ef8b7db1bd6b57072e5eaea24ff") TEST_ADDRESS_2 = private_key_to_address(TEST_PRIVATE_KEY_2) TEST_PRIVATE_KEY_3 = data_decoder( "0x46e2301e4af216b64479ec6661814276fe7ac1812b1d463762d86df39a8f50dd") TEST_ADDRESS_3 = private_key_to_address(TEST_PRIVATE_KEY_3) TEST_PRIVATE_KEY_4 = data_decoder( "0x97838fae42fe69f2fb5e34496cfa8d4c36a8f7c92a0871535302e62dda37f6ff") TEST_ADDRESS_4 = private_key_to_address(TEST_PRIVATE_KEY_4) SPLITTER_CONTRACT = open( os.path.join(os.path.dirname(__file__), "splitter.sol")).read()
async def update_transaction(self, transaction_id, status, retry_start_time=0):
    """Update a transaction's status and send the matching push notifications.

    For a 'confirmed' status the transaction is verified against the node
    (receipt logs are checked to validate any associated token transfers);
    if the node has not caught up yet the update is re-dispatched with a
    random backoff for up to 60 seconds. For other statuses the database row
    is updated directly. Finally SOFA payment notifications are rendered and
    dispatched to sender and receiver.

    :param transaction_id: database id of the transaction to update.
    :param status: new status ('queued', 'unconfirmed', 'confirmed', 'error', ...).
    :param retry_start_time: internal — wall-clock time of the first attempt,
        used to bound the confirmation retry loop; 0 on the first call.
    """
    async with self.db:
        tx = await self.db.fetchrow("SELECT * FROM transactions WHERE transaction_id = $1", transaction_id)
        if tx is None or tx['status'] == status:
            # unknown transaction, or nothing to do
            return

        # any erc20 token transfers tied to this transaction
        token_txs = await self.db.fetch(
            "SELECT tok.symbol, tok.name, tok.decimals, tx.contract_address, tx.value, tx.from_address, tx.to_address, tx.transaction_log_index, tx.status "
            "FROM token_transactions tx "
            "JOIN tokens tok "
            "ON tok.contract_address = tx.contract_address "
            "WHERE tx.transaction_id = $1", transaction_id)

    # check if we're trying to update the state of a tx that is already confirmed, we have an issue
    if tx['status'] == 'confirmed':
        log.warning("Trying to update status of tx {} to {}, but tx is already confirmed".format(tx['hash'], status))
        return

    # only log if the transaction is internal
    if tx['v'] is not None:
        log.info("Updating status of tx {} to {} (previously: {})".format(
            tx['hash'], status, tx['status']))

    if status == 'confirmed':
        # fetch the transaction and its receipt in a single bulk rpc call
        try:
            bulk = self.eth.bulk()
            transaction = bulk.eth_getTransactionByHash(tx['hash'])
            tx_receipt = bulk.eth_getTransactionReceipt(tx['hash'])
            await bulk.execute()
            transaction = transaction.result()
            tx_receipt = tx_receipt.result()
        except:
            log.exception("Error getting transaction: {}".format(tx['hash']))
            transaction = None
            tx_receipt = None
        if transaction and 'blockNumber' in transaction and transaction['blockNumber'] is not None:
            if retry_start_time > 0:
                log.info("successfully confirmed tx {} after {} seconds".format(
                    tx['hash'], round(time.time() - retry_start_time, 2)))
            token_tx_updates = []
            updated_token_txs = []
            for token_tx in token_txs:
                from_address = token_tx['from_address']
                to_address = token_tx['to_address']
                # check transaction receipt to make sure the transfer was successful
                has_transfer_event = False
                token_tx_status = 'confirmed'
                if tx_receipt['logs'] is not None:  # should always be [], but checking just incase
                    for _log in tx_receipt['logs']:
                        if len(_log['topics']) > 0 and _log['topics'][0] == TRANSFER_TOPIC:
                            # standard Transfer: from/to as indexed topics...
                            if len(_log['topics']) == 3 and len(_log['data']) == 66 and \
                               decode_single_address(_log['topics'][1]) == from_address and \
                               decode_single_address(_log['topics'][2]) == to_address:
                                has_transfer_event = True
                                break
                            # ...or non-indexed Transfer with all three values in data
                            elif len(_log['topics']) == 1 and len(_log['data']) == 194:
                                erc20_from_address, erc20_to_address, erc20_value = decode_abi(
                                    ['address', 'address', 'uint256'],
                                    data_decoder(_log['data']))
                                if erc20_from_address == from_address and \
                                   erc20_to_address == to_address:
                                    has_transfer_event = True
                                    break
                        elif _log['address'] == WETH_CONTRACT_ADDRESS:
                            # WETH wrap/unwrap emit Deposit/Withdrawal instead
                            # of Transfer
                            if _log['topics'][0] == DEPOSIT_TOPIC and decode_single_address(_log['topics'][1]) == to_address:
                                has_transfer_event = True
                                break
                            elif _log['topics'][0] == WITHDRAWAL_TOPIC and decode_single_address(_log['topics'][1]) == from_address:
                                has_transfer_event = True
                                break
                    if not has_transfer_event:
                        # there was no Transfer event matching this transaction, this means something went wrong
                        token_tx_status = 'error'
                    else:
                        erc20_dispatcher.update_token_cache(
                            token_tx['contract_address'],
                            from_address,
                            to_address,
                            blocknumber=parse_int(transaction['blockNumber']))
                else:
                    log.error("Unexpectedly got null for tx receipt logs for tx: {}".format(tx['hash']))
                    token_tx_status = 'error'
                token_tx_updates.append((token_tx_status, tx['transaction_id'], token_tx['transaction_log_index']))
                # carry the resolved status forward for the notification step
                token_tx = dict(token_tx)
                token_tx['status'] = token_tx_status
                updated_token_txs.append(token_tx)
            token_txs = updated_token_txs
            blocknumber = parse_int(transaction['blockNumber'])
            async with self.db:
                await self.db.execute(
                    "UPDATE transactions SET status = $1, blocknumber = $2, updated = (now() AT TIME ZONE 'utc') "
                    "WHERE transaction_id = $3",
                    status, blocknumber, transaction_id)
                if token_tx_updates:
                    await self.db.executemany(
                        "UPDATE token_transactions SET status = $1 "
                        "WHERE transaction_id = $2 AND transaction_log_index = $3",
                        token_tx_updates)
                await self.db.commit()
        else:
            # the node reports the tx as not yet mined — this is probably
            # because the node hasn't caught up with the latest block yet;
            # retry in a "bit" (but only retry up to 60 seconds)
            if retry_start_time > 0 and time.time() - retry_start_time >= 60:
                if transaction is None:
                    log.error("requested transaction {}'s status to be set to confirmed, but cannot find the transaction".format(tx['hash']))
                else:
                    log.error("requested transaction {}'s status to be set to confirmed, but transaction is not confirmed on the node".format(tx['hash']))
                return
            await asyncio.sleep(random.random())
            manager_dispatcher.update_transaction(
                transaction_id, status,
                retry_start_time=retry_start_time or time.time())
            return
    else:
        async with self.db:
            await self.db.execute(
                "UPDATE transactions SET status = $1, updated = (now() AT TIME ZONE 'utc') WHERE transaction_id = $2",
                status, transaction_id)
            await self.db.commit()

    # render notification

    # don't send "queued"
    if status == 'queued':
        status = 'unconfirmed'
    elif status == 'unconfirmed' and tx['status'] == 'queued':
        # there's already been a tx for this so no need to send another
        return

    messages = []

    # check if this is an erc20 transaction, if so use those values
    if token_txs:
        for token_tx in token_txs:
            token_tx_status = token_tx['status']
            from_address = token_tx['from_address']
            to_address = token_tx['to_address']

            # TokenPayment PNs are not shown at the moment, so i'm removing
            # this for the time being until they're required
            # if token_tx_status == 'confirmed':
            #     data = {
            #         "txHash": tx['hash'],
            #         "fromAddress": from_address,
            #         "toAddress": to_address,
            #         "status": token_tx_status,
            #         "value": token_tx['value'],
            #         "contractAddress": token_tx['contract_address']
            #     }
            #     messages.append((from_address, to_address, token_tx_status, "SOFA::TokenPayment: " + json_encode(data)))

            # if a WETH deposit or withdrawal, we need to let the client know to
            # update their ETHER balance using a normal SOFA:Payment
            if token_tx['contract_address'] == WETH_CONTRACT_ADDRESS and (from_address == "0x0000000000000000000000000000000000000000" or to_address == "0x0000000000000000000000000000000000000000"):
                payment = SofaPayment(value=parse_int(token_tx['value']), txHash=tx['hash'], status=status,
                                      fromAddress=from_address, toAddress=to_address,
                                      networkId=config['ethereum']['network_id'])
                messages.append((from_address, to_address, status, payment.render()))
    else:
        from_address = tx['from_address']
        to_address = tx['to_address']
        payment = SofaPayment(value=parse_int(tx['value']), txHash=tx['hash'], status=status,
                              fromAddress=from_address, toAddress=to_address,
                              networkId=config['ethereum']['network_id'])
        messages.append((from_address, to_address, status, payment.render()))

    # figure out what addresses need pns
    # from address always needs a pn
    for from_address, to_address, status, message in messages:
        manager_dispatcher.send_notification(from_address, message)

        # no need to check to_address for contract deployments
        if to_address == "0x":
            # TODO: update any notification registrations to be marked as a contract
            return

        # check if this is a brand new tx with no status
        if tx['status'] == 'new':
            # if an error has happened before any PNs have been sent
            # we only need to send the error to the sender, thus we
            # only add 'to' if the new status is not an error
            if status != 'error':
                manager_dispatcher.send_notification(to_address, message)
        else:
            manager_dispatcher.send_notification(to_address, message)

        # trigger a processing of the to_address's queue incase it has
        # things waiting on this transaction
        manager_dispatcher.process_transaction_queue(to_address)
async def sanity_check(self, frequency):
    """Periodic consistency check between the transaction table and the nodes.

    Finds sender addresses whose transactions look stuck — either old
    pending transactions, or queued transactions with nothing unconfirmed
    ahead of them — then for each: verifies queued transactions still have a
    pending incoming dependency, re-checks unconfirmed transactions against
    the node (resubmitting raw transactions the node has dropped, or marking
    them confirmed/error as appropriate), and finally re-triggers queue
    processing for every affected address.

    :param frequency: seconds between runs; when truthy the check re-schedules
        itself via ``manager_dispatcher`` with this delay.
    """
    async with self.db:
        # senders with transactions pending for more than 3 minutes
        rows = await self.db.fetch(
            "SELECT DISTINCT from_address FROM transactions WHERE (status = 'unconfirmed' OR status = 'queued' OR status = 'new') "
            "AND v IS NOT NULL AND created < (now() AT TIME ZONE 'utc') - interval '3 minutes'"
        )
        # senders with queued/new transactions but no unconfirmed ones
        # (i.e. the queue is stalled with nothing in flight)
        rows2 = await self.db.fetch(
            "WITH t1 AS (SELECT DISTINCT from_address FROM transactions WHERE (status = 'new' OR status = 'queued') AND v IS NOT NULL), "
            "t2 AS (SELECT from_address, COUNT(*) FROM transactions WHERE (status = 'unconfirmed' AND v IS NOT NULL) GROUP BY from_address) "
            "SELECT t1.from_address FROM t1 LEFT JOIN t2 ON t1.from_address = t2.from_address WHERE t2.count IS NULL;"
        )

    if rows or rows2:
        log.debug(
            "sanity check found {} addresses with potential problematic transactions"
            .format(len(rows) + len(rows2)))

    rows = set([row['from_address'] for row in rows
                ]).union(set([row['from_address'] for row in rows2]))

    # addresses whose queues should be re-processed at the end
    addresses_to_check = set()
    old_and_unconfirmed = []

    for ethereum_address in rows:

        # check on queued transactions
        async with self.db:
            queued_transactions = await self.db.fetch(
                "SELECT * FROM transactions "
                "WHERE from_address = $1 "
                "AND (status = 'new' OR status = 'queued') AND v IS NOT NULL",
                ethereum_address)

        if queued_transactions:
            # make sure there are pending incoming transactions
            async with self.db:
                incoming_transactions = await self.db.fetch(
                    "SELECT * FROM transactions "
                    "WHERE to_address = $1 "
                    "AND (status = 'unconfirmed' OR status = 'queued' OR status = 'new')",
                    ethereum_address)

            if not incoming_transactions:
                log.error(
                    "ERROR: {} has transactions in it's queue, but no unconfirmed transactions!"
                    .format(ethereum_address))
                # trigger queue processing as last resort
                addresses_to_check.add(ethereum_address)
            else:
                # check health of the incoming transaction
                for transaction in incoming_transactions:
                    # external transactions (v IS NULL) aren't managed by us,
                    # so verify their state directly against the node
                    if transaction['v'] is None:
                        try:
                            tx = await self.eth.eth_getTransactionByHash(
                                transaction['hash'])
                        except:
                            # FIX: was `"... {} ...", hash` which mixed
                            # str.format placeholders with logging's %-style
                            # lazy args, so the hash never appeared in the log
                            log.exception(
                                "Error getting transaction %s in sanity check",
                                transaction['hash'])
                            continue
                        if tx is None:
                            log.warning(
                                "external transaction (id: {}) no longer found on nodes"
                                .format(transaction['transaction_id']))
                            await self.update_transaction(
                                transaction['transaction_id'], 'error')
                            addresses_to_check.add(ethereum_address)
                        elif tx['blockNumber'] is not None:
                            log.warning(
                                "external transaction (id: {}) confirmed on node, but wasn't confirmed in db"
                                .format(transaction['transaction_id']))
                            await self.update_transaction(
                                transaction['transaction_id'], 'confirmed')
                            addresses_to_check.add(ethereum_address)

            # no need to continue with dealing with unconfirmed transactions if there are queued ones
            continue

        async with self.db:
            unconfirmed_transactions = await self.db.fetch(
                "SELECT * FROM transactions "
                "WHERE from_address = $1 "
                "AND status = 'unconfirmed' AND v IS NOT NULL",
                ethereum_address)

        if unconfirmed_transactions:
            for transaction in unconfirmed_transactions:
                # check on unconfirmed transactions first
                if transaction['status'] == 'unconfirmed':
                    # we need to check the true status of unconfirmed transactions
                    # as the block monitor may be inbetween calls and not have seen
                    # this transaction to mark it as confirmed.
                    try:
                        tx = await self.eth.eth_getTransactionByHash(
                            transaction['hash'])
                    except:
                        # FIX: same broken `{}` + lazy-args mix as above
                        log.exception(
                            "Error getting transaction %s in sanity check",
                            transaction['hash'])
                        continue

                    # sanity check to make sure the tx still exists
                    if tx is None:
                        # if not, try resubmit
                        # NOTE: it may just be an issue with load balanced nodes not seeing all pending transactions
                        # so we don't want to adjust the status of the transaction at all at this stage
                        value = parse_int(transaction['value'])
                        gas = parse_int(transaction['gas'])
                        gas_price = parse_int(transaction['gas_price'])
                        data = data_decoder(transaction['data']) if transaction['data'] else b''
                        tx = create_transaction(
                            nonce=transaction['nonce'], value=value,
                            gasprice=gas_price, startgas=gas,
                            to=transaction['to_address'], data=data,
                            v=parse_int(transaction['v']),
                            r=parse_int(transaction['r']),
                            s=parse_int(transaction['s']))
                        # verify the reconstructed tx before resubmitting
                        if calculate_transaction_hash(tx) != transaction['hash']:
                            log.warning(
                                "error resubmitting transaction {}: regenerating tx resulted in a different hash"
                                .format(transaction['hash']))
                        else:
                            tx_encoded = encode_transaction(tx)
                            try:
                                await self.eth.eth_sendRawTransaction(tx_encoded)
                                addresses_to_check.add(transaction['from_address'])
                            except Exception as e:
                                # note: usually not critical, don't panic
                                log.warning(
                                    "error resubmitting transaction {}: {}"
                                    .format(transaction['hash'], str(e)))
                    elif tx['blockNumber'] is not None:
                        # confirmed! update the status
                        await self.update_transaction(
                            transaction['transaction_id'], 'confirmed')
                        addresses_to_check.add(transaction['from_address'])
                        addresses_to_check.add(transaction['to_address'])
                    else:
                        old_and_unconfirmed.append(transaction['hash'])

    if len(old_and_unconfirmed):
        log.warning(
            "WARNING: {} transactions are old and unconfirmed!".format(
                len(old_and_unconfirmed)))

    for address in addresses_to_check:
        # make sure we don't try process any contract deployments
        if address != "0x":
            manager_dispatcher.process_transaction_queue(address)

    if frequency:
        manager_dispatcher.sanity_check(frequency).delay(frequency)
async def _process_transaction_queue(self, ethereum_address):
    """Schedule/send queued transactions for a single sender address.

    Pops 'new'/'queued' transactions for *ethereum_address* (highest nonce
    first, so ``.pop()`` yields them in ascending nonce order), validates
    each against the current nonce/balance state, broadcasts the ones that
    can be sent, and marks unsendable ones as 'error'.  Any change that can
    affect a receiving address's queue causes that address to be reprocessed
    at the end.

    Side effects: updates transaction statuses in the database, sends raw
    transactions to the ethereum node, and dispatches follow-up
    process_transaction_queue tasks via ``manager_dispatcher``.
    """
    log.debug("processing tx queue for {}".format(ethereum_address))
    # check for un-scheduled transactions
    async with self.db:
        # get the last block number to use in ethereum calls
        # to avoid race conditions in transactions being confirmed
        # on the network before the block monitor sees and updates them in the database
        last_blocknumber = (await self.db.fetchval("SELECT blocknumber FROM last_blocknumber"))
        transactions_out = await self.db.fetch(
            "SELECT * FROM transactions "
            "WHERE from_address = $1 "
            "AND (status = 'new' OR status = 'queued') "
            "AND r IS NOT NULL "
            # order by nonce reversed so that .pop() can
            # be used in the loop below
            "ORDER BY nonce DESC",
            ethereum_address)

    # any time the state of a transaction is changed we need to make
    # sure those changes cascade down to the receiving address as well;
    # this keeps a list of all the receiving addresses that need to be
    # checked after the current address's queue has been processed
    addresses_to_check = set()

    if transactions_out:
        # TODO: make sure the block number isn't too far apart from the current
        # if this is the case then we should just come back later!

        # get the current network balance for this address (pinned to the
        # last block the monitor has processed, to stay consistent with the db)
        balance = await self.eth.eth_getBalance(ethereum_address, block=last_blocknumber or "latest")
        # get the unconfirmed_txs (plus txs confirmed after the monitor's
        # last block, which the pinned balance above doesn't reflect yet)
        async with self.db:
            unconfirmed_txs = await self.db.fetch(
                "SELECT nonce, value, gas, gas_price FROM transactions "
                "WHERE from_address = $1 "
                "AND (status = 'unconfirmed' "
                "OR (status = 'confirmed' AND blocknumber > $2)) "
                "ORDER BY nonce",
                ethereum_address, last_blocknumber or 0)
        network_nonce = await self.eth.eth_getTransactionCount(
            ethereum_address, block=last_blocknumber or "latest")

        if unconfirmed_txs:
            # next usable nonce follows the highest in-flight nonce, and the
            # balance must be reduced by everything already committed
            nonce = unconfirmed_txs[-1]['nonce'] + 1
            balance -= sum(
                parse_int(tx['value']) + (parse_int(tx['gas']) * parse_int(tx['gas_price']))
                for tx in unconfirmed_txs)
        else:
            # use the nonce from the network
            nonce = network_nonce

        # marker for whether a previous transaction had an error (signaling
        # that all the following should also be an error)
        previous_error = False

        # for each one, check if we can schedule them yet
        while transactions_out:
            transaction = transactions_out.pop()

            # if there was a previous error in the queue, abort!
            if previous_error:
                log.info("Setting tx '{}' to error due to previous error".format(transaction['hash']))
                await self.update_transaction(transaction['transaction_id'], 'error')
                addresses_to_check.add(transaction['to_address'])
                continue

            # make sure the nonce is still valid
            if nonce != transaction['nonce'] and network_nonce != transaction['nonce']:
                # check if this is an overwrite
                if transaction['status'] == 'new':
                    async with self.db:
                        old_tx = await self.db.fetchrow(
                            "SELECT * FROM transactions where from_address = $1 AND nonce = $2 AND hash != $3",
                            ethereum_address, transaction['nonce'], transaction['hash'])
                    if old_tx:
                        if old_tx['status'] == 'error':
                            # expected state for overwrites
                            pass
                        elif old_tx['status'] == 'unconfirmed' or old_tx['status'] == 'confirmed':
                            # nonce already consumed (or about to be) by a
                            # different tx: everything from here on fails
                            previous_error = True
                            log.info((
                                "Setting tx '{}' to error due to another unconfirmed transaction"
                                "with nonce ({}) already existing in the system"
                            ).format(transaction['hash'], transaction['nonce']))
                            await self.update_transaction(transaction['transaction_id'], 'error')
                            addresses_to_check.add(transaction['to_address'])
                            continue
                        else:
                            # two transactions with the same nonce on the queue
                            # lets pick the one with the highest gas price and error the other
                            # NOTE(review): the query above matches old_tx on
                            # nonce = transaction['nonce'], so this comparison
                            # is always False and the 'else' branch is always
                            # taken; the comment suggests this was meant to
                            # compare gas_price — verify against upstream.
                            if transaction['nonce'] > old_tx['nonce']:
                                # lets use this one!
                                log.info((
                                    "Setting tx '{}' to error due to another unconfirmed transaction"
                                    "with nonce ({}) already existing in the system"
                                ).format(old_tx['hash'], transaction['nonce']))
                                await self.update_transaction(old_tx['transaction_id'], 'error')
                                addresses_to_check.add(old_tx['to_address'])
                                # make sure the other transaction is pulled out of the queue
                                try:
                                    idx = next(i for i, e in enumerate(transactions_out)
                                               if e['transaction_id'] == old_tx['transaction_id'])
                                    del transactions_out[idx]
                                except:
                                    # old_tx not in the transactions_out list
                                    pass
                            else:
                                # we'll use the other one
                                log.info((
                                    "Setting tx '{}' to error due to another unconfirmed transaction"
                                    "with nonce ({}) already existing in the system"
                                ).format(old_tx['hash'], transaction['nonce']))
                                await self.update_transaction(transaction['transaction_id'], 'error')
                                addresses_to_check.add(transaction['to_address'])
                                addresses_to_check.add(transaction['from_address'])
                                # this case is actually pretty weird, so emptying the
                                # transactions_out so we restart the queue check
                                # completely
                                transactions_out = []
                                continue
                    else:
                        # well this is awkward! may as well let things go on in this case because
                        # it means a transaction in the nonce sequence is missing
                        pass
                elif transaction['status'] == 'queued':
                    # then this and all the following transactions are now invalid
                    previous_error = True
                    log.info(
                        "Setting tx '{}' to error due to the nonce ({}) not matching the network ({})"
                        .format(transaction['hash'], transaction['nonce'], nonce))
                    await self.update_transaction(transaction['transaction_id'], 'error')
                    addresses_to_check.add(transaction['to_address'])
                    continue
                else:
                    # this is a really weird state
                    # it's not clear what should be done here
                    log.error(
                        "Found unconfirmed transaction with out of order nonce for address: {}"
                        .format(ethereum_address))
                    return

            value = parse_int(transaction['value'])
            gas = parse_int(transaction['gas'])
            gas_price = parse_int(transaction['gas_price'])
            cost = value + (gas * gas_price)

            # check if the current balance is high enough to send to the network
            if balance >= cost:
                # check if gas price is high enough that it makes sense to send the transaction
                safe_gas_price = parse_int(await self.redis.get('gas_station_safelow_gas_price'))
                if safe_gas_price and safe_gas_price > gas_price:
                    log.debug(
                        "Not queuing tx '{}' as current gas price would not support it"
                        .format(transaction['hash']))
                    # retry this address in a minute
                    manager_dispatcher.process_transaction_queue(ethereum_address).delay(60)
                    # abort the rest of the processing after sending PNs for any "new" transactions
                    # (moving 'new' -> 'queued'; PNs presumably = push notifications — confirm)
                    while transaction:
                        if transaction['status'] == 'new':
                            await self.update_transaction(transaction['transaction_id'], 'queued')
                        transaction = transactions_out.pop() if transactions_out else None
                    break

                # if so, send the transaction
                # create the transaction
                data = data_decoder(transaction['data']) if transaction['data'] else b''
                tx = create_transaction(nonce=transaction['nonce'], value=value, gasprice=gas_price,
                                        startgas=gas, to=transaction['to_address'], data=data,
                                        v=parse_int(transaction['v']),
                                        r=parse_int(transaction['r']),
                                        s=parse_int(transaction['s']))

                # make sure the signature was valid (recovered sender must
                # match the queue's address)
                if data_encoder(tx.sender) != ethereum_address:
                    # signature is invalid for the user
                    log.error("ERROR signature invalid for sender of tx: {}".format(transaction['hash']))
                    log.error("queue: {}, db: {}, tx: {}".format(
                        ethereum_address, transaction['from_address'], data_encoder(tx.sender)))
                    previous_error = True
                    addresses_to_check.add(transaction['to_address'])
                    await self.update_transaction(transaction['transaction_id'], 'error')
                    continue

                # send the transaction
                try:
                    tx_encoded = encode_transaction(tx)
                    await self.eth.eth_sendRawTransaction(tx_encoded)
                    await self.update_transaction(transaction['transaction_id'], 'unconfirmed')
                except JsonRPCError as e:
                    # if something goes wrong with sending the transaction
                    # simply abort for now.
                    # TODO: depending on error, just break and queue to retry later
                    log.error("ERROR sending queued transaction: {}".format(e.format()))
                    # "nonce too low"/"already imported" can mean the node
                    # already knows this tx — check before flagging an error
                    if e.message and (e.message.startswith(
                            "Transaction nonce is too low"
                    ) or e.message.startswith(
                            "Transaction with the same hash was already imported"
                    )):
                        existing_tx = await self.eth.eth_getTransactionByHash(transaction['hash'])
                        if existing_tx:
                            if existing_tx['blockNumber']:
                                await self.update_transaction(transaction['transaction_id'], 'confirmed')
                            else:
                                await self.update_transaction(transaction['transaction_id'], 'unconfirmed')
                            continue
                    previous_error = True
                    await self.update_transaction(transaction['transaction_id'], 'error')
                    addresses_to_check.add(transaction['to_address'])
                    continue

                # adjust the balance values for checking the other transactions
                balance -= cost
                if nonce == transaction['nonce']:
                    nonce += 1
                continue
            else:
                # make sure the pending_balance would support this transaction
                # otherwise there's no way this transaction will be able to
                # be send, so trigger a failure on all the remaining transactions
                async with self.db:
                    transactions_in = await self.db.fetch(
                        "SELECT * FROM transactions "
                        "WHERE to_address = $1 "
                        "AND ("
                        "(status = 'new' OR status = 'queued' OR status = 'unconfirmed') "
                        "OR (status = 'confirmed' AND blocknumber > $2))",
                        ethereum_address, last_blocknumber or 0)

                # TODO: test if loops in the queue chain are problematic
                pending_received = sum((parse_int(p['value']) or 0) for p in transactions_in)

                if balance + pending_received < cost:
                    previous_error = True
                    log.info(
                        "Setting tx '{}' to error due to insufficient pending balance"
                        .format(transaction['hash']))
                    await self.update_transaction(transaction['transaction_id'], 'error')
                    addresses_to_check.add(transaction['to_address'])
                    continue
                else:
                    # a recently-confirmed incoming tx may have changed our
                    # balance — re-check this address later
                    if any(t['blocknumber'] is not None and t['blocknumber'] > last_blocknumber
                           for t in transactions_in):
                        addresses_to_check.add(ethereum_address)
                    # there's no reason to continue on here since all the
                    # following transaction in the queue cannot be processed
                    # until this one is
                    # but we still need to send PNs for any "new" transactions
                    while transaction:
                        if transaction['status'] == 'new':
                            await self.update_transaction(transaction['transaction_id'], 'queued')
                        transaction = transactions_out.pop() if transactions_out else None
                    break

    for address in addresses_to_check:
        # make sure we don't try process any contract deployments
        if address != "0x":
            manager_dispatcher.process_transaction_queue(address)

    if transactions_out:
        manager_dispatcher.process_transaction_queue(ethereum_address)
async def __call__(self, *args, startgas=None, gasprice=20000000000, value=0, wait_for_confirmation=True):
    """Invoke the bound contract function.

    Constant functions are executed via ``eth_call`` and return the decoded
    result (a single value if the function has one return value, else a
    list, or ``None`` when the call returned no data).  Non-constant
    functions are signed with ``self.from_key`` and broadcast with
    ``eth_sendRawTransaction``; the transaction hash is returned (or the
    raw encoded transaction when ``self.return_raw_tx`` is set).

    :param args: function arguments; hex strings are accepted for
        ``address`` and integer ABI types and converted automatically.
    :param startgas: gas limit; estimated via ``eth_estimateGas`` if None.
    :param gasprice: gas price in wei.
    :param value: wei to send along with the call.
    :param wait_for_confirmation: block until the transaction is mined and
        verify the receipt status.
    :raises Exception: when no sender is configured for a non-constant
        call, gas estimation fails, funds are insufficient, confirmation
        times out, or the receipt reports a failed status.
    """
    # TODO: figure out if we can validate args
    validated_args = []
    for (abi_type, _arg_name), arg in zip(
            self.contract.translator.function_data[self.name]['signature'], args):
        # accept hex-string forms for address and integer ABI types and
        # convert them to what the ABI encoder expects
        if abi_type == 'address' and isinstance(arg, str):
            validated_args.append(data_decoder(arg))
        elif (abi_type.startswith("uint") or abi_type.startswith("int")) and isinstance(arg, str):
            validated_args.append(int(arg, 16))
        else:
            validated_args.append(arg)

    ethurl = get_url()
    ethclient = JsonRPCClient(ethurl)

    data = self.contract.translator.encode_function_call(self.name, validated_args)

    # TODO: figure out if there's a better way to tell if the function needs to be called via sendTransaction
    if self.is_constant:
        result = await ethclient.eth_call(from_address=self.from_address or '',
                                          to_address=self.contract.address,
                                          data=data)
        result = data_decoder(result)
        if result:
            decoded = self.contract.translator.decode_function_result(self.name, result)
            # decode string results
            decoded = [
                val.decode('utf-8') if isinstance(val, bytes) and ret_type == 'string' else val
                for val, ret_type in zip(
                    decoded,
                    self.contract.translator.function_data[self.name]['decode_types'])
            ]
            # return the single value if there is only a single return value
            if len(decoded) == 1:
                return decoded[0]
            return decoded
        return None
    else:
        if self.from_address is None:
            raise Exception("Cannot call non-constant function without a sender")

        nonce = await ethclient.eth_getTransactionCount(self.from_address)
        balance = await ethclient.eth_getBalance(self.from_address)

        if startgas is None:
            startgas = await ethclient.eth_estimateGas(
                self.from_address, self.contract.address, data=data,
                nonce=nonce, value=value, gasprice=gasprice)
        # 50000000 is presumably the node's "estimation failed" sentinel — confirm
        if startgas == 50000000 or startgas is None:
            raise Exception(
                "Unable to estimate gas cost, possibly something wrong with the transaction arguments"
            )

        if balance < (startgas * gasprice):
            raise Exception("Given account doesn't have enough funds")

        tx = Transaction(nonce, gasprice, startgas, self.contract.address, value, data, 0, 0, 0)
        tx.sign(self.from_key)
        tx_encoded = data_encoder(rlp.encode(tx, Transaction))
        if self.return_raw_tx:
            return tx_encoded

        try:
            tx_hash = await ethclient.eth_sendRawTransaction(tx_encoded)
        except:
            print(balance, startgas * gasprice, startgas)
            raise

        # wait for the transaction to be mined
        if wait_for_confirmation:
            print("waiting on transaction: {}".format(tx_hash))
            starttime = time.time()
            warnlevel = 0
        while wait_for_confirmation:
            resp = await ethclient.eth_getTransactionByHash(tx_hash)
            if resp is None or resp['blockNumber'] is None:
                await asyncio.sleep(0.1)
                # BUGFIX: these comparisons were inverted ("< 10"/"< 60"),
                # which fired the "10 seconds have passed" warning on the
                # first poll and aborted on the second; warn only after the
                # stated time has actually elapsed.
                if resp is None and warnlevel == 0 and time.time() - starttime > 10:
                    print(
                        "WARNING: 10 seconds have passed and transaction is not showing as a pending transaction"
                    )
                    warnlevel = 1
                elif resp is None and warnlevel == 1 and time.time() - starttime > 60:
                    print(
                        "WARNING: 60 seconds have passed and transaction is not showing as a pending transaction"
                    )
                    raise Exception(
                        "Unexpected error waiting for transaction to complete"
                    )
            else:
                # mined: verify the receipt didn't report a failed status
                receipt = await ethclient.eth_getTransactionReceipt(tx_hash)
                if 'status' in receipt and receipt['status'] != "0x1":
                    raise Exception(
                        "Transaction status returned {}".format(receipt['status']))
                break

        # TODO: is it possible for non-const functions to have return types?
        return tx_hash
async def process_block_for_asset_creation_contract(self, collectible_address):
    """Scan the next window of blocks for AssetCreated events from a
    fungible-collectible creation contract and register the new assets.

    Processes at most 1000 blocks past ``collectible['last_block']``,
    inserting any newly created asset contracts (name, tokenURI metadata,
    creator, initial balance) into the database, then updates the
    collectible's ``last_block``/``ready`` markers and reschedules itself
    if more blocks remain.  Re-entrancy is guarded via ``self._processing``
    (one in-flight task per address) and ``self._queue`` (addresses to
    re-run once the current task finishes).
    """
    # only one task per collectible address at a time; remember that a
    # re-run was requested and bail
    if collectible_address in self._processing and not self._processing[collectible_address].done():
        log.debug("Already processing {}".format(collectible_address))
        self._queue.add(collectible_address)
        return

    self._processing[collectible_address] = asyncio.Task.current_task()

    async with self.pool.acquire() as con:
        latest_block_number = await con.fetchval("SELECT blocknumber FROM last_blocknumber")
        collectible = await con.fetchrow(
            "SELECT * FROM collectibles WHERE contract_address = $1", collectible_address)

    from_block_number = collectible['last_block'] + 1
    # nothing new to scan yet
    if latest_block_number < from_block_number:
        del self._processing[collectible_address]
        return
    # scan at most 1000 blocks per run
    to_block_number = min(from_block_number + 1000, latest_block_number)

    topics = [[ASSET_CREATED_TOPIC]]

    log.debug("Getting logs for {} from blocks {}->{}".format(
        collectible_address, from_block_number, to_block_number))
    req_start = time.time()
    # retry eth_getLogs until it succeeds, with a random (< 1s) backoff
    while True:
        try:
            logs = await self.eth.eth_getLogs(
                fromBlock=from_block_number, toBlock=to_block_number,
                topics=topics, address=collectible['contract_address'])
            if time.time() - req_start > 10:
                log.warning("eth_getLogs(fromBlock={}, toBlock={}, topics={}, address={}) took {} seconds to complete".format(
                    from_block_number, to_block_number, topics,
                    collectible['contract_address'], time.time() - req_start))
            break
        except JsonRPCError as e:
            # "Unknown block number" is expected while the node catches up;
            # anything else gets logged before retrying
            if e.message != "Unknown block number":
                log.exception("unexpected error getting logs for fungible creation contract: {} (after {} seconds)".format(
                    collectible_address, time.time() - req_start))
            await asyncio.sleep(random.random())
            continue
        except:
            log.exception("unexpected error getting logs for fungible creation contract: {} (after {} seconds)".format(
                collectible_address, time.time() - req_start))
            await asyncio.sleep(random.random())
            continue

    if len(logs):
        log.debug("Found {} logs for {} in blocks {}->{}".format(
            len(logs), collectible_address, from_block_number, to_block_number))
        for i, _log in enumerate(logs):
            log_block_number = int(_log['blockNumber'], 16)
            # sanity check: the node must not hand back logs outside the
            # requested range; abort the whole run if it does
            if log_block_number < from_block_number or log_block_number > to_block_number:
                log.error("go unexpected block number in logs: {} (fromBlock={}, toBlock={}, collectible_address={})".format(
                    log_block_number, from_block_number, to_block_number,
                    collectible['contract_address']))
                del self._processing[collectible_address]
                return

            topic = _log['topics'][0]
            if topic != ASSET_CREATED_TOPIC:
                continue

            # the created asset contract's address is the first indexed topic
            asset_contract_address = decode_single(
                process_type('address'), data_decoder(_log['topics'][1]))

            # pull the asset's tokenURI, name, creator and total supply off
            # the new contract; skip the asset if any call or decode fails
            try:
                token_uri_data = await self.eth.eth_call(
                    to_address=asset_contract_address, data=TOKEN_URI_CALL_DATA)
            except:
                log.exception("Error getting token uri for fungible collectible asset {}".format(asset_contract_address))
                continue
            asset_token_uri = decode_abi(['string'], data_decoder(token_uri_data))
            try:
                asset_token_uri = asset_token_uri[0].decode('utf-8', errors='replace')
            except:
                log.exception("Invalid tokenURI for fungible collectible asset {}".format(asset_contract_address))
                continue
            try:
                name_data = await self.eth.eth_call(
                    to_address=asset_contract_address, data=NAME_CALL_DATA)
            except:
                log.exception("Error getting name for fungible collectible asset {}".format(asset_contract_address))
                continue
            asset_name = decode_abi(['string'], data_decoder(name_data))
            try:
                asset_name = asset_name[0].decode('utf-8', errors='replace')
            except:
                log.exception("Invalid name for fungible collectible asset {}".format(asset_contract_address))
                continue
            try:
                creator_data = await self.eth.eth_call(
                    to_address=asset_contract_address, data=CREATOR_CALL_DATA)
            except:
                log.exception("Error getting creator for fungible collectible asset {}".format(asset_contract_address))
                continue
            asset_creator = decode_abi(['address'], data_decoder(creator_data))[0]
            try:
                total_supply_data = await self.eth.eth_call(
                    to_address=asset_contract_address, data=TOTAL_SUPPLY_CALL_DATA)
            except:
                log.exception("Error getting total supply for fungible collectible asset {}".format(asset_contract_address))
                continue
            total_supply = decode_abi(['uint256'], data_decoder(total_supply_data))[0]

            # owner is currently always the address that triggered the AssetCreate event
            tx = await self.eth.eth_getTransactionByHash(_log['transactionHash'])
            asset_owner = tx['from']

            asset_image = None
            asset_description = None
            parsed_uri = urlparse(asset_token_uri)
            # best-effort fetch of off-chain metadata (name/description/image)
            # when the tokenURI is an http(s) URL; failures are non-fatal
            if asset_token_uri and parsed_uri.netloc and parsed_uri.scheme in ['http', 'https']:
                try:
                    resp = await AsyncHTTPClient(max_clients=100).fetch(parsed_uri.geturl())
                    metadata = json_decode(resp.body)
                    # some metadata follows a json-schema-like layout where
                    # the fields live under "properties"
                    if "properties" in metadata:
                        metadata = metadata['properties']
                    # each field may be a plain string or a
                    # {"description": ...} wrapper
                    if 'name' in metadata:
                        if type(metadata['name']) == dict and 'description' in metadata['name']:
                            asset_name = metadata['name']['description']
                        elif type(metadata['name']) == str:
                            asset_name = metadata['name']
                    if 'description' in metadata:
                        if type(metadata['description']) == dict and 'description' in metadata['description']:
                            asset_description = metadata['description']['description']
                        elif type(metadata['description']) == str:
                            asset_description = metadata['description']
                    if 'image' in metadata:
                        if type(metadata['image']) == dict and 'description' in metadata['image']:
                            asset_image = metadata['image']['description']
                        elif type(metadata['image']) == str:
                            asset_image = metadata['image']
                except:
                    log.exception("Error getting token metadata for {}:{} from {}".format(
                        collectible_address, asset_contract_address, asset_token_uri))
                    pass

            # fall back to the collectible's configured image url template
            if asset_image is None:
                if collectible['image_url_format_string'] is not None:
                    asset_image = collectible['image_url_format_string'].format(
                        contract_address=asset_contract_address,
                        collectible_address=collectible_address,
                        name=asset_name,
                        token_uri=asset_token_uri,
                        creator_address=asset_creator)

            async with self.pool.acquire() as con:
                await con.execute(
                    "INSERT INTO fungible_collectibles (contract_address, collectible_address, name, description, token_uri, creator_address, last_block, image) "
                    "VALUES ($1, $2, $3, $4, $5, $6, $7, $8) "
                    "ON CONFLICT (contract_address) DO NOTHING",
                    asset_contract_address, collectible_address, asset_name,
                    asset_description, asset_token_uri, asset_creator,
                    log_block_number, asset_image)
                await con.execute(
                    "INSERT INTO fungible_collectible_balances (contract_address, owner_address, balance) "
                    "VALUES ($1, $2, $3)",
                    asset_contract_address, asset_owner, hex(total_supply))
            # kick off processing of the newly registered asset contract
            asyncio.get_event_loop().create_task(
                self.process_block_for_asset_contract(asset_contract_address))
    else:
        log.debug("No logs found for {} in blocks {}->{}".format(
            collectible_address, from_block_number, to_block_number))

    # the collectible is "ready" once we've caught up to the latest block
    ready = collectible['ready'] or to_block_number == latest_block_number
    async with self.pool.acquire() as con:
        await con.execute(
            "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
            to_block_number, ready, collectible_address)
    del self._processing[collectible_address]
    # reschedule if there are more blocks to scan or a re-run was requested
    # while we were working
    if to_block_number < latest_block_number or collectible_address in self._queue:
        self._queue.discard(collectible_address)
        asyncio.get_event_loop().create_task(
            self.process_block_for_asset_creation_contract(collectible_address))
from tornado.escape import json_decode from tornado.testing import gen_test from toshieth.test.base import EthServiceBaseTest, requires_full_stack from toshi.test.ethereum.faucet import FAUCET_PRIVATE_KEY, FAUCET_ADDRESS from toshi.ethereum.utils import private_key_to_address, data_decoder from toshi.ethereum.contract import Contract from ethereum.utils import sha3 ABC_TOKEN_ADDRESS = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf" YAC_TOKEN_ADDRESS = "0x9ab6c6111577c51da46e2c4c93a3622671578657" ARTTOKEN_CONTRACT = open(os.path.join(os.path.dirname(__file__), "arttokencreator.sol")).read().encode('utf-8') TEST_PRIVATE_KEY = data_decoder("0xe8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35") TEST_PRIVATE_KEY_2 = data_decoder("0x8945608e66736aceb34a83f94689b4e98af497ffc9dc2004a93824096330fa77") TEST_ADDRESS = private_key_to_address(TEST_PRIVATE_KEY) TEST_ADDRESS_2 = private_key_to_address(TEST_PRIVATE_KEY_2) TEST_APN_ID = "64be4fe95ba967bb533f0c240325942b9e1f881b5cd2982568a305dd4933e0bd" class FakeIPFSHandler(tornado.web.RequestHandler): def get(self, key): self.write({"properties": { "image": {"description": "http://{}.png".format(key)} }}) class ERC721Test(EthServiceBaseTest):
from tornado.testing import gen_test from tornado.escape import json_decode from toshieth.test.base import EthServiceBaseTest, requires_full_stack from toshi.test.ethereum.parity import FAUCET_ADDRESS from toshi.ethereum.utils import data_decoder from toshi.ethereum.tx import DEFAULT_GASPRICE TEST_PRIVATE_KEY = data_decoder( "0xe8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35") TEST_ADDRESS = "0x056db290f8ba3250ca64a45d16284d04bc6f5fbf" TEST_PRIVATE_KEY_2 = data_decoder( "0x0ffdb88a7a0a40831ca0b19bd31f3f6085764ef8b7db1bd6b57072e5eaea24ff") TEST_ADDRESS_2 = "0x35351b44e03ec8515664a955146bf9c6e503a381" class TransactionWhitelistTest(EthServiceBaseTest): @gen_test(timeout=15) @requires_full_stack async def test_gas_price_whitelist(self): gas_station_gas_price = 50000000000 custom_gas_price = 2000000000 assert (gas_station_gas_price != DEFAULT_GASPRICE) assert (custom_gas_price != DEFAULT_GASPRICE) assert (gas_station_gas_price != custom_gas_price) self.redis.set("gas_station_standard_gas_price", hex(gas_station_gas_price)) async with self.pool.acquire() as con: await con.execute(
async def create_transaction_skeleton(self, *, to_address, from_address, value=0,
                                      nonce=None, gas=None, gas_price=None, data=None):
    """Build an unsigned RLP-encoded transaction from JSON-RPC parameters.

    Validates every field, fills in defaults for the optional ones
    (nonce from the cached transaction count, gas via ``eth_estimateGas``,
    gas price from application config), and returns the encoded
    transaction produced by ``encode_transaction``.

    :param to_address: recipient address, or None for contract creation.
    :param from_address: sender address (required, validated).
    :param value: wei to transfer; parsed with ``parse_int``.
    :param nonce: optional explicit nonce.
    :param gas: optional gas limit.
    :param gas_price: optional gas price in wei.
    :param data: optional payload as bytes, hex string, or int.
    :raises JsonRPCInvalidParamsError: for any invalid field, or when gas
        estimation fails / the gas limit is below the intrinsic cost.
    """
    if not validate_address(from_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_from_address',
            'message': 'Invalid From Address'
        })
    # to_address may legitimately be None (contract deployment)
    if to_address is not None and not validate_address(to_address):
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_to_address',
            'message': 'Invalid To Address'
        })
    if value:
        value = parse_int(value)
        if value is None or value < 0:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_value',
                'message': 'Invalid Value'
            })
    # check optional arguments
    if nonce is None:
        # check cache for nonce
        nonce = await self.get_transaction_count(from_address)
    else:
        nonce = parse_int(nonce)
        if nonce is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_nonce',
                'message': 'Invalid Nonce'
            })
    if data is not None:
        # normalise data to bytes: int -> hex string -> bytes
        if isinstance(data, int):
            data = hex(data)
        if isinstance(data, str):
            try:
                data = data_decoder(data)
            except binascii.Error:
                # fall through to the isinstance check below, which raises
                pass
        if not isinstance(data, bytes):
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_data',
                'message': 'Invalid Data field'
            })
    else:
        data = b''
    if gas is None:
        try:
            gas = await self.eth.eth_estimateGas(from_address, to_address, data=data, value=value)
        except JsonRPCError:
            # this can occur if sending a transaction to a contract that doesn't match a valid method
            # and the contract has no default method implemented
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_data',
                'message': 'Invalid Data field'
            })
    else:
        gas = parse_int(gas)
        if gas is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas',
                'message': 'Invalid Gas'
            })
    if gas_price is None:
        gas_price = self.application.config['ethereum'].getint(
            'default_gasprice', DEFAULT_GASPRICE)
    else:
        gas_price = parse_int(gas_price)
        if gas_price is None:
            raise JsonRPCInvalidParamsError(data={
                'id': 'invalid_gas_price',
                'message': 'Invalid Gas Price'
            })
    try:
        tx = create_transaction(nonce=nonce, gasprice=gas_price, startgas=gas,
                                to=to_address, value=value, data=data,
                                network_id=self.network_id)
    except InvalidTransaction as e:
        raise JsonRPCInvalidParamsError(data={
            'id': 'invalid_transaction',
            'message': str(e)
        })
    # reject gas limits below the transaction's intrinsic cost up front,
    # rather than letting the node reject it later
    if tx.intrinsic_gas_used > gas:
        raise JsonRPCInvalidParamsError(
            data={
                'id': 'invalid_transaction',
                'message': 'Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.'
                .format(tx.intrinsic_gas_used, gas)
            })
    transaction = encode_transaction(tx)
    return transaction
async def from_source_code(cls, sourcecode, contract_name, constructor_data=None, *,
                           address=None, deployer_private_key=None, import_mappings=None,
                           libraries=None, optimize=False, deploy=True, cwd=None,
                           wait_for_confirmation=True):
    """Compile Solidity source with ``solc`` and return a Contract wrapper.

    Depending on the arguments, this either (a) just compiles and wraps the
    ABI (``deploy=False``), (b) attaches to an existing deployment
    (``address`` given, code presence verified), or (c) signs and broadcasts
    a deployment transaction with ``deployer_private_key``.

    :param sourcecode: solidity source bytes, or a path to a source file.
    :param contract_name: name of the contract within the source.
    :param constructor_data: constructor args as a list (required as a list,
        possibly empty, when deploying a new contract).
    :param address: address of an already-deployed instance to attach to.
    :param deployer_private_key: key used to sign the deployment tx.
    :param import_mappings: solc path remappings as (path, mapping) pairs.
    :param libraries: (name, address) pairs passed to ``solc --libraries``.
    :param optimize: pass ``--optimize`` to solc.
    :param deploy: if False, only compile and wrap the ABI.
    :param cwd: working directory for solc and file resolution.
    :param wait_for_confirmation: block until the deployment is mined and
        verify that code exists at the computed contract address.
    :raises TypeError: on invalid argument combinations.
    :raises Exception: on compile failure, missing code at ``address``,
        insufficient funds, or failed deployment.
    """
    if deploy:
        ethurl = get_url()
        if address is None and deployer_private_key is None:
            raise TypeError("requires either address or deployer_private_key")
        if address is None and not isinstance(constructor_data, (list, type(None))):
            raise TypeError(
                "must supply constructor_data as a list (hint: use [] if args should be empty)"
            )

    args = ['solc', '--combined-json', 'bin,abi']
    if libraries:
        args.extend([
            '--libraries',
            ','.join(['{}:{}'.format(*library) for library in libraries])
        ])
    if optimize:
        args.append('--optimize')
    if import_mappings:
        args.extend([
            "{}={}".format(path, mapping) for path, mapping in import_mappings
        ])
    # check if sourcecode is actually a filename
    if cwd:
        filename = os.path.join(cwd, sourcecode)
    else:
        filename = sourcecode
    if os.path.exists(filename):
        # compile from file; don't feed anything to stdin
        args.append(filename)
        sourcecode = None
    else:
        # compile from stdin; solc reports the unit as '<stdin>'
        filename = '<stdin>'
    process = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=cwd)
    output, stderrdata = process.communicate(input=sourcecode)
    try:
        output = json_decode(output)
    except json.JSONDecodeError:
        # compilation failed: surface solc's stdout/stderr in the error
        if output and stderrdata:
            output += b'\n' + stderrdata
        elif stderrdata:
            output = stderrdata
        raise Exception("Failed to compile source: {}\n{}\n{}".format(
            filename, ' '.join(args), output.decode('utf-8')))
    try:
        # solc keys contracts as "<filename>:<contract_name>"
        contract = output['contracts']['{}:{}'.format(filename, contract_name)]
    except KeyError:
        print(output)
        raise
    abi = json_decode(contract['abi'])

    # deploy contract
    translator = ContractTranslator(abi)
    # fix things that don't have a constructor

    if not deploy:
        return Contract(abi=abi, address=address, translator=translator)

    ethclient = JsonRPCClient(ethurl)

    if address is not None:
        # verify there is code at the given address, polling up to 10 times
        for i in range(10):
            code = await ethclient.eth_getCode(address)
            if code == "0x":
                await asyncio.sleep(1)
                continue
            break
        else:
            raise Exception("No code found at given address")
        return Contract(abi=abi, address=address, translator=translator)

    try:
        bytecode = data_decoder(contract['bin'])
    except binascii.Error:
        print(contract['bin'])
        raise
    if constructor_data is not None:
        # constructor args are ABI-encoded and appended to the bytecode
        constructor_call = translator.encode_constructor_arguments(constructor_data)
        bytecode += constructor_call

    if isinstance(deployer_private_key, str):
        deployer_private_key = data_decoder(deployer_private_key)
    deployer_address = private_key_to_address(deployer_private_key)
    nonce = await ethclient.eth_getTransactionCount(deployer_address)
    balance = await ethclient.eth_getBalance(deployer_address)

    gasprice = 20000000000
    value = 0

    startgas = await ethclient.eth_estimateGas(deployer_address, '', data=bytecode,
                                               nonce=nonce, value=0, gasprice=gasprice)

    if balance < (startgas * gasprice):
        raise Exception("Given account doesn't have enough funds")

    # empty 'to' address == contract creation
    tx = Transaction(nonce, gasprice, startgas, '', value, bytecode, 0, 0, 0)
    tx.sign(deployer_private_key)

    tx_encoded = data_encoder(rlp.encode(tx, Transaction))

    # the deployment address is deterministic (sender + nonce), so it can
    # be computed before the tx is mined
    contract_address = data_encoder(tx.creates)
    tx_hash = await ethclient.eth_sendRawTransaction(tx_encoded)

    # wait for the contract to be deployed
    while wait_for_confirmation:
        resp = await ethclient.eth_getTransactionByHash(tx_hash)
        if resp is None or resp['blockNumber'] is None:
            await asyncio.sleep(0.1)
        else:
            # mined: make sure deployment actually produced code
            code = await ethclient.eth_getCode(contract_address)
            if code == '0x':
                raise Exception(
                    "Failed to deploy contract: resulting address '{}' has no code"
                    .format(contract_address))
            break

    return Contract(abi=abi, address=contract_address, translator=translator,
                    creation_tx_hash=tx_hash)