def test_encode_decode_address():
    """Round-trip addresses through the ABI codec, singly and as arrays."""
    raw_addresses = [b'\x11' * 20, b'\x22' * 20, b'\x33' * 20]
    hex_addresses = [encode_hex(addr) for addr in raw_addresses]

    # A lone address survives an encode/decode round trip (decoded as hex).
    first = raw_addresses[0]
    assert decode_abi(['address'], encode_abi(['address'], [first]))[0] == encode_hex(first)

    # Encoding as a dynamic 'address[]' array yields the whole list back.
    packed_array = encode_abi(['address[]'], [raw_addresses])
    assert decode_abi(['address[]'], packed_array)[0] == hex_addresses

    # Encoding as three separate static slots also round-trips.
    triple_abi = ['address'] * 3
    packed_triple = encode_abi(triple_abi, raw_addresses)
    assert decode_abi(triple_abi, packed_triple) == hex_addresses
def listen(cls, log_, address=None, callback=None):
    """Decode *log_* if it matches this event class (and optional *address*).

    Builds a dict with the decoded indexed topic values, the event type
    name, and the decoded unindexed args, then either passes it to
    *callback* or prints it. Silently returns on any mismatch.
    """
    # Topic 0 is the event signature hash; bail out if it doesn't match.
    if not len(log_.topics) or log_.topics[0] != cls.event_id():
        return
    if address and address != log_.address:
        return
    # o = dict(address=log_.address)
    o = dict()
    # Topics 1..n hold the indexed arguments, one 256-bit word each.
    for i, t in enumerate(log_.topics[1:]):
        name = cls.args[i]['name']
        if cls.arg_types()[i] in ('string', 'bytes'):
            # Dynamic types: re-encode the topic integer without padding.
            assert t < 2**256, "error with {}, user bytes32".format(
                cls.args[i])
            d = encode_int(t)
        else:
            assert t < 2**256
            # Static types: decode from the 32-byte zero-padded form.
            d = zpad(encode_int(t), 32)
        data = abi.decode_abi([cls.arg_types()[i]], d)[0]
        o[name] = data
    o['event_type'] = cls.__name__
    # All non-indexed arguments live together in the log's data blob.
    unindexed_types = [a['type'] for a in cls.args if not a['indexed']]
    o['args'] = abi.decode_abi(unindexed_types, log_.data)
    if callback:
        callback(o)
    else:
        print(o)
def abi_decode_return_vals(method, data):
    """Decode a contract call's raw return payload for *method*.

    Returns None for void methods with empty payload, b'' for non-void
    methods with empty payload, a single value for a scalar return type,
    or a tuple/list of values for multiple return types.
    """
    assert issubclass(method.im_class, NativeABIContract)
    return_types = method.im_class._get_method_abi(method)['return_types']
    if len(data) == 0:
        # Empty payload: distinguish void methods from truncated results.
        return None if return_types is None else b''
    if isinstance(return_types, (list, tuple)):
        return abi.decode_abi(return_types, data)
    # Scalar return type: unwrap the single decoded value.
    return abi.decode_abi((return_types,), data)[0]
async def _get_token_details(self, contract_address):
    """Get token details from the contract's metadata endpoints.

    Issues balanceOf/name/symbol/decimals eth_calls in one bulk request
    and returns a (balance, name, symbol, decimals) tuple where each
    element may individually be None on decode failure; returns None
    outright if the bulk request itself fails.
    """
    # Batch all four eth_call requests into a single bulk RPC round trip.
    bulk = self.eth.bulk()
    # balanceOf(address): the argument is the caller's address left-padded
    # to 32 bytes; fall back to the zero address when no user id is set.
    balanceof_future = bulk.eth_call(to_address=contract_address, data="{}000000000000000000000000{}".format(
        ERC20_BALANCEOF_CALL_DATA,
        self.user_toshi_id[2:] if self.user_toshi_id is not None else "0000000000000000000000000000000000000000"))
    name_future = bulk.eth_call(to_address=contract_address, data=ERC20_NAME_CALL_DATA)
    sym_future = bulk.eth_call(to_address=contract_address, data=ERC20_SYMBOL_CALL_DATA)
    decimals_future = bulk.eth_call(to_address=contract_address, data=ERC20_DECIMALS_CALL_DATA)
    try:
        await bulk.execute()
    except:
        log.exception("failed getting token details")
        return None
    # Each field below is decoded best-effort: malformed or empty data
    # yields None for that field rather than failing the whole lookup.
    balance = data_decoder(unwrap_or(balanceof_future, "0x"))
    if balance and balance != "0x":
        try:
            balance = decode_abi(['uint256'], balance)[0]
        except:
            log.exception("Invalid erc20.balanceOf() result: {}".format(balance))
            balance = None
    else:
        balance = None
    name = data_decoder(unwrap_or(name_future, "0x"))
    if name and name != "0x":
        try:
            name = decode_abi(['string'], name)[0].decode('utf-8')
        except:
            log.exception("Invalid erc20.name() data: {}".format(name))
            name = None
    else:
        name = None
    symbol = data_decoder(unwrap_or(sym_future, "0x"))
    if symbol and symbol != "0x":
        try:
            symbol = decode_abi(['string'], symbol)[0].decode('utf-8')
        except:
            log.exception("Invalid erc20.symbol() data: {}".format(symbol))
            symbol = None
    else:
        symbol = None
    decimals = data_decoder(unwrap_or(decimals_future, "0x"))
    if decimals and decimals != "0x":
        try:
            decimals = decode_abi(['uint256'], decimals)[0]
        except:
            log.exception("Invalid erc20.decimals() data: {}".format(decimals))
            decimals = None
    else:
        decimals = None
    return balance, name, symbol, decimals
def decode_execute(self, txdata):
    """Decode a wallet execute() transaction's destination, value and token transfer."""
    # Drop the '0x' prefix plus the 4-byte function selector (10 hex chars).
    txdata = txdata[10:]
    # Decoding the full payload at once, pyethapp-style
    # (decode_abi(fndata['encode_types'], ...)), does not work here, so each
    # 32-byte argument slot is decoded on its own.
    raw = txdata.decode('hex')
    sent_to = decode_abi(['address'], raw[:32])[0]
    amount_in_wei = decode_abi(['uint256'], raw[32:64])[0]
    # Any embedded ERC20 transfer lives past byte offset 128 (hex offset 256).
    token_value = self.decode_token_transfer(txdata[256:], sent_to)
    return sent_to, amount_in_wei, token_value
def run_abi_test(params, mode):
    """Run one ABI encode/decode test case.

    Modes: FILL stores the encoded result in params and returns them;
    VERIFY asserts the stored result matches; TIME returns a dict of
    encode/decode wall-clock durations.
    """
    types, args = params['types'], params['args']
    out = abi.encode_abi(types, args)
    # Sanity: encoding must round-trip regardless of mode.
    assert abi.decode_abi(types, out) == args
    if mode == FILL:
        params['result'] = encode_hex(out)
        return params
    elif mode == VERIFY:
        assert params['result'] == encode_hex(out)
    elif mode == TIME:
        x = time.time()
        abi.encode_abi(types, args)
        y = time.time()
        # FIX: arguments were swapped (decode_abi(out, args)); decode the
        # encoded payload against the type list so the timing is meaningful.
        abi.decode_abi(types, out)
        return {'encoding': y - x, 'decoding': time.time() - y}
def _db_decode_type(cls, value_type, data):
    """Convert a DB-stored integer back into its ABI-typed value."""
    # Raw byte-ish types come back as the bare big-endian bytes.
    if value_type in ('string', 'bytes', 'binary'):
        return int_to_big_endian(data)
    # Addresses are fixed 20-byte values.
    if value_type == 'address':
        return zpad(int_to_big_endian(data), 20)
    # Everything else decodes from a zero-padded 32-byte ABI word.
    word = zpad(int_to_big_endian(data), 32)
    return abi.decode_abi([value_type], word)[0]
def revert_log(self, log):
    """Undo a previously-recorded transfer event log.

    Deletes the stored transfer row keyed by the log hash and applies the
    opposite balance increments of handle_log. No-op if the transfer was
    never recorded (or already reverted).
    """
    # 'removed' is node-added metadata; drop it so the log hash is stable.
    if log.has_key("removed"):
        del log['removed']
    hash = _utils.hash_log(log)
    transfer_table = FLAGS.token_prefix + self.type
    balance_table = FLAGS.balance_prefix + self.type
    # The unindexed event data carries the transferred value.
    data = log['data'][2:].decode('hex')
    data_params = self.data_params(self.abi, self.event)
    value = _abi.decode_abi(data_params, data)[0]
    # from/to addresses are the last 20 bytes (40 hex chars) of the topics.
    f = '0x' + log['topics'][1].lower()[26:]
    to = '0x' + log['topics'][2].lower()[26:]
    deleted_count = self.db_proxy.delete(transfer_table, {
        "hash": hash,
    }, multi=False).deleted_count
    if deleted_count == 0:
        # Nothing was stored for this log, so there is nothing to revert.
        self.logger.info("event log %s has been deleted, ignore it", hash)
        return
    # update balance: credit the sender back and debit the recipient,
    # reversing what handle_log applied.
    # TODO parse demical of token
    operation = {
        "$inc": {"balance": value * 1}
    }
    self.db_proxy.update(balance_table, {"account": f}, operation, upsert=True)
    operation2 = {
        "$inc": {"balance": value * -1}
    }
    self.db_proxy.update(balance_table, {"account": to}, operation2, upsert=True)
def run_abi_test(params, mode):
    """Run one ABI encode/decode test case.

    Modes: FILL stores the encoded result in params and returns them;
    VERIFY asserts the stored result matches; TIME returns a dict of
    encode/decode wall-clock durations.
    """
    types, args = params['types'], params['args']
    out = abi.encode_abi(types, args)
    # Sanity: encoding must round-trip regardless of mode.
    assert abi.decode_abi(types, out) == args
    if mode == FILL:
        params['result'] = encode_hex(out)
        return params
    elif mode == VERIFY:
        assert params['result'] == encode_hex(out)
    elif mode == TIME:
        x = time.time()
        abi.encode_abi(types, args)
        y = time.time()
        # FIX: arguments were swapped (decode_abi(out, args)); decode the
        # encoded payload against the type list so the timing is meaningful.
        abi.decode_abi(types, out)
        return {
            'encoding': y - x,
            'decoding': time.time() - y
        }
def call(self, address, sig, args, result_types):
    """Read-only contract call via the RPC server (no transaction is sent);
    useful for reading data."""
    encoded = self._encode_function(sig, args)
    raw_response = self.eth_call(to_address=address, data=encoded.encode('hex'))
    # Strip the '0x' prefix before hex-decoding the returned payload.
    return decode_abi(result_types, raw_response[2:].decode('hex'))
def call(self, address, sig, args, result_types):
    """Read-only contract call via the RPC server (no transaction is sent);
    useful for reading data."""
    encoded = self._encode_function(sig, args)
    response = self.eth_call(to_address=address, data=encode_hex(encoded))
    # The response carries a '0x' prefix; strip it before hex-decoding.
    return decode_abi(result_types, response[2:].decode('hex'))
def call(self, address, sig, args, result_types):
    """Read-only contract call via the RPC server (no transaction is sent);
    useful for reading data."""
    encoded = self._encode_function(sig, args)
    response = self.eth_call(to_address=address, data='0x' + encoded.hex())
    # Strip the '0x' prefix, then un-hex the payload for ABI decoding.
    return decode_abi(result_types, binascii.unhexlify(response[2:]))
def decode_multi(types, outputs):
    """Decode a multi-value ABI payload, '0x'-prefixing any address results."""
    decoded = abi.decode_abi(
        types,
        binascii.a2b_hex(strip_0x_prefix(outputs)),
    )
    # Addresses are normalized to a '0x'-prefixed form; other values pass through.
    return [
        "0x" + strip_0x_prefix(value) if abi_type == "address" else value
        for abi_type, value in zip(types, decoded)
    ]
def test_encode_decode_int():
    """Signed ints of widths 8/32/256 round-trip through the ABI codec and
    agree with encode_single."""
    base_values = [1, -1, 127, -128]
    width32_extra = [2**31 - 1, -2**31]
    width256_extra = [2**255 - 1, -2**255]
    cases = [
        ('int8', ('int', '8', []), base_values),
        ('int32', ('int', '32', []), base_values + width32_extra),
        ('int256', ('int', '256', []), base_values + width32_extra + width256_extra),
    ]
    for type_name, single_type, values in cases:
        for value in values:
            encoded = encode_abi([type_name], [value])
            # Single static types must encode identically via both entry points.
            assert encoded == encode_single(single_type, value)
            assert decode_abi([type_name], encoded)[0] == value
def call(self, address, sig, args, result_types):
    """Read-only contract call via the IPC server (no transaction is sent);
    useful for reading data."""
    encoded = self._encode_function(sig, args)
    # could be made to use web3py directly, but instead uses eth_call which is adapted
    raw = self.eth_call(to_address=address, data=encoded.encode('hex'))
    return decode_abi(result_types, raw[2:].decode('hex'))
def call(self, address, sig, args, result_types):
    """Read-only contract call via the RPC server (no transaction is sent);
    useful for reading data."""
    encoded = self._encode_function(sig, args)
    response = self.eth_call(to_address=address, data=hexlify(encoded))
    # XXX: horrible hack for when RPC returns '0x0'...
    expects_uint = len(result_types) == 0 or result_types[0] == 'uint256'
    if expects_uint and response == '0x0':
        response = '0x' + ('0' * 64)
    return decode_abi(result_types, unhexlify(response[2:]))
def decode_contract_call(contract_abi: list, call_data: str):
    """Resolve a raw call payload against a contract ABI and decode its args.

    Returns (method_name, decoded_args) for the first matching function
    entry, or None when no entry matches.
    """
    call_data_bin = decode_hex(call_data)
    # The first 4 bytes of the payload are the function selector.
    method_signature = call_data_bin[:4]
    for description in contract_abi:
        if description.get('type') != 'function':
            continue
        method_name = normalize_abi_method_name(description['name'])
        arg_types = [item['type'] for item in description['inputs']]
        method_id = get_abi_method_id(method_name, arg_types)
        if zpad(encode_int(method_id), 4) == method_signature:
            # FIX: guard the decode like the sibling implementations do —
            # invalid argument data for this candidate should not abort the
            # whole scan (decode_abi signals this via AssertionError).
            try:
                args = decode_abi(arg_types, call_data_bin[4:])
            except AssertionError:
                continue
            return method_name, args
    return None
def listen(cls, log, address=None, callback=None):
    """Decode *log* if it matches this event class (and optional *address*),
    then dispatch the decoded dict to *callback* or print it."""
    topics = log.topics
    # Topic 0 is the event signature hash; bail out on mismatch.
    if not len(topics) or topics[0] != cls.event_id():
        return
    if address and address != log.address:
        return
    decoded = {}
    # Topics 1..n hold the indexed arguments, one word each.
    for idx, topic in enumerate(topics[1:]):
        arg_type = cls.arg_types()[idx]
        if arg_type in ('string', 'bytes'):
            # Dynamic types: re-encode the topic integer without padding.
            raw = encode_int(topic)
        else:
            # Static types: decode from the 32-byte zero-padded form.
            raw = zpad(encode_int(topic), 32)
        decoded[cls.args[idx]['name']] = abi.decode_abi([arg_type], raw)[0]
    decoded['event_type'] = cls.__name__
    # All non-indexed arguments live together in the log's data blob.
    unindexed_types = [a['type'] for a in cls.args if not a['indexed']]
    decoded['args'] = abi.decode_abi(unindexed_types, log.data)
    if callback:
        callback(decoded)
    else:
        print(decoded)
def decode_token_transfer(self, txdata, to_address):
    """Decode an ERC20 transfer(address,uint256) payload.

    Returns the transferred value, or None when *txdata* is not a
    transfer call. Prints warnings for unknown tokens and transfers to
    non-whitehat addresses.
    """
    # 'a9059cbb' is the transfer(address,uint256) selector.
    if len(txdata) < 8 or txdata[:8] != 'a9059cbb':
        return None
    # get rid of signature; the rest is two 32-byte argument slots
    payload = txdata[8:].decode('hex')
    token_name = self.tokens.address_is_token(to_address)
    if token_name is None:
        print('WARNING: Unknown token {} transferred'.format(to_address))
        token_name = 'UNKNOWN'
    recipient = decode_abi(['address'], payload[:32])[0]
    value = decode_abi(['uint256'], payload[32:])[0]
    if address_is_whitehat(recipient) is None:
        print('WARNING: {} token sent to non-whitehat address?'.format(token_name))
    return value
def decode_event_data(topic, data):
    """Decode an event's data payload according to its topic signature,
    returning the list of converted argument values."""
    if isinstance(data, str):
        data = data_decoder(data)
    name, types = _process_topic(topic)
    values = decode_abi(types, data)
    arguments = []
    for type_str, value in zip(types, values):
        match = TYPES_RE.match(type_str)
        # Skip anything that doesn't parse as a recognized type string.
        if match is None:
            continue
        base_type, array_part = match.groups()
        if not array_part:
            arguments.append(_convert_type(base_type, value))
        else:
            # '[2][3]' -> ['2', '3']: per-dimension sizes for array conversion.
            dims = array_part[1:-1].split('][')
            arguments.append(_convert_array(base_type, dims, value))
    return arguments
def handle_log(self, log):
    """Record a transfer event log and apply its balance changes.

    Upserts the transfer keyed by its log hash (already-seen logs are
    ignored), then debits the sender and credits the recipient in the
    balance table.
    """
    # 'removed' is node-added metadata; drop it so the log hash is stable.
    if log.has_key("removed"):
        del log['removed']
    hash = _utils.hash_log(log)
    transfer_table = FLAGS.token_prefix + self.type
    balance_table = FLAGS.balance_prefix + self.type
    # The unindexed event data carries the transferred value.
    data = log['data'][2:].decode('hex')
    data_params = self.data_params(self.abi, self.event)
    value = _abi.decode_abi(data_params, data)[0]
    # from/to addresses are the last 20 bytes (40 hex chars) of the topics.
    f = '0x' + log['topics'][1].lower()[26:]
    to = '0x' + log['topics'][2].lower()[26:]
    operation = {
        "$set": {
            "hash": hash,
            "from": f,
            "to": to,
            "value": value,
            "transactionHash": log["transactionHash"],
            "logIndex": log["logIndex"],
            "block": int(log["blockNumber"], 16),
            "blockHash": log["blockHash"],
            "type": self.event
        }
    }
    # upserted_id is None when the row already existed, i.e. a duplicate log.
    objectId = self.db_proxy.update(transfer_table, {"hash": hash}, operation,
                                    multi=False, upsert=True).upserted_id
    if objectId is None:
        self.logger.info("event log %s has been add, ignore it", hash)
        return
    # update balance: debit the sender, credit the recipient.
    # TODO parse demical of token
    operation = {
        "$inc": {"balance": value * -1}
    }
    self.db_proxy.update(balance_table, {"account": f}, operation, upsert=True)
    operation2 = {
        "$inc": {"balance": value}
    }
    self.db_proxy.update(balance_table, {"account": to}, operation2, upsert=True)
def decode_contract_call(contract_abi, call_data):
    """Match *call_data* against *contract_abi* and decode its arguments.

    Returns (method_name, decoded_args) for the first matching function,
    or None when nothing matches.
    """
    normalized = call_data.lower().replace("0x", "")
    call_data_bin = decode_hex(normalized)
    # The first 4 bytes of the payload are the function selector.
    selector = call_data_bin[:4]
    for entry in contract_abi:
        if entry.get('type') != 'function':
            continue
        name = normalize_abi_method_name(entry['name'])
        arg_types = [item['type'] for item in entry['inputs']]
        candidate_id = get_abi_method_id(name, arg_types)
        if zpad(encode_int(candidate_id), 4) != selector:
            continue
        try:
            # TODO: ethereum.abi.decode_abi vs eth_abi.decode_abi
            args = decode_abi(arg_types, call_data_bin[4:])
        except AssertionError:
            # Invalid args for this candidate signature; keep scanning.
            continue
        return name, args
def decode_contract_call(self, contract_abi: list, TID: str):
    """Fetch transaction *TID* and decode its input against *contract_abi*.

    Returns (method_name, decoded_args) for the first matching function,
    or None when nothing matches.
    """
    transaction = self.w3.eth.getTransaction(TID)
    call_data_bin = decode_hex(str(transaction.input))
    # The first 4 bytes of the input are the function selector.
    selector = call_data_bin[:4]
    for entry in contract_abi:
        if entry.get('type') != 'function':
            continue
        name = normalize_abi_method_name(entry['name'])
        arg_types = [item['type'] for item in entry['inputs']]
        candidate_id = get_abi_method_id(name, arg_types)
        if zpad(encode_int(candidate_id), 4) != selector:
            continue
        try:
            args = decode_abi(arg_types, call_data_bin[4:])
        except AssertionError:
            # Invalid args for this candidate signature; keep scanning.
            continue
        return name, args
def call(self, address, sig, args, result_types):
    """Read-only contract call via the RPC server (no transaction is sent).

    Bytes results are decoded as UTF-8 when possible; otherwise every
    bytes result falls back to its hex representation.
    """
    encoded = self._encode_function(sig, args)
    response = self.eth_call(to_address=address, data='0x' + encode_hex(encoded))
    results = decode_abi(result_types, decode_hex(response[2:]))
    try:
        return [
            item.decode('utf-8') if (type(item) is bytes) else item
            for item in results
        ]
    except:
        # Non-UTF8 bytes somewhere in the results: hex-encode instead.
        return [
            encode_hex(item) if (type(item) is bytes) else item
            for item in results
        ]
def _safe_call(self): calldata = self._msg.data.extract_all() # get method m_id = big_endian_to_int(calldata[:4]) # first 4 bytes encode method_id m_abi = self._find_method(m_id) if not m_abi: # 404 method not found log.warn('method not found, calling default', methodid=m_id) return 1, self.gas, [] # no default methods supported # decode abi args args = abi.decode_abi(m_abi['arg_types'], calldata[4:]) # call (unbound) method method = m_abi['method'] log.debug('calling', method=method.__name__, _args=args) try: res = method(self, *args) except RuntimeError as e: log.warn("error in method", method=method.__name__, error=e) return 0, self.gas, [] log.debug('call returned', result=res) return 1, self.gas, memoryview(abi_encode_return_vals(method, res)).tolist()
def contract_instant_call(
        self,
        to_address,
        function_signature,
        function_parameters=None,
        result_types=None,
        default_block=BLOCK_TAG_LATEST,
):
    """Make an instant (read-only) call on a contract function without
    needing the contract source code.

    Solidity signature examples:
      mult(uint x, uint y)            -> 'mult(uint256,uint256)'
                                         (all uint become uint256)
      setAddress(address entity)      -> 'setAddress(address)'
      doSomething()                   -> 'doSomething()' (no-parameter
                                         functions still end with '()')

    In Serpent, all parameter signatures are int256, e.g.
      setXYZ(x, y, z) -> 'setXYZ(int256,int256,int256)'.
    """
    encoded = self._encode_function(function_signature, function_parameters)
    call_params = [
        {"to": to_address, "data": "0x{0}".format(encoded.encode("hex"))},
        default_block,
    ]
    raw = self._call("eth_call", call_params)
    # Strip the '0x' prefix before hex-decoding the returned payload.
    return decode_abi(result_types, raw[2:].decode("hex"))
def _safe_call(self): calldata = self._msg.data.extract_all() # get method m_id = big_endian_to_int( calldata[:4]) # first 4 bytes encode method_id m_abi = self._find_method(m_id) if not m_abi: # 404 method not found log.warn('method not found, calling default', methodid=m_id) return 1, self.gas, [] # no default methods supported # decode abi args args = abi.decode_abi(m_abi['arg_types'], calldata[4:]) # call (unbound) method method = m_abi['method'] log.debug('calling', method=method.__name__, _args=args) try: res = method(self, *args) except RuntimeError as e: log.warn("error in method", method=method.__name__, error=e) return 0, self.gas, [] log.debug('call returned', result=res) return 1, self.gas, memoryview(abi_encode_return_vals(method, res)).tolist()
def _decode_input(contract_abi, call_data):
    """
    Decode the input data of a transaction according to a contract ABI.

    Solution from
    https://ethereum.stackexchange.com/questions/20897/how-to-decode-input-data-from-tx-using-python3?rq=1

    Parameters
    ----------
    contract_abi : list
        List of contract methods specifications
    call_data : str
        Input of transaction in a form of 0x(4 bytes of method)(arguments),
        i.e. 0x12345678000000000000....

    Returns
    -------
    dict
        Name and parsed parameters extracted from the input
        None, if there is no such method in ABI, or there was a problem
        with method arguments
    """
    call_data_bin = decode_hex(call_data)
    # First 4 bytes of the payload select the method.
    selector = call_data_bin[:4]
    for entry in contract_abi:
        if entry.get('type') not in ['function', 'event']:
            continue
        name = normalize_abi_method_name(entry['name'])
        arg_types = [item['type'] for item in entry['inputs']]
        if zpad(encode_int(get_abi_method_id(name, arg_types)), 4) != selector:
            continue
        try:
            values = decode_abi(arg_types, call_data_bin[4:])
            args = [{'type': t, 'value': str(v)}
                    for t, v in zip(arg_types, values)]
        except AssertionError:
            # Invalid arguments for this candidate; keep scanning.
            continue
        return {
            'name': name,
            'params.type': [arg["type"] for arg in args],
            'params.value': [arg["value"] for arg in args]
        }
async def update_transaction(self, transaction_id, status, retry_start_time=0):
    """Transition a stored transaction (and its token transfers) to *status*.

    For 'confirmed' it verifies the confirmation on the node (retrying for
    up to ~60s via the dispatcher), validates any ERC20 transfers against
    the receipt logs, persists the new state, and finally sends SOFA
    payment push notifications to the affected addresses.
    """
    async with self.db:
        tx = await self.db.fetchrow(
            "SELECT * FROM transactions WHERE transaction_id = $1",
            transaction_id)
        # Nothing to do for unknown transactions or no-op status changes.
        if tx is None or tx['status'] == status:
            return
        token_txs = await self.db.fetch(
            "SELECT tok.symbol, tok.name, tok.decimals, tx.contract_address, tx.value, tx.from_address, tx.to_address, tx.transaction_log_index, tx.status "
            "FROM token_transactions tx "
            "JOIN tokens tok "
            "ON tok.contract_address = tx.contract_address "
            "WHERE tx.transaction_id = $1", transaction_id)

    # check if we're trying to update the state of a tx that is already
    # confirmed — if so we have an issue; confirmed is terminal.
    if tx['status'] == 'confirmed':
        log.warning(
            "Trying to update status of tx {} to {}, but tx is already confirmed"
            .format(tx['hash'], status))
        return

    # only log if the transaction is internal (has a stored signature 'v')
    if tx['v'] is not None:
        log.info("Updating status of tx {} to {} (previously: {})".format(
            tx['hash'], status, tx['status']))

    if status == 'confirmed':
        # Fetch the transaction and its receipt from the node in one bulk
        # request; best-effort — failures leave both as None.
        try:
            bulk = self.eth.bulk()
            transaction = bulk.eth_getTransactionByHash(tx['hash'])
            tx_receipt = bulk.eth_getTransactionReceipt(tx['hash'])
            await bulk.execute()
            transaction = transaction.result()
            tx_receipt = tx_receipt.result()
        except:
            log.exception("Error getting transaction: {}".format(tx['hash']))
            transaction = None
            tx_receipt = None

        # A non-null blockNumber means the node sees the tx as mined.
        if transaction and 'blockNumber' in transaction and transaction['blockNumber'] is not None:
            if retry_start_time > 0:
                log.info("successfully confirmed tx {} after {} seconds".format(
                    tx['hash'], round(time.time() - retry_start_time, 2)))

            token_tx_updates = []
            updated_token_txs = []
            for token_tx in token_txs:
                from_address = token_tx['from_address']
                to_address = token_tx['to_address']

                # check transaction receipt to make sure the transfer was
                # successful: look for a matching Transfer (or WETH
                # deposit/withdrawal) event in the receipt logs.
                has_transfer_event = False
                token_tx_status = 'confirmed'
                if tx_receipt['logs'] is not None:  # should always be [], but checking just incase
                    for _log in tx_receipt['logs']:
                        if len(_log['topics']) > 0 and _log['topics'][0] == TRANSFER_TOPIC:
                            # Indexed form: from/to in topics, value in data.
                            if len(_log['topics']) == 3 and len(_log['data']) == 66 and \
                               decode_single_address(_log['topics'][1]) == from_address and \
                               decode_single_address(_log['topics'][2]) == to_address:
                                has_transfer_event = True
                                break
                            # Unindexed form: all three args packed in data.
                            elif len(_log['topics']) == 1 and len(_log['data']) == 194:
                                erc20_from_address, erc20_to_address, erc20_value = decode_abi(
                                    ['address', 'address', 'uint256'],
                                    data_decoder(_log['data']))
                                if erc20_from_address == from_address and \
                                   erc20_to_address == to_address:
                                    has_transfer_event = True
                                    break
                        elif _log['address'] == WETH_CONTRACT_ADDRESS:
                            # WETH wrap/unwrap events count as transfers too.
                            if _log['topics'][0] == DEPOSIT_TOPIC and decode_single_address(_log['topics'][1]) == to_address:
                                has_transfer_event = True
                                break
                            elif _log['topics'][0] == WITHDRAWAL_TOPIC and decode_single_address(_log['topics'][1]) == from_address:
                                has_transfer_event = True
                                break
                    if not has_transfer_event:
                        # there was no Transfer event matching this
                        # transaction; this means something went wrong
                        token_tx_status = 'error'
                    else:
                        erc20_dispatcher.update_token_cache(
                            token_tx['contract_address'], from_address, to_address,
                            blocknumber=parse_int(transaction['blockNumber']))
                else:
                    log.error(
                        "Unexpectedly got null for tx receipt logs for tx: {}"
                        .format(tx['hash']))
                    token_tx_status = 'error'

                token_tx_updates.append(
                    (token_tx_status, tx['transaction_id'],
                     token_tx['transaction_log_index']))
                # Keep an updated copy for the notification phase below.
                token_tx = dict(token_tx)
                token_tx['status'] = token_tx_status
                updated_token_txs.append(token_tx)

            token_txs = updated_token_txs
            blocknumber = parse_int(transaction['blockNumber'])
            async with self.db:
                await self.db.execute(
                    "UPDATE transactions SET status = $1, blocknumber = $2, updated = (now() AT TIME ZONE 'utc') "
                    "WHERE transaction_id = $3", status, blocknumber,
                    transaction_id)
                if token_tx_updates:
                    await self.db.executemany(
                        "UPDATE token_transactions SET status = $1 "
                        "WHERE transaction_id = $2 AND transaction_log_index = $3",
                        token_tx_updates)
                await self.db.commit()
        else:
            # this is probably because the node hasn't caught up with the
            # latest block yet, retry in a "bit" (but only retry up to 60
            # seconds)
            if retry_start_time > 0 and time.time() - retry_start_time >= 60:
                if transaction is None:
                    log.error(
                        "requested transaction {}'s status to be set to confirmed, but cannot find the transaction"
                        .format(tx['hash']))
                else:
                    log.error(
                        "requested transaction {}'s status to be set to confirmed, but transaction is not confirmed on the node"
                        .format(tx['hash']))
                return
            await asyncio.sleep(random.random())
            manager_dispatcher.update_transaction(
                transaction_id, status,
                retry_start_time=retry_start_time or time.time())
            return
    else:
        async with self.db:
            await self.db.execute(
                "UPDATE transactions SET status = $1, updated = (now() AT TIME ZONE 'utc') WHERE transaction_id = $2",
                status, transaction_id)
            await self.db.commit()

    # render notification
    # don't send "queued"
    if status == 'queued':
        status = 'unconfirmed'
    elif status == 'unconfirmed' and tx['status'] == 'queued':
        # there's already been a tx for this so no need to send another
        return

    messages = []
    # check if this is an erc20 transaction, if so use those values
    if token_txs:
        for token_tx in token_txs:
            token_tx_status = token_tx['status']
            from_address = token_tx['from_address']
            to_address = token_tx['to_address']
            # TokenPayment PNs are not shown at the moment, so i'm removing
            # this for the time being until they're required
            # if token_tx_status == 'confirmed':
            #     data = {
            #         "txHash": tx['hash'],
            #         "fromAddress": from_address,
            #         "toAddress": to_address,
            #         "status": token_tx_status,
            #         "value": token_tx['value'],
            #         "contractAddress": token_tx['contract_address']
            #     }
            #     messages.append((from_address, to_address, token_tx_status, "SOFA::TokenPayment: " + json_encode(data)))

            # if a WETH deposit or withdrawal, we need to let the client know to
            # update their ETHER balance using a normal SOFA:Payment
            if token_tx['contract_address'] == WETH_CONTRACT_ADDRESS and (
                    from_address == "0x0000000000000000000000000000000000000000"
                    or to_address == "0x0000000000000000000000000000000000000000"):
                payment = SofaPayment(
                    value=parse_int(token_tx['value']),
                    txHash=tx['hash'],
                    status=status,
                    fromAddress=from_address,
                    toAddress=to_address,
                    networkId=config['ethereum']['network_id'])
                messages.append(
                    (from_address, to_address, status, payment.render()))
    else:
        from_address = tx['from_address']
        to_address = tx['to_address']
        payment = SofaPayment(value=parse_int(tx['value']),
                              txHash=tx['hash'],
                              status=status,
                              fromAddress=from_address,
                              toAddress=to_address,
                              networkId=config['ethereum']['network_id'])
        messages.append(
            (from_address, to_address, status, payment.render()))

    # figure out what addresses need pns
    # from address always needs a pn
    for from_address, to_address, status, message in messages:
        manager_dispatcher.send_notification(from_address, message)

        # no need to check to_address for contract deployments
        if to_address == "0x":
            # TODO: update any notification registrations to be marked as a contract
            return

        # check if this is a brand new tx with no status
        if tx['status'] == 'new':
            # if an error has happened before any PNs have been sent
            # we only need to send the error to the sender, thus we
            # only add 'to' if the new status is not an error
            if status != 'error':
                manager_dispatcher.send_notification(to_address, message)
        else:
            manager_dispatcher.send_notification(to_address, message)

        # trigger a processing of the to_address's queue incase it has
        # things waiting on this transaction
        manager_dispatcher.process_transaction_queue(to_address)
def decode_multi(types, outputs):
    """Decode a (possibly '0x'-prefixed) hex ABI payload into its values."""
    payload = binascii.a2b_hex(strip_0x_prefix(outputs))
    return abi.decode_abi(types, payload)
async def process_block_for_contract(self, collectible_address):
    """Scan up to 1000 new blocks of transfer events for one collectible
    contract and update token ownership records.

    Re-entrancy is guarded via self._processing; on completion the
    collectible's last_block/ready markers are advanced and, if more
    blocks remain, another scan for the same collectible is scheduled.
    """
    if collectible_address in self._processing:
        return
    self._processing[collectible_address] = True
    async with self.pool.acquire() as con:
        latest_block_number = await con.fetchval(
            "SELECT blocknumber FROM last_blocknumber")
        collectible = await con.fetchrow("SELECT * FROM collectibles WHERE contract_address = $1",
                                         collectible_address)
        if collectible['type'] == 1:
            events = await con.fetch("SELECT * FROM collectible_transfer_events "
                                     "WHERE collectible_address = $1",
                                     collectible_address)
        elif collectible['type'] == 721:
            # use default erc721 event
            # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
            events = [{
                'collectible_address': collectible_address,
                'contract_address': collectible_address,
                'name': 'Transfer',
                'topic_hash': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                'arguments': ['address', 'address', 'uint256'],
                'indexed_arguments': [True, True, False],
                'to_address_offset': 1,
                'token_id_offset': 2
            }]
        else:
            # FIX: previously an unknown type left `events` unbound and the
            # processing flag set, raising NameError and wedging the entry.
            log.error("Unknown collectible type {} for {}".format(
                collectible['type'], collectible_address))
            del self._processing[collectible_address]
            return
    from_block_number = collectible['last_block'] + 1
    if latest_block_number < from_block_number:
        del self._processing[collectible_address]
        return
    to_block_number = min(from_block_number + 1000, latest_block_number)
    updates = {}
    for event in events:
        contract_address = event['contract_address']
        # Retry eth_getLogs until it succeeds.
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number, toBlock=to_block_number,
                    topics=[[event['topic_hash']]],
                    address=contract_address)
                break
            except:
                log.exception("error getting logs for block")
                continue
        if len(logs):
            for _log in logs:
                # Indexed argument values live in topics 1..n; the rest are
                # ABI-packed in the data field.
                indexed_data = _log['topics'][1:]
                data_types = [t for t, i in zip(event['arguments'], event['indexed_arguments'])
                              if i is False]
                try:
                    data = decode_abi(data_types, data_decoder(_log['data']))
                except:
                    log.exception("Error decoding log data: {} {}".format(
                        data_types, _log['data']))
                    del self._processing[collectible_address]
                    return
                # Interleave indexed and unindexed values back into the
                # event's declared argument order.
                arguments = []
                try:
                    for t, i in zip(event['arguments'], event['indexed_arguments']):
                        if i is True:
                            arguments.append(decode_single(process_type(t),
                                                           data_decoder(indexed_data.pop(0))))
                        else:
                            arguments.append(data.pop(0))
                except:
                    log.exception("Error compiling event data")
                to_address = arguments[event['to_address_offset']]
                token_id = parse_int(arguments[event['token_id_offset']])
                log.debug("{} #{} -> {} -> {}".format(
                    collectible['name'], token_id, event['name'], to_address))
                token_image = config['collectibles']['image_format'].format(
                    contract_address=collectible_address,
                    token_id=token_id)
                # Later events for the same token overwrite earlier ones.
                updates[hex(token_id)] = (collectible_address, hex(token_id),
                                          to_address, token_image)
    if len(updates) > 0:
        async with self.pool.acquire() as con:
            await con.executemany(
                "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, image) "
                "VALUES ($1, $2, $3, $4) "
                "ON CONFLICT (contract_address, token_id) DO UPDATE "
                "SET owner_address = EXCLUDED.owner_address",
                list(updates.values()))
    ready = collectible['ready'] or to_block_number == latest_block_number
    self.last_block = to_block_number
    async with self.pool.acquire() as con:
        await con.execute("UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
                          to_block_number, ready, collectible_address)
    del self._processing[collectible_address]
    #log.info("Processed blocks #{} -> #{} for {} in {} seconds".format(
    #    from_block_number, to_block_number, collectible['name'], time.time() - starttime))
    if to_block_number < latest_block_number:
        # FIX: this re-scheduled with `contract_address` (the last event's
        # contract), which diverges from the collectible being processed
        # whenever a type-1 collectible's events point at other contracts.
        asyncio.ensure_future(self.process_block_for_contract(collectible_address))
def abi_decode_args(method, data):
    """Decode a call payload (method_id already stripped) into *method*'s
    argument values."""
    contract_cls = method.im_class
    assert issubclass(contract_cls, NativeABIContract), contract_cls
    arg_types = contract_cls._get_method_abi(method)['arg_types']
    return abi.decode_abi(arg_types, data)
def test_abi_encode_signed_int():
    """int8 values of both signs survive an encode/decode round trip."""
    for value in (1, -1):
        encoded = abi.encode_abi(['int8'], [value])
        assert abi.decode_abi(['int8'], encoded)[0] == value
def decodeABI(tinput, sig='setNewUserState(string,bytes,string)'):
    """Decode a setNewUserState transaction input.

    Returns the decoded (string, bytes, string) tuple, or None when the
    payload's selector does not match *sig*.
    """
    body = tinput[2:]  # strip the '0x' prefix
    selector = utils.sha3(sig)[:4].encode('hex')
    if body[:8] != selector:
        return None
    return decode_abi(['string', 'bytes', 'string'], body[8:].decode('hex'))
def decode_abi(types, data):
    """Thin wrapper around abi.decode_abi: decode *data* per the *types* list."""
    return abi.decode_abi(types, data)
def decodeABI(tinput, sig='uploadEssay(bytes,string,string)'):
    """Decode an uploadEssay transaction input.

    Returns the decoded (bytes, string, string) tuple, or None when the
    payload's selector does not match *sig*.
    """
    body = tinput[2:]  # strip the '0x' prefix
    selector = utils.sha3(sig)[:4].encode('hex')
    if body[:8] != selector:
        return None
    return decode_abi(['bytes', 'string', 'string'], body[8:].decode('hex'))
def decode_multi(types, outputs):
    """Decode a '0x'-prefixed hex ABI payload into its values."""
    raw = binascii.a2b_hex(outputs[2:])
    return abi.decode_abi(types, raw)
async def process_block_for_contract(self, collectible_address):
    """Index up to 1000 blocks of Transfer events for one collectible contract.

    Reads the contract's event config from the DB, pulls matching logs from
    the node, decodes owner/token-id from each log, fetches per-token
    metadata (tokenURI) for tokens not yet seen, and upserts ownership rows.
    Reschedules itself if more blocks remain.

    NOTE(review): source was whitespace-collapsed; indentation here is
    reconstructed — confirm against the original file.
    """
    # Re-entrancy guard: only one indexing pass per contract at a time.
    if collectible_address in self._processing:
        log.warning("Already processing {}".format(collectible_address))
        return
    self._processing[collectible_address] = True
    async with self.pool.acquire() as con:
        latest_block_number = await con.fetchval(
            "SELECT blocknumber FROM last_blocknumber")
        collectible = await con.fetchrow(
            "SELECT * FROM collectibles WHERE contract_address = $1",
            collectible_address)
        if collectible is None:
            log.error(
                "Unable to find collectible with contract_address {}".
                format(collectible_address))
            # Every early-return path must clear the processing flag.
            del self._processing[collectible_address]
            return
        if collectible['type'] == 1:
            # type 1: custom event definitions stored per-contract in the DB.
            events = await con.fetch(
                "SELECT * FROM collectible_transfer_events "
                "WHERE collectible_address = $1", collectible_address)
        elif collectible['type'] == 721:
            # use default erc721 event
            # https://github.com/ethereum/EIPs/blob/master/EIPS/eip-721.md
            events = [{
                'collectible_address': collectible_address,
                'contract_address': collectible_address,
                'name': 'Transfer',
                # keccak256("Transfer(address,address,uint256)")
                'topic_hash': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
                'arguments': ['address', 'address', 'uint256'],
                'indexed_arguments': [True, True, False],
                'to_address_offset': 1,
                'token_id_offset': 2
            }]
    from_block_number = collectible['last_block'] + 1
    if latest_block_number < from_block_number:
        del self._processing[collectible_address]
        log.info(
            "Aborting {} because latest block number < collectible's next block"
            .format(collectible_address))
        return
    # Process at most 1000 blocks per pass; the tail is handled by the
    # rescheduling at the bottom of this method.
    to_block_number = min(from_block_number + 1000, latest_block_number)
    updates = {}
    for event in events:
        contract_address = event['contract_address']
        while True:
            try:
                logs = await self.eth.eth_getLogs(
                    fromBlock=from_block_number,
                    toBlock=to_block_number,
                    topics=[[event['topic_hash']]],
                    address=contract_address)
                break
            except:
                # Deliberate best-effort retry; NOTE(review): retries forever
                # with no backoff on a persistently failing node.
                log.exception("error getting logs for block")
                continue
        if len(logs):
            for _log in logs:
                # topics[0] is the event signature hash; the rest are the
                # indexed argument values.
                indexed_data = _log['topics'][1:]
                # Non-indexed argument types live in the data field.
                data_types = [
                    t for t, i in zip(event['arguments'],
                                      event['indexed_arguments'])
                    if i is False
                ]
                try:
                    data = decode_abi(data_types, data_decoder(_log['data']))
                except:
                    log.exception("Error decoding log data: {} {}".format(
                        data_types, _log['data']))
                    del self._processing[collectible_address]
                    return
                # Rebuild the full argument list in declaration order,
                # pulling indexed values from topics and the rest from data.
                arguments = []
                try:
                    for t, i in zip(event['arguments'],
                                    event['indexed_arguments']):
                        if i is True:
                            arguments.append(
                                decode_single(
                                    process_type(t),
                                    data_decoder(indexed_data.pop(0))))
                        else:
                            arguments.append(data.pop(0))
                except:
                    log.exception("Error compiling event data")
                    log.info("EVENT: {}".format(event))
                    log.info("LOG: {}".format(_log))
                    del self._processing[collectible_address]
                    return
                to_address = arguments[event['to_address_offset']]
                token_id = parse_int(arguments[event['token_id_offset']])
                log.debug("{} #{} -> {} -> {}".format(
                    collectible['name'], token_id, event['name'], to_address))
                # Keyed by hex token id: later transfers of the same token in
                # this block range overwrite earlier ones (final owner wins).
                updates[hex(token_id)] = (collectible_address, hex(token_id),
                                          to_address)
    if len(updates) > 0:
        new_tokens = []
        for token_id in list(updates.keys()):
            async with self.pool.acquire() as con:
                token = await con.fetchrow(
                    "SELECT * FROM collectible_tokens WHERE contract_address = $1 AND token_id = $2",
                    collectible_address, token_id)
            if token is None:
                # get token details
                token_uri = None
                # tokenURI(uint256) call; token_id is a hex string here.
                token_uri_data = await self.eth.eth_call(
                    to_address=collectible_address,
                    data="{}{:064x}".format(TOKEN_URI_CALL_DATA,
                                            int(token_id, 16)))
                if token_uri_data and token_uri_data != "0x":
                    try:
                        token_uri = decode_abi(
                            ['string'],
                            data_decoder(token_uri_data))[0].decode(
                                'utf-8', errors='replace')
                    except:
                        log.exception("Error decoding tokenURI data")
                token_image = None
                token_name = None
                token_description = None
                # if token_uri points to a valid url check if it points to
                # json (for the erc721 metadata)
                parsed_uri = urlparse(token_uri)
                if token_uri and parsed_uri.netloc and parsed_uri.scheme in [
                        'http', 'https'
                ]:
                    try:
                        resp = await AsyncHTTPClient(
                            max_clients=100).fetch(parsed_uri.geturl())
                        metadata = json_decode(resp.body)
                        if "properties" in metadata:
                            metadata = metadata['properties']
                        # Each field may be either a plain string or a
                        # json-schema style {"description": ...} object.
                        if 'name' in metadata:
                            if type(metadata['name']
                                    ) == dict and 'description' in metadata[
                                        'name']:
                                token_name = metadata['name']['description']
                            elif type(metadata['name']) == str:
                                token_name = metadata['name']
                        if 'description' in metadata:
                            if type(metadata['description']
                                    ) == dict and 'description' in metadata[
                                        'description']:
                                token_description = metadata['description'][
                                    'description']
                            elif type(metadata['description']) == str:
                                token_description = metadata['description']
                        if 'image' in metadata:
                            if type(metadata['image']
                                    ) == dict and 'description' in metadata[
                                        'image']:
                                token_image = metadata['image']['description']
                            elif type(metadata['image']) == str:
                                token_image = metadata['image']
                    except:
                        # Best-effort: metadata failures never block indexing.
                        log.exception(
                            "Error getting token metadata for {}:{} from {}"
                            .format(collectible_address, token_id, token_uri))
                        pass
                if not token_image:
                    # Fall back to the contract-specific or global image URL
                    # template when metadata supplied no image.
                    if collectible['image_url_format_string'] is not None:
                        image_format_string = collectible[
                            'image_url_format_string']
                    else:
                        image_format_string = config['collectibles'][
                            'image_format']
                    token_image = image_format_string.format(
                        contract_address=collectible_address,
                        token_id_hex=token_id,
                        token_id_int=int(token_id, 16),
                        token_uri=token_uri)
                # Move this token from the plain-update set to the
                # full-detail insert set.
                new_token = updates.pop(token_id, ()) + (
                    token_uri, token_name, token_description, token_image)
                new_tokens.append(new_token)
        async with self.pool.acquire() as con:
            if len(new_tokens) > 0:
                # NOTE(review): no ON CONFLICT clause here — assumes tokens in
                # new_tokens were confirmed absent above; a concurrent insert
                # would raise. Confirm this is intended.
                await con.executemany(
                    "INSERT INTO collectible_tokens (contract_address, token_id, owner_address, token_uri, name, description, image) "
                    "VALUES ($1, $2, $3, $4, $5, $6, $7)", new_tokens)
            await con.executemany(
                "INSERT INTO collectible_tokens (contract_address, token_id, owner_address) "
                "VALUES ($1, $2, $3) "
                "ON CONFLICT (contract_address, token_id) DO UPDATE "
                "SET owner_address = EXCLUDED.owner_address",
                list(updates.values()))
    # 'ready' flips true once indexing has caught up to the chain head.
    ready = collectible['ready'] or to_block_number == latest_block_number
    self.last_block = to_block_number
    async with self.pool.acquire() as con:
        await con.execute(
            "UPDATE collectibles SET last_block = $1, ready = $2 WHERE contract_address = $3",
            to_block_number, ready, collectible_address)
    del self._processing[collectible_address]
    # More blocks remain: schedule another pass for this contract.
    if to_block_number < latest_block_number:
        asyncio.get_event_loop().create_task(
            self.process_block_for_contract(collectible_address))
def decodeABI(tinput, sig, returnVals):
    """Decode the arguments of a transaction input against a given signature.

    tinput:     '0x'-prefixed hex string of the transaction's data field.
    sig:        canonical method signature, e.g. 'foo(uint256,address)'.
    returnVals: list of ABI type strings to decode the payload with.

    Returns the decoded tuple, or None when the 4-byte selector in `tinput`
    does not match `sig`.
    """
    # Renamed locals: the originals shadowed the `hash` builtin and the
    # module-level `abi` module.
    payload = tinput[2:]  # strip the '0x' prefix
    # Method selector is the first 4 bytes of keccak256(signature).
    method_id = utils.sha3(sig)[:4].encode('hex')
    if payload[:8] != method_id:
        return None
    return decode_abi(returnVals, payload[8:].decode('hex'))
def test_encode_decode_bool():
    """Booleans must round-trip through ABI encode/decode as identical singletons."""
    for flag in (True, False):
        assert decode_abi(['bool'], encode_abi(['bool'], [flag]))[0] is flag