def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0x7e8881001566f9f89aedb9c5dc3d856a2b81e5235a8196413ed484be91cc0df6": event_data = get_event_data(codec, self.kick_abi, event) return Flopper.KickLog(event_data) else: event_data = get_event_data(codec, self.log_note_abi, event) return LogNote(event_data)
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0xc84ce3a1172f0dec3173f04caaa6005151a4bfe40d4c9f3ea28dba5f719b2a7a": event_data = get_event_data(codec, self.kick_abi, event) return Flipper.KickLog(event_data) else: event_data = get_event_data(codec, self.log_note_abi, event) return LogNote(event_data)
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0xe6dde59cbc017becba89714a037778d234a84ce7f0a137487142a007e580d609": event_data = get_event_data(codec, self.kick_abi, event) return Flapper.KickLog(event_data) else: event_data = get_event_data(codec, self.log_note_abi, event) return LogNote(event_data)
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0x9102bd0b66dcb83f469f1122a583dc797657b114141460c59230fc1b41f48229": event_data = get_event_data(codec, self.start_auction_abi, event) return DebtAuctionHouse.StartAuctionLog(event_data) elif signature == "0x8c63feacc784a7f735e454365ba433f17d17293b02c57d98dad113977dbf0f13": event_data = get_event_data(codec, self.decrease_sold_amount_abi, event) return DebtAuctionHouse.DecreaseSoldAmountLog(event_data) elif signature == "0xef063949eb6ef5abef19139d9c75a558424ffa759302cfe445f8d2d327376fe4": event_data = get_event_data(codec, self.settle_auction_abi, event) return DebtAuctionHouse.SettleAuctionLog(event_data)
def populate_volume(infos: List[ExchangeInfo]) -> List[ExchangeInfo]:
    for info in infos:
        volume = list()
        info.volume = list()
        exchange = web3.eth.contract(abi=UNISWAP_EXCHANGE_ABI, address=info.exchange_address)
        i = 0
        total_trade_volume = defaultdict(int)
        for block_number in get_chart_range():
            trade_volume = defaultdict(int)
            while i < len(info.logs) and info.logs[i]['blockNumber'] < block_number:
                log = info.logs[i]
                i += 1
                topic = log['topics'][0].hex()
                if topic == EVENT_ETH_PURCHASE:
                    event = get_event_data(web3.codec, exchange.events.EthPurchase._get_event_abi(), log)
                    trade_volume[event['args']['buyer']] += event['args']['eth_bought'] / 0.997
                    total_trade_volume[event['args']['buyer']] += event['args']['eth_bought'] / 0.997
                elif topic == EVENT_TOKEN_PURCHASE:
                    event = get_event_data(web3.codec, exchange.events.TokenPurchase._get_event_abi(), log)
                    trade_volume[event['args']['buyer']] += event['args']['eth_sold']
                    total_trade_volume[event['args']['buyer']] += event['args']['eth_sold']
            volume.append(trade_volume)

        total_volume = sum(total_trade_volume.values())
        valuable_traders = {t for (t, v) in total_trade_volume.items() if v > total_volume / 1000}
        info.valuable_traders = list(valuable_traders)

        for vol in volume:
            filtered_vol = defaultdict(int)
            for (t, v) in vol.items():
                if t in valuable_traders:
                    filtered_vol[t] = v
                else:
                    filtered_vol['Other'] += v
            info.volume.append(filtered_vol)

    logging.info('Volumes of {} exchanges populated'.format(len(infos)))
    return infos
def __init__(self, receipt):
    self.raw_receipt = receipt
    self.transaction_hash = receipt['transactionHash']
    self.gas_used = receipt['gasUsed']
    self.transfers = []
    self.result = None

    receipt_logs = receipt['logs']
    if (receipt_logs is not None) and (len(receipt_logs) > 0):
        self.successful = True
        for receipt_log in receipt_logs:
            if len(receipt_log['topics']) > 0:
                # $ seth keccak $(seth --from-ascii "Transfer(address,address,uint256)")
                # 0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef
                if receipt_log['topics'][0] == HexBytes('0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'):
                    from pymaker.token import ERC20Token
                    transfer_abi = [abi for abi in ERC20Token.abi if abi.get('name') == 'Transfer'][0]
                    codec = ABICodec(default_registry)
                    event_data = get_event_data(codec, transfer_abi, receipt_log)
                    self.transfers.append(Transfer(token_address=Address(event_data['address']),
                                                   from_address=Address(event_data['args']['from']),
                                                   to_address=Address(event_data['args']['to']),
                                                   value=Wad(event_data['args']['value'])))

                # $ seth keccak $(seth --from-ascii "Mint(address,uint256)")
                # 0x0f6798a560793a54c3bcfe86a93cde1e73087d944c0ea20544137d4121396885
                if receipt_log['topics'][0] == HexBytes('0x0f6798a560793a54c3bcfe86a93cde1e73087d944c0ea20544137d4121396885'):
                    from pymaker.token import DSToken
                    transfer_abi = [abi for abi in DSToken.abi if abi.get('name') == 'Mint'][0]
                    codec = ABICodec(default_registry)
                    event_data = get_event_data(codec, transfer_abi, receipt_log)
                    self.transfers.append(Transfer(token_address=Address(event_data['address']),
                                                   from_address=Address('0x0000000000000000000000000000000000000000'),
                                                   to_address=Address(event_data['args']['guy']),
                                                   value=Wad(event_data['args']['wad'])))

                # $ seth keccak $(seth --from-ascii "Burn(address,uint256)")
                # 0xcc16f5dbb4873280815c1ee09dbd06736cffcc184412cf7a71a0fdb75d397ca5
                if receipt_log['topics'][0] == HexBytes('0xcc16f5dbb4873280815c1ee09dbd06736cffcc184412cf7a71a0fdb75d397ca5'):
                    from pymaker.token import DSToken
                    transfer_abi = [abi for abi in DSToken.abi if abi.get('name') == 'Burn'][0]
                    codec = ABICodec(default_registry)
                    event_data = get_event_data(codec, transfer_abi, receipt_log)
                    self.transfers.append(Transfer(token_address=Address(event_data['address']),
                                                   from_address=Address(event_data['args']['guy']),
                                                   to_address=Address('0x0000000000000000000000000000000000000000'),
                                                   value=Wad(event_data['args']['wad'])))
    else:
        self.successful = False
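The hard-coded topic hashes in the example above are keccak-256 digests of the event signatures (that is what the `seth keccak` comments compute). As a small cross-check sketch, the same values can be derived directly in Python with web3.py:

```python
from web3 import Web3

# Equivalent of `seth keccak $(seth --from-ascii "Transfer(address,address,uint256)")`
transfer_topic = Web3.keccak(text="Transfer(address,address,uint256)").hex()
assert transfer_topic == "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"

# The Mint/Burn topics used above can be recomputed the same way.
mint_topic = Web3.keccak(text="Mint(address,uint256)").hex()
burn_topic = Web3.keccak(text="Burn(address,uint256)").hex()
print(mint_topic, burn_topic)
```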
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0x7c5bfdc0a5e8192f6cd4972f382cec69116862fb62e6abff8003874c58e064b8": event_data = get_event_data(codec, self.kick_abi, event) return Clipper.KickLog(event_data) elif signature == "0x05e309fd6ce72f2ab888a20056bb4210df08daed86f21f95053deb19964d86b1": event_data = get_event_data(codec, self.take_abi, event) self._get_sender_for_eventlog(event_data) return Clipper.TakeLog(event_data, self._get_sender_for_eventlog(event_data)) elif signature == "0x275de7ecdd375b5e8049319f8b350686131c219dd4dc450a08e9cf83b03c865f": event_data = get_event_data(codec, self.redo_abi, event) return Clipper.RedoLog(event_data) else: logger.debug(f"Found event signature {signature}")
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0xdf7b5cd0ee6547c7389d2ac00ee0c1cd3439542399d6c8c520cc69c7409c0990": event_data = get_event_data(codec, self.start_auction_abi, event) return FixedDiscountCollateralAuctionHouse.StartAuctionLog( event_data) elif signature == "0xa4a1133e32fac37643a1fe1db4631daadb462c8662ae16004e67f0b8bb608383": event_data = get_event_data(codec, self.buy_collateral_abi, event) return FixedDiscountCollateralAuctionHouse.BuyCollateralLog( event_data) elif signature == "0xef063949eb6ef5abef19139d9c75a558424ffa759302cfe445f8d2d327376fe4": event_data = get_event_data(codec, self.settle_auction_abi, event) return FixedDiscountCollateralAuctionHouse.SettleAuctionLog( event_data)
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0xa4863af70e77aecfe2769e0569806782ba7c6f86fc9a307290a3816fb8a563e5": event_data = get_event_data(codec, self.start_auction_abi, event) return PreSettlementSurplusAuctionHouse.StartAuctionLog(event_data) elif signature == "0xd87c815d5a67c2e130ad04b714d87a6fb69d5a6df0dbb0f1639cd9fe292201f9": event_data = get_event_data(codec, self.increase_bid_size_abi, event) return PreSettlementSurplusAuctionHouse.IncreaseBidSizeLog( event_data) elif signature == "0x03af424b0e12d91ea31fe7f2c199fc02c9ede38f9aa1bdc019a8087b41445f7a": event_data = get_event_data(codec, self.settle_auction_abi, event) return PreSettlementSurplusAuctionHouse.SettleAuctionLog( event_data)
def _parse_logs(self, txn_receipt):
    for log in txn_receipt['logs']:
        try:
            decoded_log = get_event_data(self.abi, log)
        except MismatchedABI:
            continue
        yield decoded_log
def test_event_data_extraction(web3,
                               emitter,
                               wait_for_transaction,
                               emitter_log_topics,
                               emitter_event_ids,
                               contract_fn,
                               event_name,
                               call_args,
                               expected_args):
    emitter_fn = emitter.functions[contract_fn]
    event_id = getattr(emitter_event_ids, event_name)
    txn_hash = emitter_fn(event_id, *call_args).transact()
    txn_receipt = wait_for_transaction(web3, txn_hash)

    assert len(txn_receipt['logs']) == 1
    log_entry = txn_receipt['logs'][0]

    event_abi = emitter._find_matching_event_abi(event_name)

    event_topic = getattr(emitter_log_topics, event_name)
    is_anonymous = event_abi['anonymous']

    if is_anonymous:
        assert event_topic not in log_entry['topics']
    else:
        assert event_topic in log_entry['topics']

    event_data = get_event_data(event_abi, log_entry)

    assert event_data['args'] == expected_args
    assert event_data['blockHash'] == txn_receipt['blockHash']
    assert event_data['blockNumber'] == txn_receipt['blockNumber']
    assert event_data['transactionIndex'] == txn_receipt['transactionIndex']
    assert is_same_address(event_data['address'], emitter.address)
    assert event_data['event'] == event_name
def test_event_data_extraction_bytes(web3,
                                     emitter,
                                     wait_for_transaction,
                                     emitter_log_topics,
                                     emitter_event_ids,
                                     call_args,
                                     expected_args):
    emitter_fn = emitter.functions.logListArgs
    txn_hash = emitter_fn(*call_args).transact()
    txn_receipt = wait_for_transaction(web3, txn_hash)

    assert len(txn_receipt['logs']) == 1
    log_entry = txn_receipt['logs'][0]

    event_name = 'LogListArgs'

    event_abi = emitter._find_matching_event_abi(event_name)
    event_topic = getattr(emitter_log_topics, event_name)
    assert event_topic in log_entry['topics']

    event_data = get_event_data(web3.codec, event_abi, log_entry)

    assert event_data['args'] == expected_args
    assert event_data['blockHash'] == txn_receipt['blockHash']
    assert event_data['blockNumber'] == txn_receipt['blockNumber']
    assert event_data['transactionIndex'] == txn_receipt['transactionIndex']
    assert is_same_address(event_data['address'], emitter.address)
    assert event_data['event'] == event_name
def test_event_data_extraction_bytes_with_warning(web3,
                                                  emitter,
                                                  wait_for_transaction,
                                                  emitter_log_topics):
    with pytest.warns(
        DeprecationWarning,
        match='in v6 it will be invalid to pass a hex string without the "0x" prefix'
    ):
        txn_hash = emitter.functions.logListArgs(['13'], ['54']).transact()
        txn_receipt = wait_for_transaction(web3, txn_hash)

        assert len(txn_receipt['logs']) == 1
        log_entry = txn_receipt['logs'][0]

        event_name = 'LogListArgs'

        event_abi = emitter._find_matching_event_abi(event_name)
        event_topic = getattr(emitter_log_topics, event_name)
        assert event_topic in log_entry['topics']

        event_data = get_event_data(web3.codec, event_abi, log_entry)

        expected_args = {
            'arg0': b']\x0b\xf6sp\xbe\xa2L\xa9is\xe4\xab\xb7\xfa+nVJpgt\xa7\x8f:\xa4\x9f\xdb\x93\xf0\x8f\xae',  # noqa: E501
            'arg1': [b'T\x00']
        }

        assert event_data['args'] == expected_args
        assert event_data['blockHash'] == txn_receipt['blockHash']
        assert event_data['blockNumber'] == txn_receipt['blockNumber']
        assert event_data['transactionIndex'] == txn_receipt['transactionIndex']
        assert is_same_address(event_data['address'], emitter.address)
        assert event_data['event'] == event_name
def test_dynamic_length_argument_extraction(web3, emitter, wait_for_transaction, emitter_log_topics, emitter_event_ids): string_0 = "this-is-the-first-string-which-exceeds-32-bytes-in-length" string_1 = "this-is-the-second-string-which-exceeds-32-bytes-in-length" txn_hash = emitter.functions.logDynamicArgs(string_0, string_1).transact() txn_receipt = wait_for_transaction(web3, txn_hash) assert len(txn_receipt['logs']) == 1 log_entry = txn_receipt['logs'][0] event_abi = emitter._find_matching_event_abi('LogDynamicArgs') event_topic = emitter_log_topics.LogDynamicArgs assert event_topic in log_entry['topics'] string_0_topic = web3.keccak(text=string_0) assert string_0_topic in log_entry['topics'] event_data = get_event_data(web3.codec, event_abi, log_entry) expected_args = { "arg0": string_0_topic, "arg1": string_1, } assert event_data['args'] == expected_args assert event_data['blockHash'] == txn_receipt['blockHash'] assert event_data['blockNumber'] == txn_receipt['blockNumber'] assert event_data['transactionIndex'] == txn_receipt['transactionIndex'] assert is_same_address(event_data['address'], emitter.address) assert event_data['event'] == 'LogDynamicArgs'
def test_event_data_extraction(web3,
                               emitter,
                               wait_for_transaction,
                               emitter_log_topics,
                               emitter_event_ids,
                               contract_fn,
                               event_name,
                               call_args,
                               expected_args):
    emitter_fn = emitter.functions[contract_fn]
    event_id = getattr(emitter_event_ids, event_name)
    txn_hash = emitter_fn(event_id, *call_args).transact()
    txn_receipt = wait_for_transaction(web3, txn_hash)

    assert len(txn_receipt['logs']) == 1
    log_entry = txn_receipt['logs'][0]

    event_abi = emitter._find_matching_event_abi(event_name)

    event_topic = getattr(emitter_log_topics, event_name)
    is_anonymous = event_abi['anonymous']

    if is_anonymous:
        assert event_topic not in log_entry['topics']
    else:
        assert event_topic in log_entry['topics']

    event_data = get_event_data(web3.codec, event_abi, log_entry)

    assert event_data['args'] == expected_args
    assert event_data['blockHash'] == txn_receipt['blockHash']
    assert event_data['blockNumber'] == txn_receipt['blockNumber']
    assert event_data['transactionIndex'] == txn_receipt['transactionIndex']
    assert is_same_address(event_data['address'], emitter.address)
    assert event_data['event'] == event_name
def test_dynamic_length_argument_extraction(web3, emitter, wait_for_transaction, emitter_log_topics, emitter_event_ids): string_0 = "this-is-the-first-string-which-exceeds-32-bytes-in-length" string_1 = "this-is-the-second-string-which-exceeds-32-bytes-in-length" txn_hash = emitter.functions.logDynamicArgs(string_0, string_1).transact() txn_receipt = wait_for_transaction(web3, txn_hash) assert len(txn_receipt['logs']) == 1 log_entry = txn_receipt['logs'][0] event_abi = emitter._find_matching_event_abi('LogDynamicArgs') event_topic = emitter_log_topics.LogDynamicArgs assert event_topic in log_entry['topics'] string_0_topic = web3.keccak(text=string_0) assert string_0_topic in log_entry['topics'] event_data = get_event_data(event_abi, log_entry) expected_args = { "arg0": string_0_topic, "arg1": string_1, } assert event_data['args'] == expected_args assert event_data['blockHash'] == txn_receipt['blockHash'] assert event_data['blockNumber'] == txn_receipt['blockNumber'] assert event_data['transactionIndex'] == txn_receipt['transactionIndex'] assert is_same_address(event_data['address'], emitter.address) assert event_data['event'] == 'LogDynamicArgs'
def test_argument_extraction_strict_bytes_types(w3_strict_abi,
                                                strict_emitter,
                                                wait_for_transaction,
                                                emitter_log_topics):
    arg_0 = [b'12']
    arg_1 = [b'12']
    txn_hash = strict_emitter.functions.logListArgs(arg_0, arg_1).transact()
    txn_receipt = wait_for_transaction(w3_strict_abi, txn_hash)

    assert len(txn_receipt['logs']) == 1
    log_entry = txn_receipt['logs'][0]
    assert len(log_entry['topics']) == 2

    event_abi = strict_emitter._find_matching_event_abi('LogListArgs')

    event_topic = emitter_log_topics.LogListArgs
    assert event_topic in log_entry['topics']

    encoded_arg_0 = w3_strict_abi.codec.encode_abi(['bytes2'], arg_0)
    padded_arg_0 = encoded_arg_0.ljust(32, b'\x00')
    arg_0_topic = w3_strict_abi.keccak(padded_arg_0)
    assert arg_0_topic in log_entry['topics']

    event_data = get_event_data(w3_strict_abi.codec, event_abi, log_entry)

    expected_args = {"arg0": arg_0_topic, "arg1": arg_1}

    assert event_data['args'] == expected_args
    assert event_data['blockHash'] == txn_receipt['blockHash']
    assert event_data['blockNumber'] == txn_receipt['blockNumber']
    assert event_data['transactionIndex'] == txn_receipt['transactionIndex']
    assert is_same_address(event_data['address'], strict_emitter.address)
    assert event_data['event'] == 'LogListArgs'
def populate_providers(infos: List[ExchangeInfo], saved_block: int) -> List[ExchangeInfo]:
    for info in infos:
        exchange = web3.eth.contract(abi=UNISWAP_EXCHANGE_ABI, address=info.exchange_address)
        for log in info.logs:
            if log['blockNumber'] < saved_block:
                continue
            if log['topics'][0].hex() != EVENT_TRANSFER or log['address'] != info.exchange_address:
                continue
            event = get_event_data(exchange.events.Transfer._get_event_abi(), log)
            if event['args']['_from'] == '0x0000000000000000000000000000000000000000':
                info.providers[event['args']['_to']] += event['args']['_value']
            elif event['args']['_to'] == '0x0000000000000000000000000000000000000000':
                info.providers[event['args']['_from']] -= event['args']['_value']
                owner = exchange.functions.owner().call(block_identifier=log['blockNumber'])
                info.providers[owner] = exchange.functions.balanceOf(owner).call(block_identifier=log['blockNumber'])
            else:
                info.providers[event['args']['_from']] -= event['args']['_value']
                info.providers[event['args']['_to']] += event['args']['_value']

    logging.info('Loaded info about providers of {} exchanges'.format(len(infos)))
    return infos
def build_filter(self):
    builder = EventFilterBuilder(
        self._get_event_abi(),
        self.web3.codec,
        formatter=get_event_data(self.web3.codec, self._get_event_abi()))
    builder.address = self.address
    return builder
def _parse_logs(self, txn_receipt, errors):
    try:
        errors.name
    except AttributeError:
        raise AttributeError(f'Error flag must be one of: {EventLogErrorFlags.flag_options()}')

    for log in txn_receipt['logs']:
        try:
            rich_log = get_event_data(self.web3.codec, self.abi, log)
        except (MismatchedABI, LogTopicError, InvalidEventABI, TypeError) as e:
            if errors == DISCARD:
                continue
            elif errors == IGNORE:
                new_log = MutableAttributeDict(log)
                new_log['errors'] = e
                rich_log = AttributeDict(new_log)
            elif errors == STRICT:
                raise e
            else:
                warnings.warn(
                    f'The log with transaction hash: {log.transactionHash} and '
                    f'logIndex: {log.logIndex} encountered the following error '
                    f'during processing: {type(e).__name__}({e}). It has been discarded.'
                )
                continue
        yield rich_log
def _fetch_events_for_all_contracts(
        web3,
        event,
        argument_filters: dict,
        from_block: int,
        to_block: int) -> Iterable:
    """Get events using the eth_getLogs JSON-RPC API.

    This method is detached from any contract instance.

    This is a stateless method, as opposed to createFilter.
    It can be safely called against nodes which do not provide the
    `eth_newFilter` API, like Infura.
    """

    if from_block is None:
        raise TypeError("Missing mandatory keyword argument to getLogs: fromBlock")

    # Currently no way to poke this using a public Web3.py API.
    # This will return the raw underlying ABI JSON object for the event.
    abi = event._get_event_abi()

    # Depending on the Solidity version used to compile the contract that
    # produced the ABI, it might use Solidity ABI encoding v1 or v2.
    # We just assume the default that you set on the Web3 object here.
    # More information: https://eth-abi.readthedocs.io/en/latest/index.html
    codec: ABICodec = web3.codec

    # Here we need to poke a bit into Web3 internals, as this functionality
    # is not exposed by default. Construct the raw JSON-RPC filter parameters
    # from the human-readable Python descriptions, namely converting event
    # names to their keccak signatures. More information:
    # https://github.com/ethereum/web3.py/blob/e176ce0793dafdd0573acc8d4b76425b6eb604ca/web3/_utils/filters.py#L71
    data_filter_set, event_filter_params = construct_event_filter_params(
        abi,
        codec,
        address=argument_filters.get("address"),
        argument_filters=argument_filters,
        fromBlock=from_block,
        toBlock=to_block,
    )

    logger.debug("Querying eth_getLogs with the following parameters: %s", event_filter_params)

    # Call the JSON-RPC API on your Ethereum node.
    # getLogs() returns raw AttributeDict entries.
    logs = web3.eth.getLogs(event_filter_params)

    # Convert raw binary data to Python proxy objects as described by the ABI.
    all_events = []
    for log in logs:
        # Convert the raw JSON-RPC log result to a human-readable event using the ABI data.
        # More information on how processLog works:
        # https://github.com/ethereum/web3.py/blob/fbaf1ad11b0c7fac09ba34baff2c256cffe0a148/web3/_utils/events.py#L200
        evt = get_event_data(codec, abi, log)
        # Note: this was originally a yield, but deferring the timeout
        # exception caused the throttle logic not to work.
        all_events.append(evt)
    return all_events
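A rough usage sketch of the function above. The provider URL, `ERC20_ABI`, and block numbers are placeholders, and the contract is created without an address since only the event ABI is needed; with an empty `argument_filters` dict the query returns Transfer events from every contract in the block range.

```python
from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder node URL
erc20 = w3.eth.contract(abi=ERC20_ABI)  # no address: we only need the event ABI

events = _fetch_events_for_all_contracts(
    w3,
    erc20.events.Transfer,
    {},                    # no argument filters
    from_block=10_000_000,
    to_block=10_000_100,
)
for evt in events:
    print(evt["address"], evt["event"], dict(evt["args"]))
```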
def from_event(cls, event: dict):
    assert(isinstance(event, dict))

    topics = event.get('topics')
    if topics and topics[0] == HexBytes('0x0bcc4c97732e47d9946f229edb95f5b6323f601300e4690de719993f3c371129'):
        log_fill_abi = [abi for abi in ZrxExchange.abi if abi.get('name') == 'LogFill'][0]
        event_data = get_event_data(log_fill_abi, event)
        return LogFill(event_data)
def from_event(cls, event: dict):
    assert(isinstance(event, dict))

    topics = event.get('topics')
    if topics and topics[0] == HexBytes('0x3383e3357c77fd2e3a4b30deea81179bc70a795d053d14d5b7f2f01d0fd4596f'):
        log_take_abi = [abi for abi in SimpleMarket.abi if abi.get('name') == 'LogTake'][0]
        event_data = get_event_data(log_take_abi, event)
        return LogTake(event_data)
def createFilter( self, *, # PEP 3102 argument_filters=None, fromBlock=None, toBlock="latest", address=None, topics=None): """ Create filter object that tracks logs emitted by this contract event. :param filter_params: other parameters to limit the events """ if fromBlock is None: raise TypeError("Missing mandatory keyword argument to createFilter: fromBlock") if argument_filters is None: argument_filters = dict() _filters = dict(**argument_filters) event_abi = self._get_event_abi() check_for_forbidden_api_filter_arguments(event_abi, _filters) _, event_filter_params = construct_event_filter_params( self._get_event_abi(), self.web3.codec, contract_address=self.address, argument_filters=_filters, fromBlock=fromBlock, toBlock=toBlock, address=address, topics=topics, ) filter_builder = EventFilterBuilder(event_abi, self.web3.codec) filter_builder.address = event_filter_params.get('address') filter_builder.fromBlock = event_filter_params.get('fromBlock') filter_builder.toBlock = event_filter_params.get('toBlock') match_any_vals = { arg: value for arg, value in _filters.items() if not is_array_type(filter_builder.args[arg].arg_type) and is_list_like(value) } for arg, value in match_any_vals.items(): filter_builder.args[arg].match_any(*value) match_single_vals = { arg: value for arg, value in _filters.items() if not is_array_type(filter_builder.args[arg].arg_type) and not is_list_like(value) } for arg, value in match_single_vals.items(): filter_builder.args[arg].match_single(value) log_filter = filter_builder.deploy(self.web3) log_filter.log_entry_formatter = get_event_data(self.web3.codec, self._get_event_abi()) log_filter.builder = filter_builder return log_filter
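For context, a minimal usage sketch of the method above from the caller's side. The provider URL, `ERC20_ABI`, block number, and recipient address are placeholders, and the `"to"` filter key assumes an ERC-20 Transfer event whose recipient argument is named and indexed that way.

```python
from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder node URL
token = w3.eth.contract(
    address="0x0000000000000000000000000000000000000000",  # placeholder contract
    abi=ERC20_ABI,
)

# Track Transfer events to one recipient, starting from a given block.
transfer_filter = token.events.Transfer.createFilter(
    fromBlock=10_000_000,
    argument_filters={"to": "0x0000000000000000000000000000000000000001"},
)
for evt in transfer_filter.get_all_entries():
    print(evt["event"], dict(evt["args"]))
```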
def parse_event(self, event): signature = Web3.toHex(event['topics'][0]) codec = ABICodec(default_registry) if signature == "0xdf7b5cd0ee6547c7389d2ac00ee0c1cd3439542399d6c8c520cc69c7409c0990": event_data = get_event_data(codec, self.start_auction_abi, event) return EnglishCollateralAuctionHouse.StartAuctionLog(event_data) elif signature == "0xd87c815d5a67c2e130ad04b714d87a6fb69d5a6df0dbb0f1639cd9fe292201f9": event_data = get_event_data(codec, self.increase_bid_size_abi, event) return EnglishCollateralAuctionHouse.IncreaseBidSizeLog(event_data) elif signature == "0x8c63feacc784a7f735e454365ba433f17d17293b02c57d98dad113977dbf0f13": event_data = get_event_data(codec, self.decrease_sold_amount_abi, event) return EnglishCollateralAuctionHouse.DecreaseSoldAmountLog( event_data) elif signature == "0x03af424b0e12d91ea31fe7f2c199fc02c9ede38f9aa1bdc019a8087b41445f7a": event_data = get_event_data(codec, self.settle_auction_abi, event) return EnglishCollateralAuctionHouse.SettleAuctionLog(event_data)
def decode_log_with_fallback(abis_to_try, log):
    for abi in abis_to_try:
        try:
            log_with_replaced_topic = deepcopy(log)
            log_with_replaced_topic['topics'][0] = event_abi_to_log_topic(abi)
            return get_event_data(w3.codec, abi, log_with_replaced_topic)
        except DecodingError:
            logger.debug('trying fallback log decoder')
    raise DecodingError('could not decode log')
def createFilter( self, *, # PEP 3102 argument_filters=None, fromBlock=None, toBlock="latest", address=None, topics=None): """ Create filter object that tracks logs emitted by this contract event. :param filter_params: other parameters to limit the events """ if fromBlock is None: raise TypeError("Missing mandatory keyword argument to createFilter: fromBlock") if argument_filters is None: argument_filters = dict() _filters = dict(**argument_filters) event_abi = self._get_event_abi() check_for_forbidden_api_filter_arguments(event_abi, _filters) _, event_filter_params = construct_event_filter_params( self._get_event_abi(), contract_address=self.address, argument_filters=_filters, fromBlock=fromBlock, toBlock=toBlock, address=address, topics=topics, ) filter_builder = EventFilterBuilder(event_abi) filter_builder.address = event_filter_params.get('address') filter_builder.fromBlock = event_filter_params.get('fromBlock') filter_builder.toBlock = event_filter_params.get('toBlock') match_any_vals = { arg: value for arg, value in _filters.items() if not is_array_type(filter_builder.args[arg].arg_type) and is_list_like(value) } for arg, value in match_any_vals.items(): filter_builder.args[arg].match_any(*value) match_single_vals = { arg: value for arg, value in _filters.items() if not is_array_type(filter_builder.args[arg].arg_type) and not is_list_like(value) } for arg, value in match_single_vals.items(): filter_builder.args[arg].match_single(value) log_filter = filter_builder.deploy(self.web3) log_filter.log_entry_formatter = get_event_data(self._get_event_abi()) log_filter.builder = filter_builder return log_filter
def prepare(tx):
    tx = get_event_data(ABI, tx)
    return dict(
        network='ethereum',
        direction='in' if tx.args['from'] == ZERO_ADDRESS else 'out',
        tx=encode_hex(tx.transactionHash),
        value=Decimal(tx.args.value) / 10 ** 8,
        block=tx.blockNumber,
        ts=timestamp(tx.blockNumber),
    )
def from_receipt(cls, receipt: Receipt):
    assert(isinstance(receipt, Receipt))

    if receipt.logs is not None:
        for log in receipt.logs:
            if len(log['topics']) > 0 and log['topics'][0] == HexBytes('0x773ff502687307abfa024ac9f62f9752a0d210dac2ffd9a29e38e12e2ea82c82'):
                log_make_abi = [abi for abi in SimpleMarket.abi if abi.get('name') == 'LogMake'][0]
                event_data = get_event_data(log_make_abi, log)
                yield LogMake(event_data)
def __decode_event(self, log, abi): if isinstance(log["topics"][0], str): log["topics"][0] = decode_hex(log["topics"][0]) elif isinstance(log["topics"][0], int): log["topics"][0] = decode_hex(hex(log["topics"][0])) event_id = log["topics"][0] events = filter_by_type("event", abi) topic_to_event_abi = {event_abi_to_log_topic(event_abi): event_abi for event_abi in events} event_abi = topic_to_event_abi[event_id] return get_event_data(event_abi, log)
def process_transaction(web3, tx_hash, block_gauge, token_flow_counter, fees_counter):
    tx = web3.eth.getTransaction(tx_hash)
    tx_logs = web3.eth.getTransactionReceipt(tx_hash).logs
    block_number = tx.blockNumber
    block_timestamp = web3.eth.get_block(block_number)["timestamp"]

    erc20_abi = json.load(open("interfaces/ERC20.json", "r"))
    erc20 = web3.eth.contract(abi=erc20_abi)
    erc20_transfer_abi = erc20.events.Transfer._get_event_abi()

    tokens = {
        transfer: 0
        for transfer in [
            "ren_minted",
            "ren_received",
            "ren_bought",
            "ren_burned",
            "ren_sent",
            "wbtc_received",
            "wbtc_sent",
            "fee_badger",
            "fee_renvm",
        ]
    }

    for log in tx_logs:
        try:
            event_data = get_event_data(web3.codec, erc20_transfer_abi, log)
            tokens = update_tokens(tx_hash, event_data, tokens)
        except MismatchedABI:
            # not an ERC20 transfer, so skip
            continue

    balances = calc_balances(tokens)

    # update counters
    block_gauge.labels("block_number").set(block_number)
    block_gauge.labels("block_timestamp").set(block_timestamp)
    token_flow_counter.labels("BTC", "mint", "in").inc(balances["btc_in"])
    token_flow_counter.labels("BTC", "burn", "out").inc(balances["btc_out"])
    token_flow_counter.labels("WBTC", "burn", "in").inc(balances["wbtc_received"])
    token_flow_counter.labels("WBTC", "mint", "out").inc(balances["wbtc_sent"])
    token_flow_counter.labels("renBTC", "burn", "in").inc(balances["ren_received"])
    token_flow_counter.labels("renBTC", "mint", "out").inc(balances["ren_sent"])
    fees_counter.labels("Badger DAO").inc(balances["fee_badger_dao"])
    fees_counter.labels("Badger Bridge Team").inc(balances["fee_badger_bridge"])
    fees_counter.labels("RenVM Darknodes").inc(balances["fee_darknodes"])

    logger.info(
        f"Processed event: block timestamp {block_timestamp} block number {block_number}, hash {tx_hash}"
    )

    return tokens, balances
def from_event(cls, event: AttributeDict, contract_abi: list):
    assert isinstance(event, AttributeDict)
    assert isinstance(contract_abi, list)

    log_note_abi = [abi for abi in contract_abi if abi.get('name') == 'LogNote'][0]

    try:
        codec = ABICodec(default_registry)
        event_data = get_event_data(codec, log_note_abi, event)
        return LogNote(event_data)
    except ValueError:
        # event is not a LogNote
        return None
def from_event(cls, event: dict):
    assert isinstance(event, dict)

    topics = event.get('topics')
    if topics and topics[0] == HexBytes('0x99b5620489b6ef926d4518936cfec15d305452712b88bd59da2d9c10fb0953e8'):
        log_bite_abi = [abi for abi in Cat.abi if abi.get('name') == 'Bite'][0]
        codec = ABICodec(default_registry)
        event_data = get_event_data(codec, log_bite_abi, event)
        return Cat.LogBite(event_data)
    else:
        logging.warning(f'[from_event] Invalid topic in {event}')
def build_filter(self):
    builder = EventFilterBuilder(
        self._get_event_abi(),
        formatter=get_event_data(self._get_event_abi()))
    builder.address = self.address
    return builder