def find_double_fees(withdrawals, fees, blacklist=None):
    """Compute per-user refunds for transactions that emitted both a
    withdrawal event and a fee event (i.e. users who were double-charged).

    Parameters
    ----------
    withdrawals, fees : sequences of raw logs, each a mapping with a
        "transactionHash" key.
    blacklist : optional container of addresses to exclude from refunds.

    Returns
    -------
    dict mapping user address -> total refund amount, largest refunds first.

    Raises
    ------
    ValueError if either event list is empty.
    """
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`.
    if not withdrawals or not fees:
        raise ValueError("no events loaded")
    print(f"processing {len(withdrawals)=} and {len(fees)=}")

    # Only transactions present in BOTH event streams were double-charged.
    overlap = (
        {log["transactionHash"] for log in withdrawals}
        & {log["transactionHash"] for log in fees}
    )
    print(f"{len(overlap)=}")

    withdrawals = decode_logs(
        [log for log in withdrawals if log["transactionHash"] in overlap])
    fees = decode_logs(
        [log for log in fees if log["transactionHash"] in overlap])

    # Third positional value of the decoded fee event is treated as the fee
    # amount — assumes the event ABI puts the amount there; TODO confirm.
    tx_to_refund = {
        event.transaction_hash: tuple(event.values())[2] for event in fees
    }

    refunds = Counter()
    for event in withdrawals:
        recipient = tuple(event.values())[0]
        # A non-empty code blob means the recipient is a contract; in that
        # case credit the EOA that sent the transaction instead.
        is_contract = bool(web3.eth.get_code(recipient))
        user = (
            chain.get_transaction(event.transaction_hash).sender
            if is_contract
            else recipient
        )
        if blacklist and user in blacklist:
            continue
        refunds[user] += tx_to_refund[event.transaction_hash]

    # most_common() orders by amount descending; dict preserves that order.
    return dict(refunds.most_common())
def watch_events(self):
    """Replay provider/registry logs to refresh identifiers and pools.

    TODO keep fresh in background.
    """
    # Fetch all registries and factories from the address provider.
    # NOTE(review): 'addres_provider' looks misspelled but must match the
    # attribute as defined elsewhere in the class.
    provider_filter = create_filter(str(self.addres_provider))
    for entry in decode_logs(provider_filter.get_new_entries()):
        if entry.name == 'NewAddressIdentifier':
            self.identifiers[entry['id']].append(entry['addr'])
        elif entry.name == 'AddressModified':
            self.identifiers[entry['id']].append(entry['new_address'])

    # Fetch pools from the latest registry.
    registry_filter = create_filter(str(self.registry))
    for entry in decode_logs(registry_filter.get_new_entries()):
        if entry.name == 'PoolAdded':
            self.pools.add(entry['pool'])

    logger.info(f'loaded {len(self.pools)} pools')
def watch_events(self):
    """Poll the chain every 5 minutes and feed new registry logs to
    process_events; signal readiness after the first pass."""
    started = time.time()
    self.log_filter = create_filter(self.addresses)
    for _block in chain.new_blocks(height_buffer=12):
        entries = self.log_filter.get_new_entries()
        self.process_events(decode_logs(entries))
        if not self._done.is_set():
            # First batch processed — unblock anyone waiting on the registry.
            self._done.set()
            logger.info("loaded v2 registry in %.3fs", time.time() - started)
        time.sleep(300)
def load_feeds(self):
    """Populate self.feeds with USD-denominated aggregators from the
    registry's FeedConfirmed events, plus any manual additions."""
    confirmed = decode_logs(
        get_logs_asap(str(self.registry),
                      [self.registry.topics['FeedConfirmed']]))
    usd = DENOMINATIONS['USD']
    self.feeds = {
        event['asset']: event['latestAggregator']
        for event in confirmed
        if event['denomination'] == usd
    }
    # Manually curated feeds override / extend the on-chain set.
    self.feeds.update(ADDITIONAL_FEEDS)
    logger.info(f'loaded {len(self.feeds)} feeds')
def load_from_ens(self):
    """Return every registry contract ever pointed to by the
    v2.registry.ychad.eth ENS name.

    Tracks older registries to pull experiments.
    """
    resolver = contract('0x4976fb03C32e5B8cfe2b6cCB31c09Ba78EBaBa41')
    topic_set = construct_event_topic_set(
        filter_by_name('AddressChanged', resolver.abi)[0],
        web3.codec,
        {'node': web3.ens.namehash('v2.registry.ychad.eth')},
    )
    changes = decode_logs(get_logs_asap(str(resolver), topic_set))
    logger.info('loaded %d registry versions', len(changes))
    return [Contract(change['newAddress']) for change in changes]
def watch_events(self):
    """Poll the chain every 5 minutes and feed new vault logs to
    process_events; signal readiness after the first pass."""
    started = time.time()
    self.log_filter = create_filter(str(self.vault), topics=self._topics)
    for _block in chain.new_blocks(height_buffer=12):
        entries = self.log_filter.get_new_entries()
        decoded = decode_logs(entries)
        self.process_events(decoded)
        if not self._done.is_set():
            # First batch processed — unblock anyone waiting on this vault.
            self._done.set()
            logger.info(
                "loaded %d strategies %s in %.3fs",
                len(self._strategies), self.name, time.time() - started,
            )
        time.sleep(300)
def get_token_transfers(token, start_block, end_block) -> pd.DataFrame:
    """Return a DataFrame of decoded Transfer events for `token` over the
    block range [start_block, end_block]."""
    transfer_topics = construct_event_topic_set(
        filter_by_name('Transfer', token.abi)[0],
        web3.codec,
    )
    postgres.cache_token(token.address)
    decimals = contract(token.address).decimals()
    events = decode_logs(
        get_logs_asap(
            token.address,
            transfer_topics,
            from_block=start_block,
            to_block=end_block,
        ))
    rows = Parallel(1, 'threading')(
        delayed(_process_transfer_event)(event, token, decimals)
        for event in events)
    return pd.DataFrame(rows)
def get_protocol_fees(address):
    """
    Get all protocol fee payouts for a given vault.

    Fees can be found as vault share transfers to the rewards address.
    """
    vault = Vault.from_address(address)
    rewards = vault.vault.rewards()
    transfer_topics = construct_event_topic_set(
        filter_by_name('Transfer', vault.vault.abi)[0],
        web3.codec,
        {'sender': address, 'receiver': rewards},
    )
    fee_logs = decode_logs(get_logs_asap(address, transfer_topics))
    # Scale raw share amounts down to human-readable units.
    return {
        entry.block_number: entry['value'] / vault.scale
        for entry in fee_logs
    }
def process_transfers(self, logs):
    """Decode Transfer logs one at a time and collect them on self._transfers.

    NOTE: We have to decode logs here because NFTs prevent us from batch
    decoding logs; a single bad log would poison the whole batch.
    """
    # Known NFT contracts whose Transfer logs are expected to fail decoding.
    # Set membership is O(1) versus the original list scan.
    known_nfts = {
        # TODO
        '0x57f1887a8BF19b14fC0dF6Fd9B2acc9Af147eA85',  # ENS domains
        '0x01234567bac6fF94d7E4f0EE23119CF848F93245',  # EthBlocks
        '0xD7aBCFd05a9ba3ACbc164624402fB2E95eC41be6',  # EthJuanchos
        '0xeF81c2C98cb9718003A89908e6bd1a5fA8A098A3',  # SpaceShiba
        '0xD1E5b0FF1287aA9f9A268759062E4Ab08b9Dacbe',  # .crypto Domain
        '0x437a6B880d4b3Be9ed93BD66D6B7f872fc0f5b5E',  # Soda
    }
    for log in logs:
        try:
            event = decode_logs([log])
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit — narrow to Exception.
        except Exception:
            if log.address not in known_nfts:
                # Unknown decode failure: surface it instead of hiding it.
                print('unable to decode logs, figure out why')
                print(log)
        else:
            self._transfers.append(event)
def unwrap(self) -> List[Wrapper]:
    """Expand self.wrapper (one address or a list) into one Wrapper entry
    per vault the wrapper has ever received shares from."""
    registry = Registry()
    wrapper_list = (
        [self.wrapper] if isinstance(self.wrapper, str) else self.wrapper
    )
    transfer_topics = construct_event_topic_set(
        filter_by_name('Transfer', registry.vaults[0].vault.abi)[0],
        web3.codec,
        {'receiver': wrapper_list},
    )
    vault_addresses = [str(v.vault) for v in registry.vaults]
    # Start scanning at the earliest vault deployment; creation-block lookups
    # are fanned out across threads.
    from_block = min(
        ThreadPoolExecutor().map(contract_creation_block, vault_addresses))

    # wrapper -> {vault addresses it received shares from}
    deposits = defaultdict(set)
    for entry in decode_logs(
            get_logs_asap(vault_addresses, transfer_topics, from_block)):
        deposits[entry['receiver']].add(entry.address)

    return [
        Wrapper(name=v.name, vault=str(v.vault), wrapper=w)
        for w in wrapper_list
        for v in registry.vaults
        if str(v.vault) in deposits[w]
    ]