def export_blocks_and_transactions(start_block, end_block, batch_size, provider_uri, max_workers,
                                   blocks_output, transactions_output, chain='ethereum'):
    """Exports blocks and transactions."""
    provider_uri = check_classic_provider_uri(chain, provider_uri)
    if blocks_output is None and transactions_output is None:
        raise ValueError(
            'Either --blocks-output or --transactions-output options must be provided')

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
        max_workers=max_workers,
        item_exporter=blocks_and_transactions_item_exporter(blocks_output, transactions_output),
        export_blocks=blocks_output is not None,
        export_transactions=transactions_output is not None)
    job.run()
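
# Illustrative usage (not from the original source): export a small block range to
# local CSV files. The provider URI and output paths are placeholders.
def example_export_blocks_and_transactions():
    export_blocks_and_transactions(
        start_block=0,
        end_block=99,
        batch_size=10,
        provider_uri='http://localhost:8545',
        max_workers=5,
        blocks_output='blocks.csv',
        transactions_output='transactions.csv')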
def test_export_blocks_job(tmpdir, start_block, end_block, batch_size, resource_group, web3_provider_type):
    blocks_output_file = str(tmpdir.join('actual_blocks.csv'))
    transactions_output_file = str(tmpdir.join('actual_transactions.csv'))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file), batch=True)),
        max_workers=5,
        item_exporter=blocks_and_transactions_item_exporter(blocks_output_file, transactions_output_file),
        export_blocks=blocks_output_file is not None,
        export_transactions=transactions_output_file is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_blocks.csv'), read_file(blocks_output_file))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_transactions.csv'), read_file(transactions_output_file))
def export_traces(start_block, end_block, batch_size, output, max_workers, provider_uri,
                  genesis_traces, daofork_traces, timeout=60, chain='ethereum'):
    """Exports traces from parity node."""
    if chain == 'classic' and daofork_traces:
        raise ValueError(
            'Classic chain does not include daofork traces. Disable daofork traces with --no-daofork-traces option.')

    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri, timeout=timeout))),
        item_exporter=traces_item_exporter(output),
        max_workers=max_workers,
        include_genesis_traces=genesis_traces,
        include_daofork_traces=daofork_traces)
    job.run()
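
# Illustrative call (not from the original source); the provider URI and output
# path are placeholders. Genesis and DAO-fork traces are requested here, which
# the guard above forbids only for chain='classic'.
def example_export_traces():
    export_traces(
        start_block=0,
        end_block=99,
        batch_size=10,
        output='traces.csv',
        max_workers=5,
        provider_uri='http://localhost:8545',
        genesis_traces=True,
        daofork_traces=True)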
def _extract_tokens(self, contracts):
    exporter = InMemoryItemExporter(item_types=['token'])
    job = ExtractTokensJob(
        contracts_iterable=contracts,
        web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
        max_workers=self.max_workers,
        item_exporter=exporter)
    job.run()
    tokens = exporter.get_items('token')
    return tokens
def _export_traces(self, start_block, end_block):
    exporter = InMemoryItemExporter(item_types=['trace'])
    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=self.batch_size,
        web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
        max_workers=self.max_workers,
        item_exporter=exporter)
    job.run()
    traces = exporter.get_items('trace')
    return traces
def export_token_transfers(start_block, end_block, batch_size, output, max_workers, provider_uri, tokens):
    """Exports ERC20/ERC721 transfers."""
    job = ExportTokenTransfersJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
        item_exporter=token_transfers_item_exporter(output),
        max_workers=max_workers,
        tokens=tokens)
    job.run()
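
# Illustrative call (not from the original source): the `tokens` argument is
# assumed to be an optional list of token contract addresses used to filter the
# exported transfers; None is assumed to export transfers for all tokens. All
# other values are placeholders.
def example_export_token_transfers():
    export_token_transfers(
        start_block=0,
        end_block=99,
        batch_size=10,
        output='token_transfers.csv',
        max_workers=5,
        provider_uri='http://localhost:8545',
        tokens=None)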
def export_geth_traces(start_block, end_block, batch_size, output, max_workers, provider_uri):
    """Exports traces from geth node."""
    job = ExportGethTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
        max_workers=max_workers,
        item_exporter=geth_traces_item_exporter(output))
    job.run()
def export_contracts(batch_size, contract_addresses, output, max_workers, provider_uri, chain='ethereum'):
    """Exports contracts bytecode and sighashes."""
    check_classic_provider_uri(chain, provider_uri)
    with smart_open(contract_addresses, 'r') as contract_addresses_file:
        contract_addresses = (contract_address.strip() for contract_address in contract_addresses_file
                              if contract_address.strip())
        job = ExportContractsJob(
            contract_addresses_iterable=contract_addresses,
            batch_size=batch_size,
            batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
            item_exporter=contracts_item_exporter(output),
            max_workers=max_workers)
        job.run()
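
# Illustrative usage (not from the original source): the contract-addresses input
# is a plain text file with one address per line; blank lines are skipped by the
# generator above. File names and the provider URI are placeholders.
def example_export_contracts():
    export_contracts(
        batch_size=100,
        contract_addresses='contract_addresses.txt',
        output='contracts.csv',
        max_workers=5,
        provider_uri='http://localhost:8545')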
def test_export_tokens_job(tmpdir, token_addresses, resource_group, web3_provider_type):
    output_file = str(tmpdir.join('tokens.csv'))

    job = ExportTokensJob(
        token_addresses_iterable=token_addresses,
        web3=ThreadLocalProxy(
            lambda: Web3(get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file)))),
        item_exporter=tokens_item_exporter(output_file),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_tokens.csv'), read_file(output_file))
def extract_tokens(contracts, provider_uri, output, max_workers):
    """Extracts tokens from contracts file."""
    set_max_field_size_limit()

    with smart_open(contracts, 'r') as contracts_file:
        if contracts.endswith('.json'):
            contracts_iterable = (json.loads(line) for line in contracts_file)
        else:
            contracts_iterable = csv.DictReader(contracts_file)

        job = ExtractTokensJob(
            contracts_iterable=contracts_iterable,
            web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
            max_workers=max_workers,
            item_exporter=tokens_item_exporter(output))
        job.run()
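
# Illustrative usage (not from the original source): the contracts input may be
# newline-delimited JSON (one contract object per line) or CSV with a header row;
# the .json extension check above selects the parser. Paths and the provider URI
# are placeholders.
def example_extract_tokens():
    extract_tokens(
        contracts='contracts.json',
        provider_uri='http://localhost:8545',
        output='tokens.csv',
        max_workers=5)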
def export_tokens(token_addresses, output, max_workers, provider_uri, chain='ethereum'):
    """Exports ERC20/ERC721 tokens."""
    provider_uri = check_classic_provider_uri(chain, provider_uri)
    with smart_open(token_addresses, 'r') as token_addresses_file:
        job = ExportTokensJob(
            token_addresses_iterable=(token_address.strip() for token_address in token_addresses_file),
            web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
            item_exporter=tokens_item_exporter(output),
            max_workers=max_workers)
        job.run()
def test_export_traces_job(tmpdir, start_block, end_block, resource_group, web3_provider_type):
    traces_output_file = str(tmpdir.join('actual_traces.csv'))

    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=1,
        web3=ThreadLocalProxy(
            lambda: Web3(get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file)))),
        max_workers=5,
        item_exporter=traces_item_exporter(traces_output_file),
    )
    job.run()

    print('=====================')
    print(read_file(traces_output_file))

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_traces.csv'), read_file(traces_output_file))
def test_export_token_transfers_job(tmpdir, start_block, end_block, batch_size, resource_group, web3_provider_type):
    output_file = str(tmpdir.join('token_transfers.csv'))

    job = ExportTokenTransfersJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=ThreadLocalProxy(
            lambda: Web3(get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file)))),
        item_exporter=token_transfers_item_exporter(output_file),
        max_workers=5)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_token_transfers.csv'), read_file(output_file))
def test_export_geth_traces_job(tmpdir, start_block, end_block, resource_group, web3_provider_type):
    traces_output_file = str(tmpdir.join('actual_geth_traces.json'))

    job = ExportGethTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=1,
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file), batch=True)),
        max_workers=5,
        item_exporter=geth_traces_item_exporter(traces_output_file),
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'geth_traces.json'), read_file(traces_output_file))
def test_export_contracts_job(tmpdir, batch_size, contract_addresses, output_format, resource_group, web3_provider_type):
    contracts_output_file = str(tmpdir.join('actual_contracts.' + output_format))

    job = ExportContractsJob(
        contract_addresses_iterable=contract_addresses,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file), batch=True)),
        max_workers=5,
        item_exporter=contracts_item_exporter(contracts_output_file)
    )
    job.run()

    print('=====================')
    print(read_file(contracts_output_file))

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_contracts.' + output_format), read_file(contracts_output_file))
def stream(last_synced_block_file, lag, provider_uri, output, start_block, entity_types,
           period_seconds=10, batch_size=2, block_batch_size=10, max_workers=5, log_file=None, pid_file=None):
    """Streams all data types to console or Google Pub/Sub."""
    configure_logging(log_file)
    configure_signals()
    entity_types = parse_entity_types(entity_types)
    validate_entity_types(entity_types, output)

    from celoetl.streaming.item_exporter_creator import create_item_exporter
    from celoetl.streaming.eth_streamer_adapter import EthStreamerAdapter
    from blockchainetl.streaming.streamer import Streamer

    # TODO: Implement fallback mechanism for provider uris instead of picking randomly
    provider_uri = pick_random_provider_uri(provider_uri)
    logging.info('Using ' + provider_uri)

    streamer_adapter = EthStreamerAdapter(
        batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
        item_exporter=create_item_exporter(output),
        batch_size=batch_size,
        max_workers=max_workers,
        entity_types=entity_types)
    streamer = Streamer(
        blockchain_streamer_adapter=streamer_adapter,
        last_synced_block_file=last_synced_block_file,
        lag=lag,
        start_block=start_block,
        period_seconds=period_seconds,
        block_batch_size=block_batch_size,
        pid_file=pid_file)
    streamer.stream()
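
# Illustrative invocation (not from the original source). Passing output=None is
# assumed to select the console exporter, matching the docstring above, and
# entity_types is assumed to be a comma-separated string as handled by
# parse_entity_types(). The provider URI and file name are placeholders.
def example_stream_to_console():
    stream(
        last_synced_block_file='last_synced_block.txt',
        lag=5,
        provider_uri='http://localhost:8545',
        output=None,
        start_block=1000000,
        entity_types='block,transaction,log')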
def export_receipts_and_logs(batch_size, transaction_hashes, provider_uri, max_workers,
                             receipts_output, logs_output, chain='ethereum'):
    """Exports receipts and logs."""
    provider_uri = check_classic_provider_uri(chain, provider_uri)
    with smart_open(transaction_hashes, 'r') as transaction_hashes_file:
        job = ExportReceiptsJob(
            transaction_hashes_iterable=(transaction_hash.strip() for transaction_hash in transaction_hashes_file),
            batch_size=batch_size,
            batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
            max_workers=max_workers,
            item_exporter=receipts_and_logs_item_exporter(receipts_output, logs_output),
            export_receipts=receipts_output is not None,
            export_logs=logs_output is not None)
        job.run()
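
# Illustrative usage (not from the original source): the transaction-hashes input
# is a plain text file with one hash per line, as stripped by the generator above.
# File names and the provider URI are placeholders.
def example_export_receipts_and_logs():
    export_receipts_and_logs(
        batch_size=100,
        transaction_hashes='transaction_hashes.txt',
        provider_uri='http://localhost:8545',
        max_workers=5,
        receipts_output='receipts.csv',
        logs_output='logs.csv')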
def test_export_receipts_job(tmpdir, batch_size, transaction_hashes, output_format, resource_group, web3_provider_type):
    receipts_output_file = str(tmpdir.join('actual_receipts.' + output_format))
    logs_output_file = str(tmpdir.join('actual_logs.' + output_format))

    job = ExportReceiptsJob(
        transaction_hashes_iterable=transaction_hashes,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_web3_provider(web3_provider_type, lambda file: read_resource(resource_group, file), batch=True)),
        max_workers=5,
        item_exporter=receipts_and_logs_item_exporter(receipts_output_file, logs_output_file),
        export_receipts=receipts_output_file is not None,
        export_logs=logs_output_file is not None
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.' + output_format), read_file(receipts_output_file))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.' + output_format), read_file(logs_output_file))
def test_stream(tmpdir, start_block, end_block, batch_size, resource_group, entity_types, provider_type):
    try:
        os.remove('last_synced_block.txt')
    except OSError:
        pass

    blocks_output_file = str(tmpdir.join('actual_blocks.json'))
    transactions_output_file = str(tmpdir.join('actual_transactions.json'))
    logs_output_file = str(tmpdir.join('actual_logs.json'))
    token_transfers_output_file = str(tmpdir.join('actual_token_transfers.json'))
    traces_output_file = str(tmpdir.join('actual_traces.json'))
    contracts_output_file = str(tmpdir.join('actual_contracts.json'))
    tokens_output_file = str(tmpdir.join('actual_tokens.json'))

    streamer_adapter = EthStreamerAdapter(
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_web3_provider(provider_type,
                                      read_resource_lambda=lambda file: read_resource(resource_group, file),
                                      batch=True)),
        batch_size=batch_size,
        item_exporter=CompositeItemExporter(
            filename_mapping={
                'block': blocks_output_file,
                'transaction': transactions_output_file,
                'log': logs_output_file,
                'token_transfer': token_transfers_output_file,
                'trace': traces_output_file,
                'contract': contracts_output_file,
                'token': tokens_output_file,
            }),
        entity_types=entity_types,
    )
    streamer = Streamer(
        blockchain_streamer_adapter=streamer_adapter,
        start_block=start_block,
        end_block=end_block,
        retry_errors=False)
    streamer.stream()

    if 'block' in entity_types:
        print('=====================')
        print(read_file(blocks_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_blocks.json'), read_file(blocks_output_file))

    if 'transaction' in entity_types:
        print('=====================')
        print(read_file(transactions_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_transactions.json'), read_file(transactions_output_file))

    if 'log' in entity_types:
        print('=====================')
        print(read_file(logs_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_logs.json'), read_file(logs_output_file))

    if 'token_transfer' in entity_types:
        print('=====================')
        print(read_file(token_transfers_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_token_transfers.json'), read_file(token_transfers_output_file))

    if 'trace' in entity_types:
        print('=====================')
        print(read_file(traces_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_traces.json'), read_file(traces_output_file))

    if 'contract' in entity_types:
        print('=====================')
        print(read_file(contracts_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_contracts.json'), read_file(contracts_output_file))

    if 'token' in entity_types:
        print('=====================')
        print(read_file(tokens_output_file))
        compare_lines_ignore_order(
            read_resource(resource_group, 'expected_tokens.json'), read_file(tokens_output_file))
def export_all_common(partitions, output_dir, provider_uri, max_workers, batch_size):

    for batch_start_block, batch_end_block, partition_dir in partitions:
        # # # start # # #

        start_time = time()

        padded_batch_start_block = str(batch_start_block).zfill(8)
        padded_batch_end_block = str(batch_end_block).zfill(8)
        block_range = '{padded_batch_start_block}-{padded_batch_end_block}'.format(
            padded_batch_start_block=padded_batch_start_block,
            padded_batch_end_block=padded_batch_end_block,
        )
        file_name_suffix = '{padded_batch_start_block}_{padded_batch_end_block}'.format(
            padded_batch_start_block=padded_batch_start_block,
            padded_batch_end_block=padded_batch_end_block,
        )

        # # # blocks_and_transactions # # #

        blocks_output_dir = '{output_dir}/blocks{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(blocks_output_dir), exist_ok=True)

        transactions_output_dir = '{output_dir}/transactions{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(transactions_output_dir), exist_ok=True)

        blocks_file = '{blocks_output_dir}/blocks_{file_name_suffix}.csv'.format(
            blocks_output_dir=blocks_output_dir,
            file_name_suffix=file_name_suffix,
        )
        transactions_file = '{transactions_output_dir}/transactions_{file_name_suffix}.csv'.format(
            transactions_output_dir=transactions_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logger.info('Exporting blocks {block_range} to {blocks_file}'.format(
            block_range=block_range,
            blocks_file=blocks_file,
        ))
        logger.info('Exporting transactions from blocks {block_range} to {transactions_file}'.format(
            block_range=block_range,
            transactions_file=transactions_file,
        ))

        job = ExportBlocksJob(
            start_block=batch_start_block,
            end_block=batch_end_block,
            batch_size=batch_size,
            batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
            max_workers=max_workers,
            item_exporter=blocks_and_transactions_item_exporter(blocks_file, transactions_file),
            export_blocks=blocks_file is not None,
            export_transactions=transactions_file is not None)
        job.run()

        # # # token_transfers # # #

        token_transfers_file = None
        if is_log_filter_supported(provider_uri):
            token_transfers_output_dir = '{output_dir}/token_transfers{partition_dir}'.format(
                output_dir=output_dir,
                partition_dir=partition_dir,
            )
            os.makedirs(os.path.dirname(token_transfers_output_dir), exist_ok=True)

            token_transfers_file = '{token_transfers_output_dir}/token_transfers_{file_name_suffix}.csv'.format(
                token_transfers_output_dir=token_transfers_output_dir,
                file_name_suffix=file_name_suffix,
            )
            logger.info('Exporting ERC20 transfers from blocks {block_range} to {token_transfers_file}'.format(
                block_range=block_range,
                token_transfers_file=token_transfers_file,
            ))

            job = ExportTokenTransfersJob(
                start_block=batch_start_block,
                end_block=batch_end_block,
                batch_size=batch_size,
                web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
                item_exporter=token_transfers_item_exporter(token_transfers_file),
                max_workers=max_workers)
            job.run()

        # # # receipts_and_logs # # #

        cache_output_dir = '{output_dir}/.tmp{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(cache_output_dir), exist_ok=True)

        transaction_hashes_file = '{cache_output_dir}/transaction_hashes_{file_name_suffix}.csv'.format(
            cache_output_dir=cache_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logger.info('Extracting hash column from transaction file {transactions_file}'.format(
            transactions_file=transactions_file,
        ))
        extract_csv_column_unique(transactions_file, transaction_hashes_file, 'hash')

        receipts_output_dir = '{output_dir}/receipts{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(receipts_output_dir), exist_ok=True)

        logs_output_dir = '{output_dir}/logs{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(logs_output_dir), exist_ok=True)

        receipts_file = '{receipts_output_dir}/receipts_{file_name_suffix}.csv'.format(
            receipts_output_dir=receipts_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logs_file = '{logs_output_dir}/logs_{file_name_suffix}.csv'.format(
            logs_output_dir=logs_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logger.info('Exporting receipts and logs from blocks {block_range} to {receipts_file} and {logs_file}'.format(
            block_range=block_range,
            receipts_file=receipts_file,
            logs_file=logs_file,
        ))

        with smart_open(transaction_hashes_file, 'r') as transaction_hashes:
            job = ExportReceiptsJob(
                transaction_hashes_iterable=(transaction_hash.strip() for transaction_hash in transaction_hashes),
                batch_size=batch_size,
                batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
                max_workers=max_workers,
                item_exporter=receipts_and_logs_item_exporter(receipts_file, logs_file),
                export_receipts=receipts_file is not None,
                export_logs=logs_file is not None)
            job.run()

        # # # contracts # # #

        contract_addresses_file = '{cache_output_dir}/contract_addresses_{file_name_suffix}.csv'.format(
            cache_output_dir=cache_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logger.info('Extracting contract_address from receipt file {receipts_file}'.format(
            receipts_file=receipts_file,
        ))
        extract_csv_column_unique(receipts_file, contract_addresses_file, 'contract_address')

        contracts_output_dir = '{output_dir}/contracts{partition_dir}'.format(
            output_dir=output_dir,
            partition_dir=partition_dir,
        )
        os.makedirs(os.path.dirname(contracts_output_dir), exist_ok=True)

        contracts_file = '{contracts_output_dir}/contracts_{file_name_suffix}.csv'.format(
            contracts_output_dir=contracts_output_dir,
            file_name_suffix=file_name_suffix,
        )
        logger.info('Exporting contracts from blocks {block_range} to {contracts_file}'.format(
            block_range=block_range,
            contracts_file=contracts_file,
        ))

        with smart_open(contract_addresses_file, 'r') as contract_addresses_file:
            contract_addresses = (contract_address.strip() for contract_address in contract_addresses_file
                                  if contract_address.strip())
            job = ExportContractsJob(
                contract_addresses_iterable=contract_addresses,
                batch_size=batch_size,
                batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
                item_exporter=contracts_item_exporter(contracts_file),
                max_workers=max_workers)
            job.run()

        # # # tokens # # #

        if token_transfers_file is not None:
            token_addresses_file = '{cache_output_dir}/token_addresses_{file_name_suffix}'.format(
                cache_output_dir=cache_output_dir,
                file_name_suffix=file_name_suffix,
            )
            logger.info('Extracting token_address from token_transfers file {token_transfers_file}'.format(
                token_transfers_file=token_transfers_file,
            ))
            extract_csv_column_unique(token_transfers_file, token_addresses_file, 'token_address')

            tokens_output_dir = '{output_dir}/tokens{partition_dir}'.format(
                output_dir=output_dir,
                partition_dir=partition_dir,
            )
            os.makedirs(os.path.dirname(tokens_output_dir), exist_ok=True)

            tokens_file = '{tokens_output_dir}/tokens_{file_name_suffix}.csv'.format(
                tokens_output_dir=tokens_output_dir,
                file_name_suffix=file_name_suffix,
            )
            logger.info('Exporting tokens from blocks {block_range} to {tokens_file}'.format(
                block_range=block_range,
                tokens_file=tokens_file,
            ))

            with smart_open(token_addresses_file, 'r') as token_addresses:
                job = ExportTokensJob(
                    token_addresses_iterable=(token_address.strip() for token_address in token_addresses),
                    web3=ThreadLocalProxy(lambda: Web3(get_provider_from_uri(provider_uri))),
                    item_exporter=tokens_item_exporter(tokens_file),
                    max_workers=max_workers)
                job.run()

        # # # finish # # #

        shutil.rmtree(os.path.dirname(cache_output_dir))
        end_time = time()
        time_diff = round(end_time - start_time, 5)
        logger.info('Exporting blocks {block_range} took {time_diff} seconds'.format(
            block_range=block_range,
            time_diff=time_diff,
        ))
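
# Hypothetical helper (not part of the original code): builds the
# (batch_start_block, batch_end_block, partition_dir) tuples that
# export_all_common() unpacks above. The partition_dir layout shown here is an
# assumption for illustration only.
def example_block_partitions(start_block, end_block, partition_batch_size):
    for batch_start in range(start_block, end_block + 1, partition_batch_size):
        batch_end = min(batch_start + partition_batch_size - 1, end_block)
        partition_dir = '/start_block={start:08d}/end_block={end:08d}'.format(
            start=batch_start, end=batch_end)
        yield batch_start, batch_end, partition_dir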