def test_export_receipts_job(tmpdir, batch_size, resource_group):
    receipts_output_file = tmpdir.join('actual_receipts.csv')
    logs_output_file = tmpdir.join('actual_logs.csv')

    job = ExportReceiptsJob(
        tx_hashes_iterable=[
            '0x04cbcb236043d8fb7839e07bbc7f5eed692fb2ca55d897f1101eac3e3ad4fab8',
            '0x463d53f0ad57677a3b430a007c1c31d15d62c37fab5eee598551697c297c235c',
            '0x05287a561f218418892ab053adfb3d919860988b19458c570c5c30f51c146f02',
            '0xcea6f89720cc1d2f46cc7a935463ae0b99dd5fad9c91bb7357de5421511cee49',
        ],
        batch_size=batch_size,
        ipc_wrapper=ThreadLocalProxy(
            lambda: MockIPCWrapper(lambda file: read_resource(resource_group, file))),
        max_workers=5,
        item_exporter=export_receipts_job_item_exporter(receipts_output_file, logs_output_file),
        export_receipts=receipts_output_file is not None,
        export_logs=logs_output_file is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.csv'), read_file(receipts_output_file))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.csv'), read_file(logs_output_file))

def test_export_receipts_job(
        tmpdir, batch_size, transaction_hashes, output_format, resource_group, web3_provider_type):
    receipts_output_file = tmpdir.join('actual_receipts.' + output_format)
    logs_output_file = tmpdir.join('actual_logs.' + output_format)

    job = ExportReceiptsJob(
        transaction_hashes_iterable=transaction_hashes,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(lambda: get_web3_provider(
            web3_provider_type, lambda file: read_resource(resource_group, file), batch=True)),
        max_workers=5,
        item_exporter=receipts_and_logs_item_exporter(receipts_output_file, logs_output_file),
        export_receipts=receipts_output_file is not None,
        export_logs=logs_output_file is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.' + output_format),
        read_file(receipts_output_file))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.' + output_format),
        read_file(logs_output_file))

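# A sketch of how the web3_provider_type switch used above might work: return
# a canned-response mock for unit runs and a real provider for integration
# runs. This helper and the 'mock'/'online' names are assumptions for
# illustration, not necessarily the project's actual test code.
def get_web3_provider(web3_provider_type, read_resource_lambda=None, batch=False):
    if web3_provider_type == 'mock':
        return MockBatchWeb3Provider(read_resource_lambda)
    if web3_provider_type == 'online':
        return get_provider_from_uri('https://mainnet.infura.io', batch=batch)
    raise ValueError('Unknown web3 provider type: ' + web3_provider_type)
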
def export_receipts_and_logs(batch_size, transaction_hashes, provider_uri, max_workers,
                             receipts_output, logs_output, chain='ethereum'):
    """Exports receipts and logs."""
    provider_uri = check_classic_provider_uri(chain, provider_uri)
    with smart_open(transaction_hashes, 'r') as transaction_hashes_file:
        job = ExportReceiptsJob(
            transaction_hashes_iterable=(
                transaction_hash.strip() for transaction_hash in transaction_hashes_file),
            batch_size=batch_size,
            batch_web3_provider=ThreadLocalProxy(
                lambda: get_provider_from_uri(provider_uri, batch=True)),
            max_workers=max_workers,
            item_exporter=receipts_and_logs_item_exporter(receipts_output, logs_output),
            export_receipts=receipts_output is not None,
            export_logs=logs_output is not None)
        job.run()

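# A hypothetical call to the function above; file names and the provider URI
# are placeholders. transaction_hashes points to a text file with one
# transaction hash per line, and passing None for logs_output skips log export.
export_receipts_and_logs(
    batch_size=100,
    transaction_hashes='transaction_hashes.txt',
    provider_uri='https://mainnet.infura.io',
    max_workers=5,
    receipts_output='receipts.csv',
    logs_output=None,
)
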
def _export_receipts_and_logs(self, transactions):
    exporter = InMemoryItemExporter(item_types=['receipt', 'log'])
    job = ExportReceiptsJob(
        transaction_hashes_iterable=(transaction['hash'] for transaction in transactions),
        batch_size=self.batch_size,
        batch_web3_provider=self.batch_web3_provider,
        max_workers=self.max_workers,
        item_exporter=exporter,
        export_receipts=self._should_export(EntityType.RECEIPT),
        export_logs=self._should_export(EntityType.LOG))
    job.run()
    receipts = exporter.get_items('receipt')
    logs = exporter.get_items('log')
    return receipts, logs

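# For context, a minimal sketch of what _should_export could look like in an
# adapter like the one above, assuming the requested entity types are kept in
# self.entity_types. The real implementation may add dependency rules (e.g.
# forcing log export when a derived entity type needs logs).
def _should_export(self, entity_type):
    return entity_type in self.entity_types
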
def export_receipts_and_logs(self, transactions: Iterable) -> Tuple[Iterable, Iterable]:
    """Export receipts and logs for specified transaction hashes."""
    exporter = InMemoryItemExporter(item_types=["receipt", "log"])
    job = ExportReceiptsJob(
        transaction_hashes_iterable=(transaction["hash"] for transaction in transactions),
        batch_size=self.batch_size,
        batch_web3_provider=self.batch_web3_provider,
        max_workers=self.max_workers,
        item_exporter=exporter,
        export_receipts=True,
        export_logs=True,  # logs are collected and returned below, so they must be exported too
    )
    job.run()
    receipts = exporter.get_items("receipt")
    logs = exporter.get_items("log")
    return receipts, logs

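# A hypothetical usage of the method above, assuming `adapter` is an already
# configured instance and each transaction is a dict with a "hash" key, as the
# generator expression requires:
transactions = [
    {"hash": "0x04cbcb236043d8fb7839e07bbc7f5eed692fb2ca55d897f1101eac3e3ad4fab8"},
    {"hash": "0x463d53f0ad57677a3b430a007c1c31d15d62c37fab5eee598551697c297c235c"},
]
receipts, logs = adapter.export_receipts_and_logs(transactions)
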
def test_export_receipts_job(tmpdir, batch_size, tx_hashes, resource_group):
    receipts_output_file = tmpdir.join('actual_receipts.csv')
    logs_output_file = tmpdir.join('actual_logs.csv')

    job = ExportReceiptsJob(
        tx_hashes_iterable=tx_hashes,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(
            lambda: MockBatchWeb3Provider(lambda file: read_resource(resource_group, file))),
        max_workers=5,
        item_exporter=export_receipts_job_item_exporter(receipts_output_file, logs_output_file),
        export_receipts=receipts_output_file is not None,
        export_logs=logs_output_file is not None)
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_receipts.csv'), read_file(receipts_output_file))
    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_logs.csv'), read_file(logs_output_file))

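# A minimal sketch of a mock batch provider compatible with the test above.
# The make_batch_request interface mirrors batch JSON-RPC providers; the
# fixture-naming scheme here is an assumption for illustration, not the
# project's actual test helper.
import json

class MockBatchWeb3Provider:
    def __init__(self, read_resource):
        self.read_resource = read_resource

    def make_batch_request(self, text):
        # Parse the batched JSON-RPC request, map it to a canned fixture file,
        # and return the parsed response instead of calling a real node.
        batch = json.loads(text)
        method = batch[0]['method']  # e.g. 'eth_getTransactionReceipt'
        return json.loads(self.read_resource(method + '_resp.json'))
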
parser.add_argument('--receipts-output', default=None, type=str,
                    help='The output file for receipts. If not provided, receipts will not be '
                         'exported. Use "-" for stdout')
parser.add_argument('--logs-output', default=None, type=str,
                    help='The output file for receipt logs. If not provided, receipt logs will '
                         'not be exported. Use "-" for stdout')

args = parser.parse_args()

with smart_open(args.tx_hashes, 'r') as tx_hashes_file:
    job = ExportReceiptsJob(
        tx_hashes_iterable=(tx_hash.strip() for tx_hash in tx_hashes_file),
        batch_size=args.batch_size,
        batch_web3_provider=ThreadLocalProxy(
            lambda: get_provider_from_uri(args.provider_uri, batch=True)),
        max_workers=args.max_workers,
        item_exporter=receipts_and_logs_item_exporter(args.receipts_output, args.logs_output),
        export_receipts=args.receipts_output is not None,
        export_logs=args.logs_output is not None)
    job.run()

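# A hypothetical invocation of this script (assuming it also defines
# --tx-hashes, --batch-size, --provider-uri, and --max-workers on the same
# parser, and is saved as export_receipts_and_logs.py):
#
#   python export_receipts_and_logs.py \
#       --tx-hashes tx_hashes.txt \
#       --provider-uri https://mainnet.infura.io \
#       --batch-size 100 \
#       --max-workers 5 \
#       --receipts-output receipts.csv \
#       --logs-output logs.csv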