def export_traces(start_block, end_block, batch_size, output, max_workers,
                  provider_uri, genesis_traces, daofork_traces, timeout=60,
                  chain='ethereum'):
    """Exports traces from parity node.

    Args:
        start_block: First block number to export (inclusive).
        end_block: Last block number to export (inclusive).
        batch_size: Number of blocks handled per batch by the job.
        output: Output target passed to traces_item_exporter ('-' = stdout).
        max_workers: Maximum number of worker threads.
        provider_uri: URI of the web3 provider (ipc/http).
        genesis_traces: Whether to include synthetic genesis traces.
        daofork_traces: Whether to include synthetic DAO-fork traces.
        timeout: Provider request timeout in seconds.
        chain: Chain name; 'classic' rejects daofork traces.

    Raises:
        ValueError: If chain is 'classic' and daofork traces are requested.
    """
    # Ethereum Classic did not apply the DAO-fork state change, so
    # requesting daofork traces for it is a configuration error.
    # Idiomatic truthiness check instead of `== True`.
    if chain == 'classic' and daofork_traces:
        raise ValueError(
            'Classic chain does not include daofork traces. '
            'Disable daofork traces with --no-daofork-traces option.'
        )

    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        # ThreadLocalProxy gives each worker thread its own Web3 instance.
        web3=ThreadLocalProxy(lambda: Web3(
            get_provider_from_uri(provider_uri, timeout=timeout))),
        item_exporter=traces_item_exporter(output),
        max_workers=max_workers,
        include_genesis_traces=genesis_traces,
        include_daofork_traces=daofork_traces)
    job.run()
def _export_traces(self, start_block, end_block):
    """Run an ExportTracesJob over [start_block, end_block] and return the
    collected 'trace' items from an in-memory exporter."""
    collector = InMemoryItemExporter(item_types=['trace'])
    trace_job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=self.batch_size,
        web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
        max_workers=self.max_workers,
        item_exporter=collector)
    trace_job.run()
    return collector.get_items('trace')
def export_traces(start_block, end_block, batch_size, output, max_workers,
                  provider_uri, genesis_traces, daofork_traces):
    """Exports traces from parity node."""
    # Each worker thread gets its own Web3 instance via the thread-local proxy.
    web3_proxy = ThreadLocalProxy(
        lambda: Web3(get_provider_from_uri(provider_uri)))
    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        web3=web3_proxy,
        item_exporter=traces_item_exporter(output),
        max_workers=max_workers,
        include_genesis_traces=genesis_traces,
        include_daofork_traces=daofork_traces)
    job.run()
def test_export_traces_job(tmpdir, start_block, end_block, resource_group, web3_provider_type):
    """End-to-end check: traces exported for the block range must match the
    expected fixture for the resource group (line order ignored)."""
    actual_file = tmpdir.join('actual_traces.csv')

    # Named helper instead of an inline lambda for resource lookup.
    def read_group_resource(file):
        return read_resource(resource_group, file)

    job = ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=1,
        web3=ThreadLocalProxy(lambda: Web3(
            get_web3_provider(web3_provider_type, read_group_resource))),
        max_workers=5,
        item_exporter=traces_item_exporter(actual_file),
    )
    job.run()

    compare_lines_ignore_order(
        read_resource(resource_group, 'expected_traces.csv'),
        read_file(actual_file))
def export_traces(
    self,
    start_block: int,
    end_block: int,
    include_genesis_traces: bool = False,
    include_daofork_traces: bool = False,
) -> Iterable[Dict]:
    """Export traces for specified block range.

    Runs an ExportTracesJob over [start_block, end_block] and returns the
    'trace' items gathered by an in-memory exporter.
    """
    collector = InMemoryItemExporter(item_types=["trace"])
    ExportTracesJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=self.batch_size,
        web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
        max_workers=self.max_workers,
        item_exporter=collector,
        include_genesis_traces=include_genesis_traces,
        include_daofork_traces=include_daofork_traces,
    ).run()
    return collector.get_items("trace")
'--output', default='-', type=str, help='The output file. If not specified stdout is used.') parser.add_argument('-w', '--max-workers', default=5, type=int, help='The maximum number of workers.') parser.add_argument( '-p', '--provider-uri', required=True, type=str, help='The URI of the web3 provider e.g. ' 'file://$HOME/.local/share/io.parity.ethereum/jsonrpc.ipc or http://localhost:8545/' ) args = parser.parse_args() job = ExportTracesJob( start_block=args.start_block, end_block=args.end_block, batch_size=args.batch_size, web3=ThreadLocalProxy( lambda: Web3(get_provider_from_uri(args.provider_uri))), item_exporter=traces_item_exporter(args.output), max_workers=args.max_workers) job.run()