Example #1
    def _start(self):
        self.blocks_output_file = get_file_handle(self.blocks_output,
                                                  binary=True)
        self.transactions_output_file = get_file_handle(
            self.transactions_output, binary=True)

        self.blocks_exporter = CsvItemExporter(self.blocks_output_file)
        self.transactions_exporter = CsvItemExporter(
            self.transactions_output_file)
Example #2
    def _start(self):
        super()._start()

        self.blocks_output_file = get_file_handle(self.blocks_output, binary=True)
        self.blocks_exporter = CsvItemExporter(
            self.blocks_output_file, fields_to_export=self.block_fields_to_export)

        self.transactions_output_file = get_file_handle(self.transactions_output, binary=True)
        self.transactions_exporter = CsvItemExporter(
            self.transactions_output_file, fields_to_export=self.transaction_fields_to_export)
Example #3
    def _start(self):
        # Using bounded executor prevents unlimited queue growth
        # and allows monitoring in-progress futures and failing fast in case of errors.
        self.executor = FailSafeExecutor(BoundedExecutor(1, self.max_workers))

        self.blocks_output_file = get_file_handle(self.blocks_output,
                                                  binary=True)
        self.blocks_exporter = CsvItemExporter(
            self.blocks_output_file,
            fields_to_export=self.block_fields_to_export)

        self.transactions_output_file = get_file_handle(
            self.transactions_output, binary=True)
        self.transactions_exporter = CsvItemExporter(
            self.transactions_output_file,
            fields_to_export=self.transaction_fields_to_export)
Example #4
    def open(self):
        for item_type, filename in self.filename_mapping.items():
            self.file_mapping[item_type] = get_file_handle(filename, binary=True)
            self.exporter_mapping[item_type] = CsvItemExporter(
                self.file_mapping[item_type],
                fields_to_export=self.field_mapping[item_type])
Example #5
    def _start(self):
        super()._start()

        self.output_file = get_file_handle(self.output,
                                           binary=True,
                                           create_parent_dirs=True)
        self.exporter = CsvItemExporter(self.output_file,
                                        fields_to_export=self.fields_to_export)
Example #6
    def open(self):
        for item_type, filename in self.filename_mapping.items():
            file = get_file_handle(filename, binary=True)
            fields = self.field_mapping[item_type]
            self.file_mapping[item_type] = file
            if str(filename).endswith('.json'):
                item_exporter = JsonLinesItemExporter(file, fields_to_export=fields)
            else:
                item_exporter = CsvItemExporter(file, fields_to_export=fields)
            self.exporter_mapping[item_type] = item_exporter

            self.counter_mapping[item_type] = AtomicCounter()
Example #7
    def _start(self):
        self.output_file = get_file_handle(self.output, binary=True)
        self.exporter = CsvItemExporter(self.output_file)
Example #8
parser = argparse.ArgumentParser(
    description='Exports ERC20 transfers using eth_newFilter and eth_getFilterLogs JSON RPC APIs.')
parser.add_argument('--start-block', default=0, type=int, help='Start block')
parser.add_argument('--end-block', required=True, type=int, help='End block')
parser.add_argument('--output', default=None, type=str, help='The output file. If not specified stdout is used.')
parser.add_argument('--ipc-path', required=True, type=str, help='The full path to the ipc socket file.')
parser.add_argument('--ipc-timeout', default=300, type=int, help='The timeout in seconds for ipc calls.')
parser.add_argument('--batch-size', default=100, type=int, help='The number of blocks to filter at a time.')

args = parser.parse_args()


with smart_open(args.output, binary=True) as output_file:
    transaction_receipt_log_mapper = EthTransactionReceiptLogMapper()
    erc20_transfer_mapper = EthErc20TransferMapper()
    erc20_processor = EthErc20Processor()
    exporter = CsvItemExporter(output_file)

    web3 = Web3(IPCProvider(args.ipc_path, timeout=args.ipc_timeout))

    for batch_start_block in range(args.start_block, args.end_block + 1, args.batch_size):
        batch_end_block = min(batch_start_block + args.batch_size - 1, args.end_block)

        event_filter = web3.eth.filter({
            "fromBlock": batch_start_block,
            "toBlock": batch_end_block,
            "topics": [TRANSFER_EVENT_TOPIC]
        })

        events = event_filter.get_all_entries()

        for event in events:
Example #9
                    help='Whether or not to extract transactions.')
parser.add_argument(
    '--transactions-output',
    default=None,
    type=str,
    help='The output file for transactions. If not specified stdout is used.')

args = parser.parse_args()

# contextlib.nullcontext() is a no-op context manager that yields None, so when an output
# is not requested the corresponding file handle stays None and the exporter below is skipped.
with smart_open(args.input, 'r') as input_file, \
        (smart_open(args.blocks_output, binary=True) if args.extract_blocks else contextlib.nullcontext()) as blocks_output_file, \
        (smart_open(args.transactions_output, binary=True) if args.extract_transactions else contextlib.nullcontext()) as tx_output_file:
    block_mapper = EthBlockMapper()
    tx_mapper = EthTransactionMapper()

    blocks_exporter = CsvItemExporter(
        blocks_output_file) if blocks_output_file is not None else None
    tx_exporter = CsvItemExporter(
        tx_output_file) if tx_output_file is not None else None

    for line in input_file:
        json_line = json.loads(line)
        result = json_line.get('result', None)
        if result is None:
            continue
        block = block_mapper.json_dict_to_block(result)
        if blocks_exporter is not None:
            blocks_exporter.export_item(block_mapper.block_to_dict(block))

        if block.transactions is not None and tx_exporter is not None:
            for transaction in block.transactions:
                tx_exporter.export_item(
                    tx_mapper.transaction_to_dict(transaction))
Example #10
parser = argparse.ArgumentParser(
    description='Extract blocks and transactions from eth_getBlockByNumber JSON RPC output')
parser.add_argument('--input', default=None, type=str, help='The input file. If not specified stdin is used.')
parser.add_argument('--blocks-output', default=None, type=str,
                    help='The output file for blocks. If not specified stdout is used.')
parser.add_argument('--transactions-output', default=None, type=str,
                    help='The output file for transactions. If not specified stdout is used.')

args = parser.parse_args()

with smart_open(args.input, 'r') as input_file, \
        smart_open(args.blocks_output, binary=True) as blocks_output_file, \
        smart_open(args.transactions_output, binary=True) as tx_output_file:
    block_mapper = EthBlockMapper()
    tx_mapper = EthTransactionMapper()

    blocks_exporter = CsvItemExporter(blocks_output_file)
    tx_exporter = CsvItemExporter(tx_output_file)

    for line in input_file:
        json_line = json.loads(line)
        result = json_line.get('result', None)
        if result is None:
            continue
        block = block_mapper.json_dict_to_block(result)
        blocks_exporter.export_item(block_mapper.block_to_dict(block))

        if block.transactions is not None:
            for transaction in block.transactions:
                tx_exporter.export_item(tx_mapper.transaction_to_dict(transaction))
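
All of the examples above share one pattern: open the output in binary mode, construct a CsvItemExporter (optionally with an explicit fields_to_export column list), and feed it plain dicts via export_item. Below is a minimal, self-contained sketch of that pattern; the import paths, the output filename, and the tiny blocks list are assumptions made purely for illustration and are not taken from the examples.

# Minimal sketch of the shared export pattern (import paths are assumed, not confirmed by the examples).
from ethereumetl.exporters import CsvItemExporter
from ethereumetl.file_utils import smart_open

# Illustrative rows only; the real scripts build these dicts with mappers such as EthBlockMapper.
blocks = [
    {'number': 1, 'hash': '0x01', 'transaction_count': 0},
    {'number': 2, 'hash': '0x02', 'transaction_count': 3},
]

with smart_open('blocks.csv', binary=True) as output_file:
    # The exporter writes the header row from fields_to_export, then one CSV row per exported dict.
    exporter = CsvItemExporter(output_file, fields_to_export=['number', 'hash', 'transaction_count'])
    for block in blocks:
        exporter.export_item(block)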