def _export(self):
    """Export every block in [self.start_block, self.end_block], batch by batch.

    Each batch is exported in one call; if that call times out, the batch is
    retried block-by-block so a single slow block cannot sink the whole batch.
    """
    batches = split_to_batches(self.start_block, self.end_block, self.batch_size)
    for first, last in batches:
        try:
            self._export_batch(first, last)
        except (Timeout, SocketTimeoutException):
            # The whole batch timed out — fall back to exporting each
            # block on its own.
            block = first
            while block <= last:
                self._export_batch(block, block)
                block += 1
from ethereumetl.utils import split_to_batches # The below partitioning tries to make each partition of equal size. # The first million blocks are in a single partition. # The next 3 million blocks are in 100k partitions. # The next 1 million blocks are in 10k partitions. # Note that there is a limit in Data Pipeline on the number of objects, which can be # increased in the Support Center # https://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-limits.html EXPORT_PARTITIONS = [(0, 999999)] + \ [(start, end) for start, end in split_to_batches(1000000, 1999999, 100000)] + \ [(start, end) for start, end in split_to_batches(2000000, 2999999, 100000)] + \ [(start, end) for start, end in split_to_batches(3000000, 3999999, 100000)] + \ [(start, end) for start, end in split_to_batches(4000000, 4999999, 10000)] DEFAULT_BUCKET = "example.com" EXPORT_BLOCKS_AND_TRANSACTIONS = True EXPORT_RECEIPTS_AND_LOGS = False EXPORT_CONTRACTS = False EXPORT_TOKEN_TRANSFERS = True EXPORT_TOKENS = True IS_GETH = False if IS_GETH: IPC_PATH = 'file:///home/ec2-user/.ethereum/geth.ipc' else: IPC_PATH = 'file:///home/ec2-user/.local/share/io.parity.ethereum/jsonrpc.ipc' SETUP_COMMAND = \
def _export(self):
    """Fan out the export of [self.start_block, self.end_block] to the executor.

    One fail-safe export task is scheduled per batch; the executor decides
    how (and how concurrently) the tasks actually run.
    """
    batches = split_to_batches(self.start_block, self.end_block, self.batch_size)
    for first, last in batches:
        self.executor.submit(self._fail_safe_export_batch, first, last)