Example #1
    def _export_contracts(self, traces):
        # Extract contract items from the given traces and collect them in memory.
        exporter = InMemoryItemExporter(item_types=['contract'])
        job = ExtractContractsJob(traces_iterable=traces,
                                  batch_size=self.batch_size,
                                  max_workers=self.max_workers,
                                  item_exporter=exporter)
        job.run()
        contracts = exporter.get_items('contract')
        return contracts
Example #2
    def _extract_token_transfers(self, logs):
        # Extract token transfer items from receipt logs.
        exporter = InMemoryItemExporter(item_types=['token_transfer'])
        job = ExtractTokenTransfersJob(logs_iterable=logs,
                                       batch_size=self.batch_size,
                                       max_workers=self.max_workers,
                                       item_exporter=exporter)
        job.run()
        token_transfers = exporter.get_items('token_transfer')
        return token_transfers
Example #3
    def _extract_tokens(self, contracts):
        # Extract token items (symbol, name, decimals, total supply) from token contracts.
        exporter = InMemoryItemExporter(item_types=['token'])
        job = ExtractTokensJob(
            contracts_iterable=contracts,
            web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
            max_workers=self.max_workers,
            item_exporter=exporter)
        job.run()
        tokens = exporter.get_items('token')
        return tokens
Example #4
    def _export_traces(self, start_block, end_block):
        # Export internal transaction traces for the given block range.
        exporter = InMemoryItemExporter(item_types=['trace'])
        job = ExportTracesJob(
            start_block=start_block,
            end_block=end_block,
            batch_size=self.batch_size,
            web3=ThreadLocalProxy(lambda: Web3(self.batch_web3_provider)),
            max_workers=self.max_workers,
            item_exporter=exporter)
        job.run()
        traces = exporter.get_items('trace')
        return traces
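Examples #1-#4 all follow the same shape: build an InMemoryItemExporter for the expected item types, pass it to a job, call run(), then read the collected items back with get_items(). The sketch below re-implements that contract with hypothetical stand-in classes; they are not the real ethereum-etl classes (whose jobs do the actual RPC work) and only illustrate how items flow from a job into the in-memory exporter.

# Hypothetical stand-ins mirroring the exporter/job flow of Examples #1-#4.
class SimpleInMemoryExporter:
    def __init__(self, item_types):
        self.items = {item_type: [] for item_type in item_types}

    def open(self):
        pass

    def export_item(self, item):
        # Items are routed by their 'type' field.
        self.items[item['type']].append(item)

    def get_items(self, item_type):
        return self.items[item_type]

    def close(self):
        pass


class SimpleExtractJob:
    """Stand-in for jobs like ExtractContractsJob: pushes items to the exporter."""

    def __init__(self, items_iterable, item_exporter):
        self.items_iterable = items_iterable
        self.item_exporter = item_exporter

    def run(self):
        self.item_exporter.open()
        for item in self.items_iterable:
            self.item_exporter.export_item(item)
        self.item_exporter.close()


exporter = SimpleInMemoryExporter(item_types=['contract'])
job = SimpleExtractJob(items_iterable=[{'type': 'contract', 'address': '0x0'}],
                       item_exporter=exporter)
job.run()
print(exporter.get_items('contract'))  # [{'type': 'contract', 'address': '0x0'}]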
Example #5
    def _extract_tokens(self, contracts):
        exporter = InMemoryItemExporter(item_types=['token'])
        web3 = Web3(self.batch_web3_provider)
        # Inject the geth PoA middleware so that blocks from proof-of-authority
        # chains (whose extraData exceeds 32 bytes) can be parsed.
        web3.middleware_stack.inject(geth_poa_middleware, layer=0)

        job = ExtractTokensJob(contracts_iterable=contracts,
                               web3=ThreadLocalProxy(lambda: web3),
                               max_workers=self.max_workers,
                               item_exporter=exporter)
        job.run()
        tokens = exporter.get_items('token')
        return tokens
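Example #5 (and Example #7 below) differs from Examples #3 and #4 only in injecting geth_poa_middleware before the job is built; this is needed on proof-of-authority chains, where the block extraData field is longer than the 32 bytes the default formatters expect, and the ThreadLocalProxy wrapper then simply defers evaluation of the supplied factory to the worker threads. A minimal sketch of that setup in isolation, assuming the web3.py v4-style API used in the example (newer web3.py releases expose middleware_onion instead of middleware_stack) and a placeholder RPC endpoint:

from web3 import Web3, HTTPProvider
from web3.middleware import geth_poa_middleware

# Placeholder endpoint; replace with your own node or provider URL.
web3 = Web3(HTTPProvider('http://localhost:8545'))

# web3.py v4 API as in the example above; on v5/v6 use
# web3.middleware_onion.inject(geth_poa_middleware, layer=0) instead.
web3.middleware_stack.inject(geth_poa_middleware, layer=0)

# Without the middleware, fetching a block on a PoA chain fails while
# validating the oversized extraData field.
print(web3.eth.getBlock('latest').number)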
Example #6
    def _export_receipts_and_logs(self, transactions):
        # Export receipt and log items for the given transactions, depending on
        # which entity types are enabled.
        exporter = InMemoryItemExporter(item_types=['receipt', 'log'])
        job = ExportReceiptsJob(
            transaction_hashes_iterable=(transaction['hash']
                                         for transaction in transactions),
            batch_size=self.batch_size,
            batch_web3_provider=self.batch_web3_provider,
            max_workers=self.max_workers,
            item_exporter=exporter,
            export_receipts=self._should_export(EntityType.RECEIPT),
            export_logs=self._should_export(EntityType.LOG))
        job.run()
        receipts = exporter.get_items('receipt')
        logs = exporter.get_items('log')
        return receipts, logs
Example #7
    def _export_traces(self, start_block, end_block):
        exporter = InMemoryItemExporter(item_types=['trace'])

        web3 = Web3(self.batch_web3_provider)
        # Same PoA middleware injection as in Example #5.
        web3.middleware_stack.inject(geth_poa_middleware, layer=0)

        job = ExportTracesJob(start_block=start_block,
                              end_block=end_block,
                              batch_size=self.batch_size,
                              web3=ThreadLocalProxy(lambda: web3),
                              max_workers=self.max_workers,
                              item_exporter=exporter)
        job.run()
        traces = exporter.get_items('trace')
        return traces
Example #8
    def _export_blocks_and_transactions(self, start_block, end_block):
        # Export block and transaction items for the given block range.
        blocks_and_transactions_item_exporter = InMemoryItemExporter(item_types=['block', 'transaction'])
        blocks_and_transactions_job = ExportBlocksJob(
            start_block=start_block,
            end_block=end_block,
            batch_size=self.batch_size,
            batch_web3_provider=self.batch_web3_provider,
            max_workers=self.max_workers,
            item_exporter=blocks_and_transactions_item_exporter,
            export_blocks=self._should_export(EntityType.BLOCK),
            export_transactions=self._should_export(EntityType.TRANSACTION)
        )
        blocks_and_transactions_job.run()
        blocks = blocks_and_transactions_item_exporter.get_items('block')
        transactions = blocks_and_transactions_item_exporter.get_items('transaction')
        return blocks, transactions
Example #9
    def export_all(self, start_block, end_block):
        # Export blocks and transactions
        blocks_and_transactions_item_exporter = InMemoryItemExporter(
            item_types=['block', 'transaction'])

        blocks_and_transactions_job = ExportBlocksJob(
            start_block=start_block,
            end_block=end_block,
            batch_size=self.batch_size,
            bitcoin_rpc=self.bitcoin_rpc,
            max_workers=self.max_workers,
            item_exporter=blocks_and_transactions_item_exporter,
            chain=self.chain,
            export_blocks=True,
            export_transactions=True)
        blocks_and_transactions_job.run()

        blocks = blocks_and_transactions_item_exporter.get_items('block')
        transactions = blocks_and_transactions_item_exporter.get_items(
            'transaction')

        if self.enable_enrich:
            # Enrich transactions
            enriched_transactions_item_exporter = InMemoryItemExporter(
                item_types=['transaction'])

            enrich_transactions_job = EnrichTransactionsJob(
                transactions_iterable=transactions,
                batch_size=self.batch_size,
                bitcoin_rpc=self.bitcoin_rpc,
                max_workers=self.max_workers,
                item_exporter=enriched_transactions_item_exporter,
                chain=self.chain)
            enrich_transactions_job.run()
            enriched_transactions = enriched_transactions_item_exporter.get_items(
                'transaction')
            if len(enriched_transactions) != len(transactions):
                raise ValueError(
                    'The number of enriched transactions {} does not match '
                    'the number of input transactions {}'.format(
                        len(enriched_transactions), len(transactions)))
            transactions = enriched_transactions

        logging.info('Exporting with ' + type(self.item_exporter).__name__)

        all_items = blocks + transactions

        self.calculate_item_ids(all_items)

        self.item_exporter.export_items(all_items)
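The enrichment branch in Example #9 runs the transactions through a second job and then verifies that none were lost before replacing the original list. The same guard in isolation, with a toy enrich() function standing in for EnrichTransactionsJob (which, roughly, fills in input values and addresses via additional RPC calls):

# Toy illustration of the count check used after EnrichTransactionsJob above.
transactions = [{'hash': 'a'}, {'hash': 'b'}]

def enrich(transaction):
    # Hypothetical enrichment; the real job resolves input values/addresses.
    return {**transaction, 'enriched': True}

enriched_transactions = [enrich(transaction) for transaction in transactions]
if len(enriched_transactions) != len(transactions):
    raise ValueError('Enrichment dropped or duplicated transactions')
transactions = enriched_transactions
print(transactions)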
Example #10
    def _export_enrich_block_group(self, start_block, end_block):
        block_group_item_exporter = InMemoryItemExporter(
            item_types=['block', 'transaction', 'log', 'token_transfer'])
        block_group_job = ExportEnrichBlockGroupJob(
            start_block=start_block,
            end_block=end_block,
            batch_size=self.batch_size,
            batch_web3_provider=self.batch_web3_provider,
            max_workers=self.max_workers,
            item_exporter=block_group_item_exporter,
            export_blocks=self._should_export(EntityType.BLOCK),
            export_transactions=self._should_export(EntityType.TRANSACTION),
            export_logs=self._should_export(EntityType.LOG),
            export_token_transfers=self._should_export(
                EntityType.TOKEN_TRANSFER))
        block_group_job.run()
        blocks = block_group_item_exporter.get_items('block')
        transactions = block_group_item_exporter.get_items('transaction')
        logs = block_group_item_exporter.get_items('log')
        token_transfers = block_group_item_exporter.get_items('token_transfer')

        return blocks, transactions, logs, token_transfers
Example #11
def stream(
        eos_rpc,
        last_synced_block_file='last_synced_block.txt',
        lag=0,
        item_exporter=ConsoleItemExporter(),
        start_block=None,
        end_block=None,
        chain=Chain.BITCOIN,
        period_seconds=10,
        batch_size=2,
        block_batch_size=10,
        max_workers=5):
    if start_block is not None or not os.path.isfile(last_synced_block_file):
        init_last_synced_block_file((start_block or 0) - 1, last_synced_block_file)

    last_synced_block = read_last_synced_block(last_synced_block_file)
    eos_service = EosService(eos_rpc, chain)

    item_exporter.open()

    while end_block is None or last_synced_block < end_block:
        blocks_to_sync = 0

        try:
            current_block = int(eos_service.get_latest_block().number)
            target_block = current_block - lag
            target_block = min(target_block, last_synced_block + block_batch_size)
            target_block = min(target_block, end_block) if end_block is not None else target_block
            blocks_to_sync = max(target_block - last_synced_block, 0)
            logging.info('Current block {}, target block {}, last synced block {}, blocks to sync {}'.format(
                current_block, target_block, last_synced_block, blocks_to_sync))

            if blocks_to_sync == 0:
                logging.info('Nothing to sync. Sleeping for {} seconds...'.format(period_seconds))
                time.sleep(period_seconds)
                continue

            # Export blocks and transactions
            blocks_and_transactions_item_exporter = InMemoryItemExporter(item_types=['block', 'transaction'])

            blocks_and_transactions_job = ExportBlocksJob(
                start_block=last_synced_block + 1,
                end_block=target_block,
                batch_size=batch_size,
                eos_rpc=eos_rpc,
                max_workers=max_workers,
                item_exporter=blocks_and_transactions_item_exporter,
                chain=chain,
                export_blocks=True,
                export_transactions=True
            )
            blocks_and_transactions_job.run()

            blocks = blocks_and_transactions_item_exporter.get_items('block')
            transactions = blocks_and_transactions_item_exporter.get_items('transaction')

            # Enrich transactions
            enriched_transactions_item_exporter = InMemoryItemExporter(item_types=['transaction'])

            enrich_transactions_job = EnrichTransactionsJob(
                transactions_iterable=transactions,
                batch_size=batch_size,
                eos_rpc=eos_rpc,
                max_workers=max_workers,
                item_exporter=enriched_transactions_item_exporter,
                chain=chain
            )
            enrich_transactions_job.run()
            enriched_transactions = enriched_transactions_item_exporter.get_items('transaction')
            if len(enriched_transactions) != len(transactions):
                raise ValueError(
                    'The number of enriched transactions {} does not match '
                    'the number of input transactions {}'.format(
                        len(enriched_transactions), len(transactions)))

            logging.info('Exporting with ' + type(item_exporter).__name__)
            item_exporter.export_items(blocks + enriched_transactions)

            logging.info('Writing last synced block {}'.format(target_block))
            write_last_synced_block(last_synced_block_file, target_block)
            last_synced_block = target_block
        except Exception as e:
            # https://stackoverflow.com/a/4992124/1580227
            logging.exception('An exception occurred while fetching block data.')

        if blocks_to_sync != block_batch_size and last_synced_block != end_block:
            logging.info('Sleeping {} seconds...'.format(period_seconds))
            time.sleep(period_seconds)

    item_exporter.close()
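A bounded invocation of the stream() function above might look like the sketch below. Each loop iteration syncs at most block_batch_size blocks, stays lag blocks behind the chain head, and never passes end_block; the last synced block number is persisted to last_synced_block_file so the stream can resume. The build_eos_rpc() helper is hypothetical, standing in for however the project constructs its RPC client; all other arguments come from the signature above, and the defaults (ConsoleItemExporter, chain) are left as-is.

# Hypothetical driver for stream(); build_eos_rpc() is a placeholder, not a
# function from the source.
def build_eos_rpc():
    raise NotImplementedError('construct the EOS RPC client used by the jobs here')

stream(
    eos_rpc=build_eos_rpc(),
    last_synced_block_file='last_synced_block.txt',
    lag=10,                  # stay 10 blocks behind the head to sidestep shallow reorgs
    start_block=1000,
    end_block=2000,          # exit the loop once block 2000 has been synced
    period_seconds=10,
    batch_size=2,
    block_batch_size=10,
    max_workers=5)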