    def address_reuse_finder(self):
        print("DEBUG: Started address_reuse_finder process.")
        database_connector = address_reuse.db.Database(TEMP_DB_FILENAME)
        local_block_reader = address_reuse.blockchain_reader.LocalBlockchainRPCReader(
            database_connector)
        block_processor = address_reuse.block_processor.BlockProcessor(
            local_block_reader, database_connector)

        deferred_blame_reuse_producer_identity = address_reuse.data_subscription.DataProducer.BLOCK_ADDRESS_REUSE_WITH_DEFERRED_BLAME
        announcer = address_reuse.data_subscription.BlockDataProductionAnnouncer(
            deferred_blame_reuse_producer_identity,
            database=database_connector)

        for i in range(0, 171):
            #print("DEBUG: address_reuse_finder @ height %d. May sleep now..." % i)
            print("DEBUG: address_reuse_finder @ height %d." % i)
            #relayed_by_subscription.do_sleep_until_producers_ready()
            #print("DEBUG: address_reuse_finder @ height %d. Produers are now ready." % i)
            block_processor.process_block(i,
                                          defer_blaming=True,
                                          use_tx_out_addr_cache_only=True)
            announcer.increment_announced_block_available()
            print(
                "DEBUG: address_reuse_finder @ height %d. Announcing completion of block."
                % i)
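    # "Started address_reuse_finder process" and the announcer above suggest
    # this method runs in its own process alongside a consumer that waits on
    # the announced data (see the commented-out relayed_by_subscription
    # calls). A minimal sketch of launching it that way is below; the use of
    # multiprocessing and the helper name are assumptions, not something this
    # excerpt confirms.
    def start_address_reuse_finder_process(self):
        import multiprocessing  #local import keeps this sketch self-contained
        finder_process = multiprocessing.Process(target=self.address_reuse_finder)
        finder_process.start()
        return finder_process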
    def test_is_first_transaction_for_reused_address(self):
        block_height = 170

        #in order for the results at block height 170 to be accurate, we must
        #   first process the first 170 blocks.
        block_processor = address_reuse.block_processor.BlockProcessor(
            self.reader, self.database_connector)
        benchmarker = address_reuse.benchmark.block_reader_benchmark.Benchmark()
        #We don't need to blame particular parties for this test, so skip
        #   the blaming functions that would require remote HTTP requests.
        for height in range(0, 170):
            block_processor.process_block(height, benchmarker,
                                          defer_blaming=True)
        benchmarker.stop()
        benchmarker.print_stats()
        tx_id = 'f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16'
        addr = '12cbQLTFMXRnSzktFkuoG3eHoMeFtpTu3S'
        self.do_is_first_transaction_for_address(addr, tx_id, block_height,
                                                 False)
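    # do_is_first_transaction_for_address() is not shown in this excerpt; a
    # minimal sketch of what such an assertion helper might look like is
    # below. get_first_tx_id_for_address() is a hypothetical query method on
    # the database connector, not an API confirmed by this excerpt.
    def do_is_first_transaction_for_address(self, addr, tx_id, block_height,
                                            expected):
        #Hypothetical lookup: the first transaction recorded for this address
        #   at or below the given block height.
        first_tx_id = self.database_connector.get_first_tx_id_for_address(
            addr, block_height)
        self.assertEqual(first_tx_id == tx_id, expected)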
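#The update.py excerpt below is truncated: the setup it relies on (imports,
#   db, blockchain_reader, current_blockchain_height, THIS_FILE) is not
#   shown. The sketch below is modeled on the main() function further down
#   and is an assumption about update.py rather than verbatim code; in
#   particular, reusing the throttled remote reader for per-block reads is a
#   guess.
import traceback

import address_reuse.benchmark.block_reader_benchmark
import address_reuse.blockchain_reader
import address_reuse.block_processor
import address_reuse.config
import address_reuse.db
import address_reuse.logger
import address_reuse.validate

THIS_FILE = 'update.py'  #hypothetical constant used in log/validation calls

db = address_reuse.db.Database()  #default constructor assumed

#Determine the current furthest block in the blockchain according to the
#   remote API.
api_reader = address_reuse.blockchain_reader.ThrottledBlockchainReader(db)
current_blockchain_height = int(api_reader.get_current_blockchain_block_height())

#Reader used for per-block processing (assumed to be the same throttled
#   reader in this script).
blockchain_reader = api_reader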
#Determine the last block already processed in the db.
last_height_iterated = db.get_last_block_height_in_db()
if last_height_iterated is None:
    current_height_iterated = 0
else:
    current_height_iterated = last_height_iterated + 1

#TODO: deal with orphans if I want the accuracy to be solid
#Determine max number of blocks to process. -1 blocks = infinity, since
#   negative values are truthy in Python, so this counter can keep being
#   decremented (at least until it hits some floor).
config = address_reuse.config.Config()
MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN = config.MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN
num_blocks_remaining_to_process = MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN

benchmarker = address_reuse.benchmark.block_reader_benchmark.Benchmark()
try:
    #Process blocks until we're caught up, or have hit the max # blocks to process in this run.
    while (current_height_iterated < current_blockchain_height and
           num_blocks_remaining_to_process):
        print("DEBUG: update.py: current block height of blockchain is %d, "
              "last block processed in db is %d, %d remaining blocks to "
              "process in this run." %
              (current_blockchain_height, current_height_iterated,
               num_blocks_remaining_to_process))
        #instantiate a processor object to compile stats on this block and store them in the db
        block_processor = address_reuse.block_processor.BlockProcessor(blockchain_reader, db)
        block_processor.process_block(current_height_iterated, benchmarker)
        print("Completed processing of block at height %d." % current_height_iterated)
        
        #Log successful processing of this block
        address_reuse.logger.log_status('Processed block %d.' % current_height_iterated)
        
        current_height_iterated = current_height_iterated + 1

        #continue going through blocks until there are no more or hit MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN
        num_blocks_remaining_to_process = num_blocks_remaining_to_process - 1
        address_reuse.validate.check_int_and_die(num_blocks_remaining_to_process,
                                                 'num_blocks_remaining_to_process',
                                                 THIS_FILE)

except Exception as e:
    traceback.print_exc()
finally:
    #Whether it finishes normally or is interrupted by ^C, print stats before
    #   exiting (the excerpt is truncated here; these calls mirror the main()
    #   below).
    benchmarker.stop()
    benchmarker.print_stats()
def main():
    db = address_reuse.db.Database(
        blockchain_mode=address_reuse.config.BlockchainMode.BITCOIND_RPC)

    #Determine the current furthest block out in the blockchain according to remote
    #   API
    api_reader = address_reuse.blockchain_reader.ThrottledBlockchainReader(db)
    current_blockchain_height = int(
        api_reader.get_current_blockchain_block_height())
    api_reader = None  #Done with API lookups :>

    blockchain_reader = address_reuse.blockchain_reader.LocalBlockchainRPCReader(
        db)

    #Determine the last block I've updated in the db
    last_height_in_db = db.get_last_block_height_in_db()
    num_blocks_processed = 0
    if last_height_in_db is None:
        current_height_iterated = 0
    else:
        current_height_iterated = last_height_in_db + 1

    #TODO: deal with orphans

    #Determine max number of blocks to process. -1 blocks = infinity
    config = address_reuse.config.Config(
        blockchain_mode=address_reuse.config.BlockchainMode.BITCOIND_RPC)
    MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN = config.MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN
    num_blocks_remaining_to_process = MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN

    last_block_height_processed = None

    benchmarker = address_reuse.benchmark.block_reader_benchmark.Benchmark()
    block_processor = address_reuse.block_processor.BlockProcessor(
        blockchain_reader, db)
    try:
        #Process blocks until we're caught up, or have hit the max # blocks to
        #   process in this run.
        while (current_height_iterated < current_blockchain_height and \
               num_blocks_remaining_to_process):
            print(
                "DEBUG: update_using_local_blockchain.py: current block height of blockchain is %d, last block processed in db is %d, %d remaining blocks to process in this run."
                % (current_blockchain_height, current_height_iterated,
                   num_blocks_remaining_to_process))

            block_processor.process_block(current_height_iterated,
                                          benchmarker,
                                          defer_blaming=True)
            print("Completed processing of block at height %d." %
                  current_height_iterated)
            #Log successful processing of this block
            address_reuse.logger.log_status('Processed block %d with RPC.' %
                                            current_height_iterated)

            last_block_height_processed = current_height_iterated
            current_height_iterated = current_height_iterated + 1

            #continue going through blocks until there are no more or hit MAX_NUM_BLOCKS_TO_PROCESS_PER_RUN
            num_blocks_remaining_to_process = num_blocks_remaining_to_process - 1
            address_reuse.validate.check_int_and_die(
                num_blocks_remaining_to_process,
                'num_blocks_remaining_to_process', THIS_FILE)
    except Exception as e:
        traceback.print_exc()
    finally:
        #whether it finishes normally or is interrupted by ^C, print stats before
        #   exiting
        benchmarker.stop()
        benchmarker.print_stats()

        #handle safe rollback
        if last_block_height_processed is None and current_height_iterated > 0:
            #We didn't complete processing any blocks in this run, but we may
            #   have partially processed one. Need to roll back to the height
            #   we completed before starting this run
            last_block_height_processed = current_height_iterated - 1

        if last_block_height_processed is not None:
            db.rollback_seen_addresses_cache_to_block_height(
                last_block_height_processed)
            print((
                "Due to early exit, rolled 'seen addresses' table back to the "
                "last block we finished processing at height %d") %
                  last_block_height_processed)
            #TODO: This method of rollback is crappy because we may want to
            #   allow users in the future to run multiple instances of this
            #   script to process multiple block heights simultaneously, but
            #   this triggers a DELETE that would devastate any other threads
            #   working at a higher block height. For now, though, it doesn't
            #   matter.
            db.rollback_blame_stats_to_block_height(
                last_block_height_processed)
            print(("Due to early exit, rolled 'blame stats' table back to the "
                   "last block we finished processing at height %d") %
                  last_block_height_processed)
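#The entry point is not shown in this excerpt; presumably the script is run
#   directly, in which case the standard guard below (an assumption) would
#   invoke main().
if __name__ == '__main__':
    main()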