def multiprocess_pool( bitcoind_opts, initializer=None, initargs=None ):
   """
   Given bitcoind options, create a multiprocess pool 
   for querying it.
   """
   num_workers, worker_batch_size = configure_multiprocessing( bitcoind_opts )
   return Pool( processes=num_workers, initializer=initializer, initargs=initargs )
Example 2
def multiprocess_pool(bitcoind_opts, initializer=None, initargs=None):
    """
   Given bitcoind options, create a multiprocess pool 
   for querying it.
   """
    num_workers, worker_batch_size = configure_multiprocessing(bitcoind_opts)
    return Pool(processes=num_workers,
                initializer=initializer,
                initargs=initargs)
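
The two examples above only build the pool. A minimal usage sketch follows, assuming multiprocess_pool and configure_multiprocessing are importable from the module above; _init_worker, fetch_block, and the bitcoind_opts keys are illustrative placeholders, not part of the original code:

# Hypothetical worker-side state and task function, for illustration only.
_worker_bitcoind_opts = None

def _init_worker(bitcoind_opts):
    # Runs once in every worker process; stash the connection options so
    # tasks submitted later can open their own bitcoind connections.
    global _worker_bitcoind_opts
    _worker_bitcoind_opts = bitcoind_opts

def fetch_block(block_id):
    # Placeholder task: a real indexer would query bitcoind for this block's
    # transactions using _worker_bitcoind_opts.
    return (block_id, [])

if __name__ == "__main__":
    # illustrative option keys; the real schema comes from configure_multiprocessing's module
    bitcoind_opts = {"bitcoind_server": "127.0.0.1", "bitcoind_port": 8332}

    pool = multiprocess_pool(bitcoind_opts,
                             initializer=_init_worker,
                             initargs=(bitcoind_opts,))
    try:
        results = pool.map(fetch_block, range(100000, 100010))
    finally:
        pool.close()
        pool.join()
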
Example 3
def multiprocess_pool( bitcoind_opts, python_filepath ):
   """
   Create a multiprocess pool to index the blockchain, given the path to the python file to run to receive commands
   and the blockchain connection options.
   """
   num_workers, worker_batch_size = configure_multiprocessing( bitcoind_opts )
   bitcoind_opts_environ = pickle.dumps( bitcoind_opts )
   worker_env = {
        "VIRTUALCHAIN_BITCOIND_OPTIONS": bitcoind_opts_environ
   }

   if os.environ.get("PYTHONPATH", None) is not None:
       worker_env["PYTHONPATH"] = os.environ["PYTHONPATH"]

   return Workpool( num_workers, "python", [python_filepath], worker_env=worker_env )
Example 4
def multiprocess_pool(bitcoind_opts, python_filepath):
    """
    Create a multiprocess pool to index the blockchain, given the path to the python file to run to receive commands
    and the blockchain connection options.
    """
    num_workers, worker_batch_size = configure_multiprocessing(bitcoind_opts)

    bitcoind_opts_environ = pickle.dumps(bitcoind_opts)

    worker_env = {"VIRTUALCHAIN_BITCOIND_OPTIONS": bitcoind_opts_environ}

    if os.environ.get("PYTHONPATH", None) is not None:
        worker_env["PYTHONPATH"] = os.environ["PYTHONPATH"]

    # run the worker script with the same interpreter as this process
    # (sys.executable), rather than whatever "python" happens to be on PATH
    return Workpool(num_workers,
                    sys.executable, [python_filepath],
                    worker_env=worker_env)
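
On the worker side, the script at python_filepath presumably recovers the connection options from the environment variable set above. A minimal sketch of that step, assuming only the variable name VIRTUALCHAIN_BITCOIND_OPTIONS (the rest of the worker's command protocol is not shown in these examples):

import os
import pickle

def load_bitcoind_opts_from_env():
    # The parent process pickled the bitcoind options into
    # VIRTUALCHAIN_BITCOIND_OPTIONS before spawning this worker;
    # unpickle them so the worker can open its own bitcoind connection.
    raw = os.environ.get("VIRTUALCHAIN_BITCOIND_OPTIONS")
    if raw is None:
        raise RuntimeError("VIRTUALCHAIN_BITCOIND_OPTIONS is not set")
    return pickle.loads(raw)
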
Example 5
    def build(cls, bitcoind_opts, end_block_id, state_engine):
        """
        Top-level call to process all blocks in the blockchain.
        Goes and fetches all OP_RETURN nulldata in order,
        and feeds them into the state engine implementation using its
        'db_parse', 'db_check', 'db_commit', and 'db_save'
        methods.
        
        Note that this method can take some time (hours, days) to complete 
        when called from the first block.
        
        This method is *NOT* thread-safe.  However, it can be interrupted 
        with the "stop_build" method.
        
        Return True on success 
        Return False on error
        Raise an exception on irrecoverable error--the caller should simply try again.
        """

        first_block_id = state_engine.lastblock + 1
        if first_block_id >= end_block_id:
            # built
            log.debug("Up-to-date")
            return True

        num_workers, worker_batch_size = config.configure_multiprocessing(
            bitcoind_opts)

        rc = True

        state_engine.start_workpool(bitcoind_opts)

        try:

            log.debug("Process blocks %s to %s" %
                      (first_block_id, end_block_id))

            for block_id in xrange(first_block_id, end_block_id,
                                   worker_batch_size * num_workers):

                if not rc:
                    break

                if state_engine.get_workpool() is None:
                    # interrupted
                    log.debug("Build interrupted")
                    rc = False
                    break

                block_ids = range(
                    block_id,
                    min(block_id + worker_batch_size * num_workers,
                        end_block_id))

                # returns: [(block_id, txs)]
                block_ids_and_txs = transactions.get_nulldata_txs_in_blocks(
                    state_engine.get_workpool(), bitcoind_opts, block_ids)

                # process in order by block ID
                block_ids_and_txs.sort()

                for processed_block_id, txs in block_ids_and_txs:

                    if state_engine.get_consensus_at(
                            processed_block_id) is not None:
                        raise Exception("Already processed block %s (%s)" % (
                            processed_block_id,
                            state_engine.get_consensus_at(processed_block_id)))

                    ops = state_engine.parse_block(processed_block_id, txs)
                    consensus_hash = state_engine.process_block(
                        processed_block_id, ops)

                    log.debug(
                        "CONSENSUS(%s): %s" %
                        (processed_block_id,
                         state_engine.get_consensus_at(processed_block_id)))

                    if consensus_hash is None:

                        # fatal error
                        rc = False
                        log.error("Failed to process block %d" %
                                  processed_block_id)
                        break

            log.debug("Last block is %s" % state_engine.lastblock)

        except:

            state_engine.stop_workpool()
            raise

        state_engine.stop_workpool()
        return rc
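
A driver for build() can lean on the contract spelled out in its docstring: True means the state engine is up to date, False means a block failed to process, and an exception means the caller should try again. The sketch below is hypothetical; indexer_cls stands in for whatever class defines build() above, and the retry delay is arbitrary:

import logging
import time

log = logging.getLogger(__name__)

def run_build(indexer_cls, bitcoind_opts, end_block_id, state_engine,
              retry_delay=30):
    # Keep calling build() until it returns; retry after a pause whenever it
    # raises, as the docstring instructs the caller to do.
    while True:
        try:
            return indexer_cls.build(bitcoind_opts, end_block_id, state_engine)
        except Exception:
            log.exception("build() raised; retrying in %s seconds", retry_delay)
            time.sleep(retry_delay)
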
Example 6
    def build( cls, bitcoind_opts, end_block_id, state_engine ):
        """
        Top-level call to process all blocks in the blockchain.
        Goes and fetches all OP_RETURN nulldata in order,
        and feeds them into the state engine implementation using its
        'db_parse', 'db_check', 'db_commit', and 'db_save'
        methods.
        
        Note that this method can take some time (hours, days) to complete 
        when called from the first block.
        
        This method is *NOT* thread-safe.  However, it can be interrupted 
        with the "stop_build" method.
        
        Return True on success 
        Return False on error
        Raise an exception on irrecoverable error--the caller should simply try again.
        """
        
        first_block_id = state_engine.lastblock + 1
        if first_block_id >= end_block_id:
            # built 
            log.debug("Up-to-date")
            return True 

        num_workers, worker_batch_size = config.configure_multiprocessing( bitcoind_opts )

        rc = True

        state_engine.start_workpool( bitcoind_opts )
        
        try:
            
            log.debug("Process blocks %s to %s" % (first_block_id, end_block_id) )
            
            for block_id in xrange( first_block_id, end_block_id, worker_batch_size * num_workers ):
                
                if not rc:
                    break 
                
                if state_engine.get_workpool() is None:
                    # interrupted 
                    log.debug("Build interrupted")
                    rc = False
                    break 
                
                block_ids = range( block_id, min(block_id + worker_batch_size * num_workers, end_block_id) )
               
                # returns: [(block_id, txs)]
                block_ids_and_txs = transactions.get_nulldata_txs_in_blocks( state_engine.get_workpool(), bitcoind_opts, block_ids )
                
                # process in order by block ID
                block_ids_and_txs.sort()
               
                for processed_block_id, txs in block_ids_and_txs:

                    if state_engine.get_consensus_at( processed_block_id ) is not None:
                        raise Exception("Already processed block %s (%s)" % (processed_block_id, state_engine.get_consensus_at( processed_block_id )) )

                    ops = state_engine.parse_block( processed_block_id, txs )
                    consensus_hash = state_engine.process_block( processed_block_id, ops )
                    
                    log.debug("CONSENSUS(%s): %s" % (processed_block_id, state_engine.get_consensus_at( processed_block_id )))
                    
                    if consensus_hash is None:
                        
                        # fatal error 
                        rc = False
                        log.error("Failed to process block %d" % processed_block_id )
                        break
            
            log.debug("Last block is %s" % state_engine.lastblock )

        except:
            
            state_engine.stop_workpool()
            raise
       
        state_engine.stop_workpool()
        return rc
Example 7
def multiprocess_batch_size(bitcoind_opts):
    """
    How many blocks can we be querying at once?
    """
    num_workers, worker_batch_size = configure_multiprocessing(bitcoind_opts)
    return num_workers * worker_batch_size
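
This product is the same stride the build() loop above advances by on each batch. A small illustrative helper (batch_block_ranges is not part of the library) that splits a block range into batches of that size:

def batch_block_ranges(bitcoind_opts, first_block_id, end_block_id):
    # Yield lists of block IDs, each at most multiprocess_batch_size() long,
    # mirroring how build() walks the chain in worker-sized strides.
    step = multiprocess_batch_size(bitcoind_opts)
    for start in range(first_block_id, end_block_id, step):
        yield range(start, min(start + step, end_block_id))
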
Example 8
def multiprocess_batch_size( bitcoind_opts ):
   """
   How many blocks can we be querying at once?
   """
   num_workers, worker_batch_size = configure_multiprocessing( bitcoind_opts )
   return num_workers * worker_batch_size