def __init__(self, coin_store: CoinStore, consensus_constants: ConsensusConstants):
    """Wire the mempool manager to a coin store under the given consensus constants."""
    self.constants: ConsensusConstants = consensus_constants
    self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))

    # Transactions that were unable to enter mempool, used for retry. (they were invalid)
    self.potential_txs: Dict[bytes32, Tuple[SpendBundle, CostResult, bytes32]] = {}

    # Keep track of seen spend_bundles
    self.seen_bundle_hashes: Dict[bytes32, bytes32] = {}

    self.coin_store = coin_store

    # Both the live mempool and the retry cache share the same cost budget:
    # the single-block CLVM cost limit scaled by the mempool buffer factor.
    cost_budget = int(self.constants.MAX_BLOCK_COST_CLVM * self.constants.MEMPOOL_BLOCK_BUFFER)
    self.mempool_max_total_cost = cost_budget
    self.potential_cache_max_total_cost = cost_budget
    self.potential_cache_cost: int = 0
    self.seen_cache_size = 10000
    self.pool = ProcessPoolExecutor(max_workers=1)

    # The mempool will correspond to a certain peak
    self.peak: Optional[BlockRecord] = None
    self.mempool: Mempool = Mempool(self.mempool_max_total_cost)
async def create(
    block_store: WalletBlockStore,
    consensus_constants: ConsensusConstants,
    coins_of_interest_received: Callable,  # f(removals: List[Coin], additions: List[Coin], height: uint32)
    reorg_rollback: Callable,
):
    """
    Initializes a blockchain with the BlockRecords from disk, assuming they have all been
    validated. Uses the genesis block given in override_constants, or as a fallback,
    in the consensus constants config.
    """
    self = WalletBlockchain()
    self.lock = asyncio.Lock()  # External lock handled by full node

    # Cap at 61 workers: Windows Server 2016 has an issue https://bugs.python.org/issue26903
    usable_cpus = min(multiprocessing.cpu_count(), 61)
    # Leave a couple of cores free for the rest of the node, but always keep one worker.
    num_workers = max(usable_cpus - 2, 1)
    self.pool = ProcessPoolExecutor(max_workers=num_workers)
    log.info(f"Started {num_workers} processes for block validation")

    self.constants = consensus_constants
    self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))
    self.block_store = block_store
    self._shut_down = False
    self.coins_of_interest_received = coins_of_interest_received
    self.reorg_rollback = reorg_rollback
    self.log = logging.getLogger(__name__)
    await self._load_chain_from_store()
    return self
def __init__(self, coin_store: CoinStore, consensus_constants: ConsensusConstants):
    """Build a mempool manager backed by `coin_store` for the given constants."""
    constants = consensus_constants
    self.constants: ConsensusConstants = constants
    self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))

    # Keep track of seen spend_bundles
    self.seen_bundle_hashes: Dict[bytes32, bytes32] = {}

    self.coin_store = coin_store
    self.lock = asyncio.Lock()

    # The fee per cost must be above this amount to consider the fee "nonzero", and thus
    # able to kick out other transactions. This prevents spam. This is equivalent to
    # 0.055 XCH per block, or about 0.00005 XCH for two spends.
    self.nonzero_fee_minimum_fpc = 5

    self.limit_factor = 0.5
    self.mempool_max_total_cost = int(constants.MAX_BLOCK_COST_CLVM * constants.MEMPOOL_BLOCK_BUFFER)

    # Transactions that were unable to enter mempool, used for retry. (they were invalid)
    self.potential_cache = PendingTxCache(constants.MAX_BLOCK_COST_CLVM * 1)
    self.seen_cache_size = 10000
    self.pool = ProcessPoolExecutor(max_workers=2)

    # The mempool will correspond to a certain peak
    self.peak: Optional[BlockRecord] = None
    self.mempool: Mempool = Mempool(self.mempool_max_total_cost)
async def create(
    coin_store: CoinStore,
    block_store: BlockStore,
    consensus_constants: ConsensusConstants,
):
    """
    Initializes a blockchain with the BlockRecords from disk, assuming they have all been
    validated. Uses the genesis block given in override_constants, or as a fallback,
    in the consensus constants config.
    """
    self = Blockchain()
    self.lock = asyncio.Lock()  # External lock handled by full node

    # Cap at 61 workers: Windows Server 2016 has an issue https://bugs.python.org/issue26903
    usable_cpus = min(multiprocessing.cpu_count(), 61)
    # Reserve a couple of cores for the node itself; never drop below one worker.
    num_workers = max(usable_cpus - 2, 1)
    self.pool = ProcessPoolExecutor(max_workers=num_workers)
    log.info(f"Started {num_workers} processes for block validation")

    self.constants = consensus_constants
    self.coin_store = coin_store
    self.block_store = block_store
    self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))
    self._shut_down = False
    await self._load_chain_from_store()
    self._seen_compact_proofs = set()
    return self
def to_json_dict(self, include_legacy_keys: bool = True, exclude_modern_keys: bool = True):
    """Serialize to a JSON-ready dict.

    The spend list can be emitted under the legacy key (`coin_solutions`), the
    modern key (`coin_spends`), or both. Requesting neither is an error, since
    the spends would be dropped entirely.
    """
    # Identity checks (not truthiness) are deliberate: only the exact booleans
    # False/True trigger the guard, matching the declared parameter types.
    if exclude_modern_keys is True and include_legacy_keys is False:
        raise ValueError("`coin_spends` not included in legacy or modern outputs")

    serialized = dataclasses.asdict(self)
    if include_legacy_keys:
        # Mirror the modern key under its legacy name for old consumers.
        serialized["coin_solutions"] = serialized["coin_spends"]
    if exclude_modern_keys:
        del serialized["coin_spends"]
    return recurse_jsonify(serialized)
def __init__(self, coin_store: CoinStore, consensus_constants: ConsensusConstants):
    """Initialize the mempool manager from a coin store and consensus constants."""
    self.constants: ConsensusConstants = consensus_constants
    self.constants_json = recurse_jsonify(dataclasses.asdict(self.constants))

    # Transactions that were unable to enter mempool, used for retry. (they were invalid)
    self.potential_txs: Dict[bytes32, Tuple[SpendBundle, CostResult, bytes32]] = {}

    # Keep track of seen spend_bundles
    self.seen_bundle_hashes: Dict[bytes32, bytes32] = {}

    self.coin_store = coin_store

    # Size the mempool from throughput: tx/sec * sec/block * buffered block count.
    # With the default constants this works out to MEMPOOL_SIZE = 60000.
    txs_per_second = self.constants.TX_PER_SEC
    seconds_per_block = self.constants.SUB_SLOT_TIME_TARGET // self.constants.SLOT_BLOCKS_TARGET
    buffered_blocks = self.constants.MEMPOOL_BLOCK_BUFFER
    self.mempool_size = int(txs_per_second * seconds_per_block * buffered_blocks)

    self.potential_cache_size = 300
    self.seen_cache_size = 10000
    self.pool = ProcessPoolExecutor(max_workers=1)

    # The mempool will correspond to a certain peak
    self.peak: Optional[BlockRecord] = None
    self.mempool: Mempool = Mempool.create(self.mempool_size)