def get_next_difficulty(
    constants: ConsensusConstants,
    sub_blocks: BlockchainInterface,
    prev_header_hash: bytes32,
    height: uint32,
    current_difficulty: uint64,
    deficit: uint8,
    new_slot: bool,
    signage_point_total_iters: uint128,
    skip_epoch_check=False,
) -> uint64:
    """
    Returns the difficulty of the next sub-block, i.e. the sub-block that extends the one at the
    given height. Used to calculate the number of iterations. When changing this, also change the
    implementation in wallet_state_manager.py.

    Args:
        constants: consensus constants being used for this chain
        sub_blocks: blockchain interface giving access to the sub-block records (SBRs) of all included sub-blocks
        prev_header_hash: header hash of the previous sub-block
        height: the sub-block height of the sub-block to look at
        current_difficulty: difficulty at the infusion point of the sub_block at height
        deficit: deficit of the sub_block at height
        new_slot: whether or not there is a new slot after height
        signage_point_total_iters: signage point iters of the sub_block at height
        skip_epoch_check: don't check correct epoch
    """
    next_height: uint32 = uint32(height + 1)

    if next_height < (constants.EPOCH_SUB_BLOCKS -
                      constants.MAX_SUB_SLOT_SUB_BLOCKS):
        # We are in the first epoch
        return uint64(constants.DIFFICULTY_STARTING)

    if not sub_blocks.contains_sub_block(prev_header_hash):
        raise ValueError(f"Header hash {prev_header_hash} not in sub blocks")

    prev_sb: SubBlockRecord = sub_blocks.sub_block_record(prev_header_hash)

    # If the next sub-block does not start a new epoch, keep the current difficulty
    if not skip_epoch_check:
        _, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants, height, deficit, sub_blocks, prev_header_hash, False)
        if not new_slot or not can_finish_epoch:
            return current_difficulty

    last_block_prev: SubBlockRecord = _get_last_block_in_previous_epoch(
        constants, sub_blocks, prev_sb)

    # Walk back to the most recent sub-block that is an actual block, and ensure it comes before the signage point
    last_block_curr = prev_sb
    while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_block:
        last_block_curr = sub_blocks.sub_block_record(
            last_block_curr.prev_hash)

    assert last_block_curr.timestamp is not None
    assert last_block_prev.timestamp is not None
    actual_epoch_time: uint64 = uint64(last_block_curr.timestamp -
                                       last_block_prev.timestamp)

    old_difficulty = uint64(
        prev_sb.weight - sub_blocks.sub_block_record(prev_sb.prev_hash).weight)

    # Terms are rearranged so there is only one division.
    new_difficulty_precise = (
        (last_block_curr.weight - last_block_prev.weight) *
        constants.SUB_SLOT_TIME_TARGET //
        (constants.SLOT_SUB_BLOCKS_TARGET * actual_epoch_time))
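    # Conceptually (a reading of the formula above, not new behavior): this is the weight added
    # per second over the last epoch, multiplied by the target seconds per sub-block
    # (SUB_SLOT_TIME_TARGET / SLOT_SUB_BLOCKS_TARGET). Since each sub-block adds `difficulty` to
    # the chain weight, this is the difficulty that would yield sub-blocks at the target rate.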
    # Take only SIGNIFICANT_BITS significant bits
    new_difficulty = uint64(
        truncate_to_significant_bits(new_difficulty_precise,
                                     constants.SIGNIFICANT_BITS))
    assert count_significant_bits(new_difficulty) <= constants.SIGNIFICANT_BITS

    # Only change by a maximum factor, to prevent attacks (as in the Greenpaper), and the result must be at least 1
    max_diff = uint64(
        truncate_to_significant_bits(
            constants.DIFFICULTY_FACTOR * old_difficulty,
            constants.SIGNIFICANT_BITS,
        ))
    min_diff = uint64(
        truncate_to_significant_bits(
            old_difficulty // constants.DIFFICULTY_FACTOR,
            constants.SIGNIFICANT_BITS,
        ))
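    # For example (illustrative numbers only, assuming DIFFICULTY_FACTOR = 3): with
    # old_difficulty = 1200, the clamp below keeps the new difficulty within roughly
    # [1200 // 3, 3 * 1200] = [400, 3600], each bound truncated to SIGNIFICANT_BITS.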
    if new_difficulty >= old_difficulty:
        return min(new_difficulty, max_diff)
    else:
        return max([uint64(1), new_difficulty, min_diff])
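

# Illustrative usage (a hedged sketch, not taken from this codebase): a node that has just added a
# sub-block `peak` on top of `blockchain` (a BlockchainInterface) might ask for the next difficulty
# roughly like this; `current_difficulty`, `new_slot` and `sp_total_iters` are assumed to come from
# the caller's own bookkeeping.
#
#   next_difficulty = get_next_difficulty(
#       constants,
#       blockchain,
#       peak.prev_hash,
#       peak.height,
#       current_difficulty,
#       peak.deficit,
#       new_slot,
#       sp_total_iters,
#   )

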
async def pre_validate_blocks_multiprocessing(
    constants: ConsensusConstants,
    constants_json: Dict,
    sub_blocks: BlockchainInterface,
    blocks: Sequence[Union[FullBlock, HeaderBlock]],
    pool: ProcessPoolExecutor,
) -> Optional[List[PreValidationResult]]:
    """
    This method must be called under the blockchain lock.
    Pre-validates the headers of the given blocks. If all blocks pass pre-validation (only the
    headers are validated here), returns a list of PreValidationResults containing the required
    iters for each block. If a validation issue occurs, it is reported in the corresponding
    PreValidationResult, or None is returned when a proof of space is invalid.

    Args:
        constants: consensus constants being used for this chain
        constants_json: JSON-serializable form of the consensus constants, passed to the worker processes
        sub_blocks: blockchain interface with the sub-block records of the current chain
        blocks: list of full blocks or header blocks to validate (must be connected to the current chain)
        pool: process pool executor used to validate batches of blocks concurrently
    """
    batch_size = 4
    prev_sb: Optional[SubBlockRecord] = None
    # Collects all the recent sub-blocks (up to the previous sub-epoch summary)
    recent_sub_blocks: Dict[bytes32, SubBlockRecord] = {}
    # A smaller subset of the above, sufficient for batches that contain no finished sub-slots
    recent_sub_blocks_compressed: Dict[bytes32, SubBlockRecord] = {}
    num_sub_slots_found = 0
    num_blocks_seen = 0
    if blocks[0].height > 0:
        if not sub_blocks.contains_sub_block(blocks[0].prev_header_hash):
            return [
                PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value),
                                    None, None)
            ]
        curr = sub_blocks.sub_block_record(blocks[0].prev_header_hash)
        num_sub_slots_to_look_for = 3 if curr.overflow else 2
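        # An overflow sub-block takes its challenge from one additional sub-slot back, so in that
        # case one extra finished sub-slot is collected (explanatory note, inferred from the
        # overflow handling below).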
        while (curr.sub_epoch_summary_included is None
               or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS
               or num_sub_slots_found < num_sub_slots_to_look_for
               ) and curr.height > 0:
            if num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS or num_sub_slots_found < num_sub_slots_to_look_for:
                recent_sub_blocks_compressed[curr.header_hash] = curr

            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                num_sub_slots_found += len(curr.finished_challenge_slot_hashes)
            recent_sub_blocks[curr.header_hash] = curr
            if curr.is_block:
                num_blocks_seen += 1
            curr = sub_blocks.sub_block_record(curr.prev_hash)
        recent_sub_blocks[curr.header_hash] = curr
        recent_sub_blocks_compressed[curr.header_hash] = curr
    # Remember which sub-blocks were already known, so that temporarily added ones can be removed later
    sub_block_was_present = [
        sub_blocks.contains_sub_block(block.header_hash) for block in blocks
    ]

    diff_ssis: List[Tuple[uint64, uint64]] = []
    for sub_block in blocks:
        if sub_block.height != 0 and prev_sb is None:
            prev_sb = sub_blocks.sub_block_record(sub_block.prev_header_hash)
        sub_slot_iters, difficulty = get_sub_slot_iters_and_difficulty(
            constants, sub_block, prev_sb, sub_blocks)

        if sub_block.reward_chain_sub_block.signage_point_index >= constants.NUM_SPS_SUB_SLOT:
            log.warning(f"Sub block: {sub_block.reward_chain_sub_block}")
        overflow = is_overflow_sub_block(
            constants, sub_block.reward_chain_sub_block.signage_point_index)
        challenge = get_block_challenge(
            constants,
            sub_block,
            BlockCache(recent_sub_blocks),
            prev_sb is None,
            overflow,
            False,
        )
        if sub_block.reward_chain_sub_block.challenge_chain_sp_vdf is None:
            cc_sp_hash: bytes32 = challenge
        else:
            cc_sp_hash = sub_block.reward_chain_sub_block.challenge_chain_sp_vdf.output.get_hash()
        q_str: Optional[bytes32] = sub_block.reward_chain_sub_block.proof_of_space.verify_and_get_quality_string(
            constants, challenge, cc_sp_hash
        )
        if q_str is None:
            # Invalid proof of space: roll back any sub-blocks temporarily added in earlier iterations
            for i, block_i in enumerate(blocks):
                if not sub_block_was_present[i] and sub_blocks.contains_sub_block(block_i.header_hash):
                    sub_blocks.remove_sub_block(block_i.header_hash)
            return None

        required_iters: uint64 = calculate_iterations_quality(
            q_str,
            sub_block.reward_chain_sub_block.proof_of_space.size,
            difficulty,
            cc_sp_hash,
        )

        sub_block_rec = block_to_sub_block_record(
            constants,
            sub_blocks,
            required_iters,
            sub_block,
            None,
        )
        recent_sub_blocks[sub_block_rec.header_hash] = sub_block_rec
        recent_sub_blocks_compressed[sub_block_rec.header_hash] = sub_block_rec
        sub_blocks.add_sub_block(sub_block_rec)  # Temporarily add sub block to dict
        prev_sb = sub_block_rec
        diff_ssis.append((difficulty, sub_slot_iters))

    for i, block in enumerate(blocks):
        if not sub_block_was_present[i]:
            sub_blocks.remove_sub_block(block.header_hash)

    recent_sb_compressed_pickled = {
        bytes(k): bytes(v)
        for k, v in recent_sub_blocks_compressed.items()
    }

    futures = []
    # Pool of workers to validate blocks concurrently
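    # For example (illustrative numbers): with 10 blocks and batch_size = 4, the loop below
    # produces batches covering indices [0:4], [4:8] and [8:10], each handed to a worker process.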
    for i in range(0, len(blocks), batch_size):
        end_i = min(i + batch_size, len(blocks))
        blocks_to_validate = blocks[i:end_i]
        if any(len(block.finished_sub_slots) > 0 for block in blocks_to_validate):
            final_pickled = {
                bytes(k): bytes(v)
                for k, v in recent_sub_blocks.items()
            }
        else:
            final_pickled = recent_sb_compressed_pickled
        hb_pickled: List[bytes] = []
        generators: List[Optional[bytes]] = []
        for block in blocks_to_validate:
            if isinstance(block, FullBlock):
                hb_pickled.append(bytes(block.get_block_header()))
                generators.append(
                    bytes(block.transactions_generator)
                    if block.transactions_generator is not None else None)
            else:
                hb_pickled.append(bytes(block))
                generators.append(None)

        futures.append(asyncio.get_running_loop().run_in_executor(
            pool,
            batch_pre_validate_sub_blocks,
            constants_json,
            final_pickled,
            hb_pickled,
            generators,
            True,
            [diff_ssis[j][0] for j in range(i, end_i)],
            [diff_ssis[j][1] for j in range(i, end_i)],
        ))
    # Collect all results into one flat list
    return [
        PreValidationResult.from_bytes(result)
        for batch_result in (await asyncio.gather(*futures))
        for result in batch_result
    ]
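

# Illustrative usage (a hedged sketch, not taken from this codebase): pre-validating a batch of
# downloaded blocks before adding them to the chain. `blockchain`, `full_blocks` and `executor`
# are assumed to be supplied by the caller, and the `error` field name is inferred from the
# PreValidationResult constructor calls above.
#
#   results = await pre_validate_blocks_multiprocessing(
#       constants, constants_json, blockchain, full_blocks, executor
#   )
#   if results is None or any(r.error is not None for r in results):
#       ...  # at least one block failed header pre-validation

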
def get_next_sub_slot_iters(
    constants: ConsensusConstants,
    sub_blocks: BlockchainInterface,
    prev_header_hash: bytes32,
    height: uint32,
    curr_sub_slot_iters: uint64,
    deficit: uint8,
    new_slot: bool,
    signage_point_total_iters: uint128,
    skip_epoch_check=False,
) -> uint64:
    """
    Returns the sub-slot iterations required for the next sub-block after the one at height, where
    new_slot is true iff the next sub-block will be in the next slot.

    Args:
        constants: consensus constants being used for this chain
        sub_blocks: blockchain interface giving access to the sub-block records (SBRs) of all included sub-blocks
        prev_header_hash: header hash of the previous sub-block
        height: the sub-block height of the sub-block to look at
        curr_sub_slot_iters: sub-slot iters at the infusion point of the sub_block at height
        deficit: deficit of the sub_block at height
        new_slot: whether or not there is a new slot after height
        signage_point_total_iters: signage point iters of the sub_block at height
        skip_epoch_check: don't check correct epoch
    """
    next_height: uint32 = uint32(height + 1)

    if next_height < (constants.EPOCH_SUB_BLOCKS -
                      constants.MAX_SUB_SLOT_SUB_BLOCKS):
        return uint64(constants.SUB_SLOT_ITERS_STARTING)

    if not sub_blocks.contains_sub_block(prev_header_hash):
        raise ValueError(f"Header hash {prev_header_hash} not in sub blocks")

    prev_sb: SubBlockRecord = sub_blocks.sub_block_record(prev_header_hash)

    # If we are in the same epoch, return same ssi
    if not skip_epoch_check:
        _, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants, height, deficit, sub_blocks, prev_header_hash, False)
        if not new_slot or not can_finish_epoch:
            return curr_sub_slot_iters

    last_block_prev: SubBlockRecord = _get_last_block_in_previous_epoch(
        constants, sub_blocks, prev_sb)

    # Walk back to the most recent sub-block that is an actual block, and ensure it comes before the signage point
    last_block_curr = prev_sb
    while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_block:
        last_block_curr = sub_blocks.sub_block_record(
            last_block_curr.prev_hash)
    assert last_block_curr.timestamp is not None and last_block_prev.timestamp is not None

    # This is computed as the iterations per second in the last epoch, times the target number of seconds per sub-slot
    new_ssi_precise: uint64 = uint64(
        constants.SUB_SLOT_TIME_TARGET *
        (last_block_curr.total_iters - last_block_prev.total_iters) //
        (last_block_curr.timestamp - last_block_prev.timestamp))
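    # In other words (a reading of the formula above, not new behavior): new_ssi is roughly
    # (VDF iterations per second over the last epoch) * SUB_SLOT_TIME_TARGET, so that a full
    # sub-slot of iterations again takes about SUB_SLOT_TIME_TARGET seconds.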
    new_ssi = uint64(
        truncate_to_significant_bits(new_ssi_precise,
                                     constants.SIGNIFICANT_BITS))

    # Only change by a max factor as a sanity check
    max_ssi = uint64(
        truncate_to_significant_bits(
            constants.DIFFICULTY_FACTOR * last_block_curr.sub_slot_iters,
            constants.SIGNIFICANT_BITS,
        ))
    min_ssi = uint64(
        truncate_to_significant_bits(
            last_block_curr.sub_slot_iters // constants.DIFFICULTY_FACTOR,
            constants.SIGNIFICANT_BITS,
        ))
    if new_ssi >= last_block_curr.sub_slot_iters:
        new_ssi = min(new_ssi, max_ssi)
    else:
        new_ssi = uint64(max([constants.NUM_SPS_SUB_SLOT, new_ssi, min_ssi]))

    # Round down to a multiple of NUM_SPS_SUB_SLOT, so that the signage points evenly divide the sub slot
    new_ssi = uint64(new_ssi - new_ssi % constants.NUM_SPS_SUB_SLOT)
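    # For example (illustrative numbers, assuming NUM_SPS_SUB_SLOT = 64): new_ssi = 135_000_100
    # rounds down to 135_000_064, which is 64 * 2_109_376.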
    assert count_significant_bits(new_ssi) <= constants.SIGNIFICANT_BITS
    return new_ssi