Example #1
0
def get_vdf_info_and_proof(
    constants: ConsensusConstants,
    vdf_input: ClassgroupElement,
    challenge_hash: bytes32,
    number_iters: uint64,
) -> Tuple[VDFInfo, VDFProof]:
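    # prove() returns a byte string laid out as: a | b | proof, where a and b are
    # each int_size signed big-endian bytes and the proof fills 2 * int_size bytes.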
    int_size = (constants.DISCRIMINANT_SIZE_BITS + 16) >> 4
    result: bytes = prove(
        bytes(challenge_hash),
        str(vdf_input.a),
        str(vdf_input.b),
        constants.DISCRIMINANT_SIZE_BITS,
        number_iters,
    )

    output = ClassgroupElement(
        int512(int.from_bytes(
            result[0:int_size],
            "big",
            signed=True,
        )),
        int512(
            int.from_bytes(
                result[int_size:2 * int_size],
                "big",
                signed=True,
            )),
    )
    proof_bytes = result[2 * int_size:4 * int_size]
    return VDFInfo(challenge_hash, number_iters,
                   output), VDFProof(uint8(0), proof_bytes)
Example #2
0
    def _create_block(
            self,
            test_constants: ConsensusConstants,
            challenge_hash: bytes32,
            height: uint32,
            prev_header_hash: bytes32,
            prev_iters: uint64,
            prev_weight: uint128,
            timestamp: uint64,
            difficulty: int,
            min_iters: int,
            seed: bytes,
            genesis: bool = False,
            reward_puzzlehash: Optional[bytes32] = None,
            transactions: Optional[Program] = None,
            aggsig: Optional[G2Element] = None,
            fees: uint64 = uint64(0),
    ) -> FullBlock:
        """
        Creates a block with the specified details. Uses the stored plots to create a proof of space,
        and also evaluates the VDF for the proof of time.
        """
        selected_plot_info = None
        selected_proof_index = 0
        selected_quality: Optional[bytes] = None
        best_quality = 0
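        # Iterate plots in a deterministic (sorted) order so the seeded selection below is reproducible.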
        plots = [
            pinfo for _, pinfo in sorted(list(self.plots.items()),
                                         key=lambda x: str(x[0]))
        ]
        if self.use_any_pos:
            random.seed(seed)
            for i in range(len(plots) * 3):
                # Allow passing in a seed, to create reorgs and different chains
                seeded_pn = random.randint(0, len(plots) - 1)
                plot_info = plots[seeded_pn]
                plot_id = plot_info.prover.get_id()
                ccp = ProofOfSpace.can_create_proof(
                    plot_id,
                    challenge_hash,
                    test_constants.NUMBER_ZERO_BITS_CHALLENGE_SIG,
                )
                if not ccp:
                    continue
                qualities = plot_info.prover.get_qualities_for_challenge(
                    challenge_hash)
                if len(qualities) > 0:
                    selected_plot_info = plot_info
                    selected_quality = qualities[0]
                    break
        else:
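            # Otherwise scan every plot and keep the proof with the numerically highest quality.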
            for i in range(len(plots)):
                plot_info = plots[i]
                j = 0
                plot_id = plot_info.prover.get_id()
                ccp = ProofOfSpace.can_create_proof(
                    plot_id,
                    challenge_hash,
                    test_constants.NUMBER_ZERO_BITS_CHALLENGE_SIG,
                )
                if not ccp:
                    continue
                qualities = plot_info.prover.get_qualities_for_challenge(
                    challenge_hash)
                for quality in qualities:
                    qual_int = int.from_bytes(quality, "big", signed=False)
                    if qual_int > best_quality:
                        best_quality = qual_int
                        selected_quality = quality
                        selected_plot_info = plot_info
                        selected_proof_index = j
                    j += 1

        assert selected_plot_info is not None
        if selected_quality is None:
            raise RuntimeError("No proofs for this challenge")

        proof_xs: bytes = selected_plot_info.prover.get_full_proof(
            challenge_hash, selected_proof_index)

        plot_pk = ProofOfSpace.generate_plot_public_key(
            selected_plot_info.local_sk.get_g1(),
            selected_plot_info.farmer_public_key,
        )
        proof_of_space: ProofOfSpace = ProofOfSpace(
            challenge_hash,
            selected_plot_info.pool_public_key,
            plot_pk,
            selected_plot_info.prover.get_size(),
            proof_xs,
        )

        number_iters: uint64 = pot_iterations.calculate_iterations(
            proof_of_space,
            difficulty,
            min_iters,
            test_constants.NUMBER_ZERO_BITS_CHALLENGE_SIG,
        )
        if self.real_plots:
            print(f"Performing {number_iters} VDF iterations")

        int_size = (test_constants.DISCRIMINANT_SIZE_BITS + 16) >> 4
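        # prove() packs the output as a | b | proof, where a and b each occupy
        # int_size signed big-endian bytes.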

        result = prove(challenge_hash, test_constants.DISCRIMINANT_SIZE_BITS,
                       number_iters)

        output = ClassgroupElement(
            int512(int.from_bytes(
                result[0:int_size],
                "big",
                signed=True,
            )),
            int512(
                int.from_bytes(
                    result[int_size:2 * int_size],
                    "big",
                    signed=True,
                )),
        )
        proof_bytes = result[2 * int_size:4 * int_size]

        proof_of_time = ProofOfTime(
            challenge_hash,
            number_iters,
            output,
            uint8(0),
            proof_bytes,
        )

        # Use the extension data to create different blocks based on header hash
        extension_data: bytes32 = bytes32(
            [random.randint(0, 255) for _ in range(32)])
        cost = uint64(0)

        fee_reward = uint64(block_rewards.calculate_base_fee(height) + fees)

        std_hash(std_hash(height))

        # Create filter
        byte_array_tx: List[bytearray] = []
        tx_additions: List[Coin] = []
        tx_removals: List[bytes32] = []
        if transactions:
            error, npc_list, _ = get_name_puzzle_conditions(transactions)
            additions: List[Coin] = additions_for_npc(npc_list)
            for coin in additions:
                tx_additions.append(coin)
                byte_array_tx.append(bytearray(coin.puzzle_hash))
            for npc in npc_list:
                tx_removals.append(npc.coin_name)
                byte_array_tx.append(bytearray(npc.coin_name))
        farmer_ph = self.farmer_ph
        pool_ph = self.pool_ph
        if reward_puzzlehash is not None:
            farmer_ph = reward_puzzlehash
            pool_ph = reward_puzzlehash

        byte_array_tx.append(bytearray(farmer_ph))
        byte_array_tx.append(bytearray(pool_ph))
        bip158: PyBIP158 = PyBIP158(byte_array_tx)
        encoded = bytes(bip158.GetEncoded())

        removal_merkle_set = MerkleSet()
        addition_merkle_set = MerkleSet()

        # Create removal Merkle set
        for coin_name in tx_removals:
            removal_merkle_set.add_already_hashed(coin_name)

        # Create addition Merkle set
        puzzlehash_coin_map: Dict[bytes32, List[Coin]] = {}
        cb_reward = calculate_block_reward(height)
        cb_coin = create_coinbase_coin(height, pool_ph, cb_reward)
        fees_coin = create_fees_coin(height, farmer_ph, fee_reward)
        for coin in tx_additions + [cb_coin, fees_coin]:
            if coin.puzzle_hash in puzzlehash_coin_map:
                puzzlehash_coin_map[coin.puzzle_hash].append(coin)
            else:
                puzzlehash_coin_map[coin.puzzle_hash] = [coin]

        # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
        for puzzle, coins in puzzlehash_coin_map.items():
            addition_merkle_set.add_already_hashed(puzzle)
            addition_merkle_set.add_already_hashed(hash_coin_list(coins))

        additions_root = addition_merkle_set.get_root()
        removal_root = removal_merkle_set.get_root()

        generator_hash = (transactions.get_tree_hash()
                          if transactions is not None else bytes32([0] * 32))
        filter_hash = std_hash(encoded)

        pool_target = PoolTarget(pool_ph, uint32(height))
        pool_target_signature = self.get_pool_key_signature(
            pool_target, proof_of_space.pool_public_key)
        assert pool_target_signature is not None
        final_aggsig: G2Element = pool_target_signature
        if aggsig is not None:
            final_aggsig = AugSchemeMPL.aggregate([final_aggsig, aggsig])

        header_data: HeaderData = HeaderData(
            height,
            prev_header_hash,
            timestamp,
            filter_hash,
            proof_of_space.get_hash(),
            uint128(prev_weight + difficulty),
            uint64(prev_iters + number_iters),
            additions_root,
            removal_root,
            farmer_ph,
            fee_reward,
            pool_target,
            final_aggsig,
            cost,
            extension_data,
            generator_hash,
        )

        header_hash_sig: G2Element = self.get_plot_signature(
            header_data, plot_pk)

        header: Header = Header(header_data, header_hash_sig)

        full_block: FullBlock = FullBlock(proof_of_space, proof_of_time,
                                          header, transactions, encoded)

        return full_block
Example #3
0
    async def _do_process_communication(self, challenge_hash, challenge_weight,
                                        ip, reader, writer):
        disc: int = create_discriminant(
            challenge_hash, self.constants["DISCRIMINANT_SIZE_BITS"])

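        # Handshake: send the discriminant's decimal length as a prefix, followed by the discriminant itself.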
        prefix = str(len(str(disc)))
        if len(prefix) == 1:
            prefix = "00" + prefix
        writer.write((prefix + str(disc)).encode())
        await writer.drain()

        try:
            ok = await reader.readexactly(2)
        except (asyncio.IncompleteReadError, ConnectionResetError,
                Exception) as e:
            log.warning(f"{type(e)} {e}")
            async with self.lock:
                if challenge_hash not in self.done_discriminants:
                    self.done_discriminants.append(challenge_hash)
            return

        if ok.decode() != "OK":
            return

        log.info("Got handshake with VDF client.")

        async with self.lock:
            self.active_discriminants[challenge_hash] = (writer,
                                                         challenge_weight, ip)
            self.active_discriminants_start_time[challenge_hash] = time.time()

        asyncio.create_task(self._send_iterations(challenge_hash, writer))

        # Listen to the client until "STOP" is received.
        while True:
            try:
                data = await reader.readexactly(4)
            except (asyncio.IncompleteReadError, ConnectionResetError,
                    Exception) as e:
                log.warning(f"{type(e)} {e}")
                async with self.lock:
                    if challenge_hash in self.active_discriminants:
                        del self.active_discriminants[challenge_hash]
                    if challenge_hash in self.active_discriminants_start_time:
                        del self.active_discriminants_start_time[
                            challenge_hash]
                    if challenge_hash not in self.done_discriminants:
                        self.done_discriminants.append(challenge_hash)
                break

            msg = ""
            try:
                msg = data.decode()
            except Exception as e:
                log.error(f"Exception while decoding data: {e}")

            if msg == "STOP":
                log.info(f"Stopped client running on ip {ip}.")
                async with self.lock:
                    writer.write(b"ACK")
                    await writer.drain()
                break
            else:
                try:
                    # This must be a proof; 4 bytes is the length prefix
                    length = int.from_bytes(data, "big")
                    proof = await reader.readexactly(length)
                    stdout_bytes_io: io.BytesIO = io.BytesIO(
                        bytes.fromhex(proof.decode()))
                except (
                        asyncio.IncompleteReadError,
                        ConnectionResetError,
                        Exception,
                ) as e:
                    log.warning(f"{type(e)} {e}")
                    async with self.lock:
                        if challenge_hash in self.active_discriminants:
                            del self.active_discriminants[challenge_hash]
                        if challenge_hash in self.active_discriminants_start_time:
                            del self.active_discriminants_start_time[
                                challenge_hash]
                        if challenge_hash not in self.done_discriminants:
                            self.done_discriminants.append(challenge_hash)
                    break

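                # Proof payload layout: 8-byte iteration count, 8-byte y size,
                # y bytes (a followed by b), then the proof.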
                iterations_needed = uint64(
                    int.from_bytes(stdout_bytes_io.read(8), "big",
                                   signed=True))

                y_size_bytes = stdout_bytes_io.read(8)
                y_size = uint64(
                    int.from_bytes(y_size_bytes, "big", signed=True))

                y_bytes = stdout_bytes_io.read(y_size)

                proof_bytes: bytes = stdout_bytes_io.read()

                # Verifies our own proof just in case
                a = int.from_bytes(y_bytes[:129], "big", signed=True)
                b = int.from_bytes(y_bytes[129:], "big", signed=True)

                output = ClassgroupElement(int512(a), int512(b))

                proof_of_time = ProofOfTime(
                    challenge_hash,
                    iterations_needed,
                    output,
                    self.config["n_wesolowski"],
                    proof_bytes,
                )

                if not proof_of_time.is_valid(
                        self.constants["DISCRIMINANT_SIZE_BITS"]):
                    log.error("Invalid proof of time")

                response = timelord_protocol.ProofOfTimeFinished(proof_of_time)

                await self._update_avg_ips(challenge_hash, iterations_needed,
                                           ip)

                async with self.lock:
                    self.proofs_to_write.append(
                        OutboundMessage(
                            NodeType.FULL_NODE,
                            Message("proof_of_time_finished", response),
                            Delivery.BROADCAST,
                        ))

                await self._update_proofs_count(challenge_weight)
Example #4
0
    def _create_block(
        self,
        test_constants: Dict,
        challenge_hash: bytes32,
        height: uint32,
        prev_header_hash: bytes32,
        prev_iters: uint64,
        prev_weight: uint128,
        timestamp: uint64,
        difficulty: uint64,
        min_iters: uint64,
        seed: bytes,
        genesis: bool = False,
        reward_puzzlehash: Optional[bytes32] = None,
        transactions: Optional[Program] = None,
        aggsig: Optional[BLSSignature] = None,
        fees: uint64 = uint64(0),
    ) -> FullBlock:
        """
        Creates a block with the specified details. Uses the stored plots to create a proof of space,
        and also evaluates the VDF for the proof of time.
        """
        selected_prover = None
        selected_plot_sk = None
        selected_pool_sk = None
        selected_proof_index = 0
        plots = list(self.plot_config["plots"].items())
        selected_quality: Optional[bytes] = None
        best_quality = 0
        if self.use_any_pos:
            for i in range(len(plots) * 3):
                # Allow passing in a seed, to create reorgs and different chains
                random.seed(seed + i.to_bytes(4, "big"))
                seeded_pn = random.randint(0, len(plots) - 1)
                pool_sk = PrivateKey.from_bytes(
                    bytes.fromhex(plots[seeded_pn][1]["pool_sk"])
                )
                plot_sk = PrivateKey.from_bytes(
                    bytes.fromhex(plots[seeded_pn][1]["sk"])
                )
                prover = DiskProver(plots[seeded_pn][0])
                qualities = prover.get_qualities_for_challenge(challenge_hash)
                if len(qualities) > 0:
                    if self.use_any_pos:
                        selected_quality = qualities[0]
                        selected_prover = prover
                        selected_pool_sk = pool_sk
                        selected_plot_sk = plot_sk
                        break
        else:
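            # Otherwise scan every plot and keep the proof with the numerically highest quality.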
            for i in range(len(plots)):
                pool_sk = PrivateKey.from_bytes(bytes.fromhex(plots[i][1]["pool_sk"]))
                plot_sk = PrivateKey.from_bytes(bytes.fromhex(plots[i][1]["sk"]))
                prover = DiskProver(plots[i][0])
                qualities = prover.get_qualities_for_challenge(challenge_hash)
                j = 0
                for quality in qualities:
                    qual_int = int.from_bytes(quality, "big", signed=False)
                    if qual_int > best_quality:
                        best_quality = qual_int
                        selected_quality = quality
                        selected_prover = prover
                        selected_pool_sk = pool_sk
                        selected_plot_sk = plot_sk
                        selected_proof_index = j
                    j += 1

        assert selected_prover
        assert selected_pool_sk
        assert selected_plot_sk
        pool_pk = selected_pool_sk.get_public_key()
        plot_pk = selected_plot_sk.get_public_key()
        if selected_quality is None:
            raise RuntimeError("No proofs for this challenge")

        proof_xs: bytes = selected_prover.get_full_proof(
            challenge_hash, selected_proof_index
        )
        proof_of_space: ProofOfSpace = ProofOfSpace(
            challenge_hash, pool_pk, plot_pk, selected_prover.get_size(), proof_xs
        )
        number_iters: uint64 = pot_iterations.calculate_iterations(
            proof_of_space, difficulty, min_iters
        )
        # print("Doing iters", number_iters)
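        # The prover output packs a and b as int_size signed big-endian bytes each, followed by the proof.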
        int_size = (test_constants["DISCRIMINANT_SIZE_BITS"] + 16) >> 4

        result = prove(
            challenge_hash, test_constants["DISCRIMINANT_SIZE_BITS"], number_iters
        )

        output = ClassgroupElement(
            int512(int.from_bytes(result[0:int_size], "big", signed=True,)),
            int512(
                int.from_bytes(result[int_size : 2 * int_size], "big", signed=True,)
            ),
        )
        proof_bytes = result[2 * int_size : 4 * int_size]

        proof_of_time = ProofOfTime(
            challenge_hash, number_iters, output, self.n_wesolowski, proof_bytes,
        )

        if not reward_puzzlehash:
            reward_puzzlehash = self.fee_target

        # Use the extension data to create different blocks based on header hash
        extension_data: bytes32 = bytes32([random.randint(0, 255) for _ in range(32)])
        cost = uint64(0)

        coinbase_reward = block_rewards.calculate_block_reward(height)
        fee_reward = uint64(block_rewards.calculate_base_fee(height) + fees)

        coinbase_coin, coinbase_signature = create_coinbase_coin_and_signature(
            height, reward_puzzlehash, coinbase_reward, selected_pool_sk
        )

        parent_coin_name = std_hash(std_hash(height))
        fees_coin = Coin(parent_coin_name, reward_puzzlehash, uint64(fee_reward))

        # Create filter
        byte_array_tx: List[bytearray] = []
        tx_additions: List[Coin] = []
        tx_removals: List[bytes32] = []
        encoded = None
        if transactions:
            error, npc_list, _ = get_name_puzzle_conditions(transactions)
            additions: List[Coin] = additions_for_npc(npc_list)
            for coin in additions:
                tx_additions.append(coin)
                byte_array_tx.append(bytearray(coin.puzzle_hash))
            for npc in npc_list:
                tx_removals.append(npc.coin_name)
                byte_array_tx.append(bytearray(npc.coin_name))

            bip158: PyBIP158 = PyBIP158(byte_array_tx)
            encoded = bytes(bip158.GetEncoded())

        removal_merkle_set = MerkleSet()
        addition_merkle_set = MerkleSet()

        # Create removal Merkle set
        for coin_name in tx_removals:
            removal_merkle_set.add_already_hashed(coin_name)

        # Create addition Merkle set
        puzzlehash_coin_map: Dict[bytes32, List[Coin]] = {}
        for coin in tx_additions:
            if coin.puzzle_hash in puzzlehash_coin_map:
                puzzlehash_coin_map[coin.puzzle_hash].append(coin)
            else:
                puzzlehash_coin_map[coin.puzzle_hash] = [coin]

        # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
        for puzzle, coins in puzzlehash_coin_map.items():
            addition_merkle_set.add_already_hashed(puzzle)
            addition_merkle_set.add_already_hashed(hash_coin_list(coins))

        additions_root = addition_merkle_set.get_root()
        removal_root = removal_merkle_set.get_root()

        generator_hash = (
            transactions.get_tree_hash()
            if transactions is not None
            else bytes32([0] * 32)
        )
        filter_hash = std_hash(encoded) if encoded is not None else bytes32([0] * 32)

        header_data: HeaderData = HeaderData(
            height,
            prev_header_hash,
            timestamp,
            filter_hash,
            proof_of_space.get_hash(),
            uint128(prev_weight + difficulty),
            uint64(prev_iters + number_iters),
            additions_root,
            removal_root,
            coinbase_coin,
            coinbase_signature,
            fees_coin,
            aggsig,
            cost,
            extension_data,
            generator_hash,
        )

        header_hash_sig: PrependSignature = selected_plot_sk.sign_prepend(
            header_data.get_hash()
        )

        header: Header = Header(header_data, header_hash_sig)

        full_block: FullBlock = FullBlock(
            proof_of_space, proof_of_time, header, transactions, encoded
        )

        return full_block
Example #5
0
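# Default classgroup element (a = 2, b = 1); presumably the standard VDF starting
# form used as vdf_input / initial_form in the other examples.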
def get_default_element():
    return ClassgroupElement(int512(2), int512(1))
Example #6
0
    async def _do_process_communication(
        self,
        chain: Chain,
        challenge: bytes32,
        initial_form: ClassgroupElement,
        ip: str,
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
    ):
        disc: int = create_discriminant(challenge,
                                        self.constants.DISCRIMINANT_SIZE_BITS)

        try:
            # Depending on the flags 'fast_algorithm' and 'sanitizer_mode',
            # the timelord tells the vdf_client what to execute.
            async with self.lock:
                if self.config["fast_algorithm"]:
                    # Run n-wesolowski (fast) algorithm.
                    writer.write(b"N")
                else:
                    # Run two-wesolowski (slow) algorithm.
                    writer.write(b"T")
                await writer.drain()

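            # Send the discriminant as a three-character decimal length prefix followed by its digits.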
            prefix = str(len(str(disc)))
            if len(prefix) == 1:
                prefix = "00" + prefix
            if len(prefix) == 2:
                prefix = "0" + prefix
            async with self.lock:
                writer.write((prefix + str(disc)).encode())
                await writer.drain()

            # Send (a, b) from 'initial_form'.
            for num in [initial_form.a, initial_form.b]:
                prefix_l = len(str(num))
                prefix_len = len(str(prefix_l))
                async with self.lock:
                    writer.write(
                        (str(prefix_len) + str(prefix_l) + str(num)).encode())
                    await writer.drain()
            try:
                ok = await reader.readexactly(2)
            except (asyncio.IncompleteReadError, ConnectionResetError,
                    Exception) as e:
                log.warning(f"{type(e)} {e}")
                async with self.lock:
                    self.vdf_failures.append(chain)
                    self.vdf_failures_count += 1
                return

            if ok.decode() != "OK":
                return

            log.info("Got handshake with VDF client.")
            async with self.lock:
                self.allows_iters.append(chain)
            # Listen to the client until "STOP" is received.
            while True:
                try:
                    data = await reader.readexactly(4)
                except (
                        asyncio.IncompleteReadError,
                        ConnectionResetError,
                        Exception,
                ) as e:
                    log.warning(f"{type(e)} {e}")
                    async with self.lock:
                        self.vdf_failures.append(chain)
                        self.vdf_failures_count += 1
                    break

                msg = ""
                try:
                    msg = data.decode()
                except Exception:
                    pass
                if msg == "STOP":
                    log.info(f"Stopped client running on ip {ip}.")
                    async with self.lock:
                        writer.write(b"ACK")
                        await writer.drain()
                    break
                else:
                    try:
                        # This must be a proof, 4 bytes is length prefix
                        length = int.from_bytes(data, "big")
                        proof = await reader.readexactly(length)
                        stdout_bytes_io: io.BytesIO = io.BytesIO(
                            bytes.fromhex(proof.decode()))
                    except (
                            asyncio.IncompleteReadError,
                            ConnectionResetError,
                            Exception,
                    ) as e:
                        log.warning(f"{type(e)} {e}")
                        async with self.lock:
                            self.vdf_failures.append(chain)
                            self.vdf_failures_count += 1
                        break

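                    # Proof payload layout: 8-byte iteration count, 8-byte y size,
                    # y bytes (a | b), 1-byte witness type, then the proof.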
                    iterations_needed = uint64(
                        int.from_bytes(stdout_bytes_io.read(8),
                                       "big",
                                       signed=True))

                    y_size_bytes = stdout_bytes_io.read(8)
                    y_size = uint64(
                        int.from_bytes(y_size_bytes, "big", signed=True))

                    y_bytes = stdout_bytes_io.read(y_size)
                    witness_type = uint8(
                        int.from_bytes(stdout_bytes_io.read(1),
                                       "big",
                                       signed=True))
                    proof_bytes: bytes = stdout_bytes_io.read()

                    # Verifies our own proof just in case
                    int_size = (self.constants.DISCRIMINANT_SIZE_BITS +
                                16) >> 4
                    a = int.from_bytes(y_bytes[:int_size], "big", signed=True)
                    b = int.from_bytes(y_bytes[int_size:], "big", signed=True)
                    output = ClassgroupElement(int512(a), int512(b))
                    time_taken = time.time() - self.chain_start_time[chain]
                    ips = int(iterations_needed / time_taken * 10) / 10
                    log.info(
                        f"Finished PoT chall:{challenge[:10].hex()}.. {iterations_needed}"
                        f" iters, "
                        f"Estimated IPS: {ips}, Chain: {chain}")

                    vdf_info: VDFInfo = VDFInfo(
                        challenge,
                        iterations_needed,
                        output,
                    )
                    vdf_proof: VDFProof = VDFProof(
                        witness_type,
                        proof_bytes,
                    )

                    if not vdf_proof.is_valid(self.constants, initial_form,
                                              vdf_info):
                        log.error("Invalid proof of time!")
                    async with self.lock:
                        self.proofs_finished.append(
                            (chain, vdf_info, vdf_proof))
        except ConnectionResetError as e:
            log.info(f"Connection reset with VDF client {e}")
Example #7
0
    async def _do_process_communication(
        self, challenge_hash, challenge_weight, ip, reader, writer
    ):
        disc: int = create_discriminant(challenge_hash, self.discriminant_size_bits)
        # Depending on the flags 'fast_algorithm' and 'sanitizer_mode',
        # the timelord tells the vdf_client what to execute.
        if not self.sanitizer_mode:
            if self.config["fast_algorithm"]:
                # Run n-wesolowski (fast) algorithm.
                writer.write(b"N")
            else:
                # Run two-wesolowski (slow) algorithm.
                writer.write(b"T")
        else:
            # Create compact proofs of time.
            writer.write(b"S")
        await writer.drain()

        prefix = str(len(str(disc)))
        if len(prefix) == 1:
            prefix = "00" + prefix
        writer.write((prefix + str(disc)).encode())
        await writer.drain()

        try:
            ok = await reader.readexactly(2)
        except (asyncio.IncompleteReadError, ConnectionResetError, Exception) as e:
            log.warning(f"{type(e)} {e}")
            async with self.lock:
                if challenge_hash not in self.done_discriminants:
                    self.done_discriminants.append(challenge_hash)
                if self.sanitizer_mode:
                    if challenge_hash in self.pending_iters:
                        del self.pending_iters[challenge_hash]
                    if challenge_hash in self.submitted_iters:
                        del self.submitted_iters[challenge_hash]
            return

        if ok.decode() != "OK":
            return

        log.info("Got handshake with VDF client.")

        async with self.lock:
            self.active_discriminants[challenge_hash] = (writer, challenge_weight, ip)
            self.active_discriminants_start_time[challenge_hash] = time.time()

        asyncio.create_task(self._send_iterations(challenge_hash, writer))

        # Listen to the client until "STOP" is received.
        while True:
            try:
                data = await reader.readexactly(4)
            except (asyncio.IncompleteReadError, ConnectionResetError, Exception) as e:
                log.warning(f"{type(e)} {e}")
                async with self.lock:
                    if challenge_hash in self.active_discriminants:
                        del self.active_discriminants[challenge_hash]
                    if challenge_hash in self.active_discriminants_start_time:
                        del self.active_discriminants_start_time[challenge_hash]
                    if challenge_hash not in self.done_discriminants:
                        self.done_discriminants.append(challenge_hash)
                    if self.sanitizer_mode:
                        if challenge_hash in self.pending_iters:
                            del self.pending_iters[challenge_hash]
                        if challenge_hash in self.submitted_iters:
                            del self.submitted_iters[challenge_hash]
                break

            msg = ""
            try:
                msg = data.decode()
            except Exception as e:
                log.error(f"Exception while decoding data {e}")

            if msg == "STOP":
                log.info(f"Stopped client running on ip {ip}.")
                async with self.lock:
                    writer.write(b"ACK")
                    await writer.drain()
                break
            else:
                try:
                    # This must be a proof; 4 bytes is the length prefix
                    length = int.from_bytes(data, "big")
                    proof = await reader.readexactly(length)
                    stdout_bytes_io: io.BytesIO = io.BytesIO(
                        bytes.fromhex(proof.decode())
                    )
                except (
                    asyncio.IncompleteReadError,
                    ConnectionResetError,
                    Exception,
                ) as e:
                    log.warning(f"{type(e)} {e}")
                    async with self.lock:
                        if challenge_hash in self.active_discriminants:
                            del self.active_discriminants[challenge_hash]
                        if challenge_hash in self.active_discriminants_start_time:
                            del self.active_discriminants_start_time[challenge_hash]
                        if challenge_hash not in self.done_discriminants:
                            self.done_discriminants.append(challenge_hash)
                        if self.sanitizer_mode:
                            if challenge_hash in self.pending_iters:
                                del self.pending_iters[challenge_hash]
                            if challenge_hash in self.submitted_iters:
                                del self.submitted_iters[challenge_hash]
                    break

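                # Proof payload layout: 8-byte iteration count, 8-byte y size,
                # y bytes (a | b), 1-byte witness type, then the proof.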
                iterations_needed = uint64(
                    int.from_bytes(stdout_bytes_io.read(8), "big", signed=True)
                )

                y_size_bytes = stdout_bytes_io.read(8)
                y_size = uint64(int.from_bytes(y_size_bytes, "big", signed=True))

                y_bytes = stdout_bytes_io.read(y_size)
                witness_type = uint8(
                    int.from_bytes(stdout_bytes_io.read(1), "big", signed=True)
                )
                proof_bytes: bytes = stdout_bytes_io.read()

                # Verifies our own proof just in case
                a = int.from_bytes(y_bytes[:129], "big", signed=True)
                b = int.from_bytes(y_bytes[129:], "big", signed=True)

                output = ClassgroupElement(int512(a), int512(b))

                proof_of_time = ProofOfTime(
                    challenge_hash,
                    iterations_needed,
                    output,
                    witness_type,
                    proof_bytes,
                )

                if not proof_of_time.is_valid(self.discriminant_size_bits):
                    log.error("Invalid proof of time")

                response = timelord_protocol.ProofOfTimeFinished(proof_of_time)

                await self._update_avg_ips(challenge_hash, iterations_needed, ip)

                async with self.lock:
                    self.proofs_to_write.append(
                        OutboundMessage(
                            NodeType.FULL_NODE,
                            Message("proof_of_time_finished", response),
                            Delivery.BROADCAST,
                        )
                    )

                if not self.sanitizer_mode:
                    await self._update_proofs_count(challenge_weight)
                else:
                    async with self.lock:
                        writer.write(b"010")
                        await writer.drain()
                        try:
                            del self.active_discriminants[challenge_hash]
                            del self.active_discriminants_start_time[challenge_hash]
                            del self.pending_iters[challenge_hash]
                            del self.submitted_iters[challenge_hash]
                        except KeyError:
                            log.error("Discriminant stopped abnormally.")