Code example #1
def calculate_base_fee(height: uint32) -> uint64:
    """
    Returns the base fee reward at a certain block height.
    1 base fee reward is 1/8 of total block reward
    """
    return uint64(2000000000000)
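
A quick sanity check of the 1/8 claim in the docstring, assuming the companion calculate_block_reward in the same codebase returns 14000000000000 (an assumed value for illustration, not shown above):

# Hedged sketch: the base fee should be 1/8 of the total block reward.
def calculate_block_reward(height: int) -> int:
    return 14000000000000  # assumed value, for illustration only

base_fee = 2000000000000
total_reward = base_fee + calculate_block_reward(0)
assert base_fee * 8 == total_reward  # 16000000000000
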
Code example #2
    async def unfinished_block(
        self, unfinished_block: peer_protocol.UnfinishedBlock
    ) -> OutboundMessageGenerator:
        """
        We have received an unfinished block, either created by us, or from another peer.
        We can validate it and if it's a good block, propagate it to other peers and
        timelords.
        """
        # Add the unfinished block to the seen set, and check whether it has been seen before
        if self.store.seen_unfinished_block(unfinished_block.block.header_hash):
            return

        if not self.blockchain.is_child_of_head(unfinished_block.block):
            return

        if not await self.blockchain.validate_unfinished_block(unfinished_block.block):
            raise InvalidUnfinishedBlock()

        prev_full_block: Optional[FullBlock] = await self.store.get_block(
            unfinished_block.block.prev_header_hash
        )

        assert prev_full_block

        prev_block: HeaderBlock = prev_full_block.header_block

        assert prev_block.challenge

        challenge_hash: bytes32 = prev_block.challenge.get_hash()
        difficulty: uint64 = self.blockchain.get_next_difficulty(
            unfinished_block.block.header_block.prev_header_hash
        )
        vdf_ips: uint64 = self.blockchain.get_next_ips(prev_block)

        iterations_needed: uint64 = calculate_iterations(
            unfinished_block.block.header_block.proof_of_space,
            difficulty,
            vdf_ips,
            constants["MIN_BLOCK_TIME"],
        )

        if (
            self.store.get_unfinished_block((challenge_hash, iterations_needed))
            is not None
        ):
            return

        expected_time: uint64 = uint64(
            int(iterations_needed / (self.store.get_proof_of_time_estimate_ips()))
        )

        if expected_time > constants["PROPAGATION_DELAY_THRESHOLD"]:
            log.info(f"Block is slow, expected {expected_time} seconds, waiting")
            # If this block is slow, sleep to allow faster blocks to come out first
            await asyncio.sleep(5)

        leader: Optional[Tuple[uint32, uint64]] = self.store.get_unfinished_block_leader()
        if leader is None or unfinished_block.block.height > leader[0]:
            log.info(
                f"This is the first unfinished block at height {unfinished_block.block.height}, so propagate."
            )
            # If this is the first block we see at this height, propagate
            self.store.set_unfinished_block_leader(
                (unfinished_block.block.height, expected_time)
            )
        elif unfinished_block.block.height == leader[0]:
            if expected_time > leader[1] + constants["PROPAGATION_THRESHOLD"]:
                # If VDF is expected to finish X seconds later than the best, don't propagate
                log.info(
                    f"VDF will finish too late {expected_time} seconds, so don't propagate"
                )
                return
            elif expected_time < leader[1]:
                log.info(
                    f"New best unfinished block at height {unfinished_block.block.height}"
                )
                # If this will be the first block to finalize, update our leader
                self.store.set_unfinished_block_leader((leader[0], expected_time))
        else:
            # If we have seen an unfinished block at a greater or equal height, don't propagate
            log.info(f"Unfinished block at old height, so don't propagate")
            return

        self.store.add_unfinished_block(
            (challenge_hash, iterations_needed), unfinished_block.block
        )

        timelord_request = timelord_protocol.ProofOfSpaceInfo(
            challenge_hash, iterations_needed
        )

        yield OutboundMessage(
            NodeType.TIMELORD,
            Message("proof_of_space_info", timelord_request),
            Delivery.BROADCAST,
        )
        yield OutboundMessage(
            NodeType.FULL_NODE,
            Message("unfinished_block", unfinished_block),
            Delivery.BROADCAST_TO_OTHERS,
        )
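
The propagation decision above can be isolated into a small, self-contained sketch (the names and threshold value here are illustrative assumptions, not the project's API):

# Hedged sketch of the leader-based propagation rule.
from typing import Optional, Tuple

PROPAGATION_THRESHOLD = 10  # assumed value, in seconds

def should_propagate(
    leader: Optional[Tuple[int, int]],  # (height, expected_time) of best block seen
    height: int,
    expected_time: int,
) -> bool:
    if leader is None or height > leader[0]:
        return True  # first unfinished block seen at this height
    if height == leader[0]:
        # Propagate unless this block would finish much later than the leader
        return expected_time <= leader[1] + PROPAGATION_THRESHOLD
    return False  # stale height, don't propagate

assert should_propagate(None, 5, 30)         # first block at height 5
assert not should_propagate((5, 10), 5, 25)  # 25 > 10 + 10, too slow
assert not should_propagate((6, 10), 5, 5)   # old height
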
Code example #3
    def _create_block(
        self,
        test_constants: Dict,
        challenge_hash: bytes32,
        height: uint32,
        prev_header_hash: bytes32,
        prev_iters: uint64,
        prev_weight: uint64,
        timestamp: uint64,
        difficulty: uint64,
        ips: uint64,
        seed: bytes,
    ) -> FullBlock:
        """
        Creates a block with the specified details. Uses the stored plots to create a proof of space,
        and also evaluates the VDF for the proof of time.
        """
        prover = None
        plot_pk = None
        plot_sk = None
        qualities: List[bytes] = []
        for pn in range(num_plots):
            # Allow passing in seed, to create reorgs and different chains
            seeded_pn = (pn + 17 * int.from_bytes(seed, "big")) % num_plots
            filename = self.filenames[seeded_pn]
            plot_pk = plot_pks[seeded_pn]
            plot_sk = plot_sks[seeded_pn]
            prover = DiskProver(os.path.join(self.plot_dir, filename))
            qualities = prover.get_qualities_for_challenge(challenge_hash)
            if len(qualities) > 0:
                break

        assert prover
        assert plot_pk
        assert plot_sk
        if len(qualities) == 0:
            raise NoProofsOfSpaceFound("No proofs for this challenge")

        proof_xs: bytes = prover.get_full_proof(challenge_hash, 0)
        proof_of_space: ProofOfSpace = ProofOfSpace(
            challenge_hash, pool_pk, plot_pk, k, [uint8(b) for b in proof_xs])
        number_iters: uint64 = pot_iterations.calculate_iterations(
            proof_of_space, difficulty, ips, test_constants["MIN_BLOCK_TIME"])

        disc: int = create_discriminant(
            challenge_hash, test_constants["DISCRIMINANT_SIZE_BITS"])
        start_x: ClassGroup = ClassGroup.from_ab_discriminant(2, 1, disc)
        y_cl, proof_bytes = create_proof_of_time_nwesolowski(
            disc, start_x, number_iters,
            test_constants["DISCRIMINANT_SIZE_BITS"],  # size in bits, not the discriminant itself
            n_wesolowski)

        output = ClassgroupElement(y_cl[0], y_cl[1])

        proof_of_time = ProofOfTime(
            challenge_hash,
            number_iters,
            output,
            n_wesolowski,
            [uint8(b) for b in proof_bytes],
        )

        coinbase: CoinbaseInfo = CoinbaseInfo(
            height,
            block_rewards.calculate_block_reward(uint32(height)),
            coinbase_target,
        )
        coinbase_sig: PrependSignature = pool_sk.sign_prepend(bytes(coinbase))
        fees_target: FeesTarget = FeesTarget(fee_target, uint64(0))
        solutions_generator: bytes32 = sha256(seed).digest()
        cost = uint64(0)
        body: Body = Body(coinbase, coinbase_sig, fees_target, None,
                          solutions_generator, cost)

        header_data: HeaderData = HeaderData(
            prev_header_hash,
            timestamp,
            bytes([0] * 32),
            proof_of_space.get_hash(),
            body.get_hash(),
            bytes([0] * 32),
        )

        header_hash_sig: PrependSignature = plot_sk.sign_prepend(
            header_data.get_hash())

        header: Header = Header(header_data, header_hash_sig)

        challenge = Challenge(
            challenge_hash,
            proof_of_space.get_hash(),
            proof_of_time.get_hash(),
            height,
            uint64(prev_weight + difficulty),
            uint64(prev_iters + number_iters),
        )
        header_block = HeaderBlock(proof_of_space, proof_of_time, challenge,
                                   header)

        full_block: FullBlock = FullBlock(header_block, body)

        return full_block
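
The seeded plot rotation near the top of _create_block is what lets callers produce reorgs and alternate chains; a minimal illustration of the indexing, with an assumed plot count:

# Hedged sketch: different seeds start the plot search at different indices.
num_plots = 5  # assumed value

def first_plot_index(seed: bytes) -> int:
    # Same expression as in _create_block, for the first candidate (pn = 0)
    return (0 + 17 * int.from_bytes(seed, "big")) % num_plots

assert first_plot_index(b"\x00") == 0
assert first_plot_index(b"\x01") == 2  # 17 % 5
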
Code example #4
    async def test_cc_spend(self, two_wallet_nodes):
        num_blocks = 10
        full_nodes, wallets = two_wallet_nodes
        full_node_1, server_1 = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        wallet2 = wallet_node_2.wallet_state_manager.main_wallet

        ph = await wallet.get_new_puzzlehash()

        await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(server_1._port)), None)

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        funds = sum(
            [
                calculate_base_fee(uint32(i)) + calculate_block_reward(uint32(i))
                for i in range(1, num_blocks - 2)
            ]
        )

        await self.time_out_assert(15, wallet.get_confirmed_balance, funds)

        cc_wallet: CCWallet = await CCWallet.create_new_cc(
            wallet_node.wallet_state_manager, wallet, uint64(100)
        )

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        await self.time_out_assert(15, cc_wallet.get_confirmed_balance, 100)
        await self.time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)

        assert cc_wallet.cc_info.my_core
        colour = cc_wallet_puzzles.get_genesis_from_core(cc_wallet.cc_info.my_core)

        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(
            wallet_node_2.wallet_state_manager, wallet2, colour
        )

        assert cc_wallet.cc_info.my_core == cc_wallet_2.cc_info.my_core

        cc_2_hash = await cc_wallet_2.get_new_inner_hash()
        await cc_wallet.cc_spend(uint64(60), cc_2_hash)

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        await self.time_out_assert(15, cc_wallet.get_confirmed_balance, 40)
        await self.time_out_assert(15, cc_wallet.get_unconfirmed_balance, 40)

        await self.time_out_assert(30, cc_wallet_2.get_confirmed_balance, 60)
        await self.time_out_assert(30, cc_wallet_2.get_unconfirmed_balance, 60)

        cc_hash = await cc_wallet.get_new_inner_hash()
        await cc_wallet_2.cc_spend(uint64(15), cc_hash)

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        await self.time_out_assert(15, cc_wallet.get_confirmed_balance, 55)
        await self.time_out_assert(15, cc_wallet.get_unconfirmed_balance, 55)
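
The balances the test asserts follow from simple bookkeeping, shown here in isolation:

# Hedged sketch of the coloured-coin balance arithmetic in the test.
wallet_1, wallet_2 = 100, 0
wallet_1, wallet_2 = wallet_1 - 60, wallet_2 + 60  # cc_wallet.cc_spend(60, ...)
wallet_1, wallet_2 = wallet_1 + 15, wallet_2 - 15  # cc_wallet_2.cc_spend(15, ...)
assert (wallet_1, wallet_2) == (55, 45)
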
Code example #5
    async def _sync(self) -> OutboundMessageGenerator:
        """
        Performs a full sync of the blockchain.
            - Check which are the heaviest tips
            - Request headers for the heaviest
            - Verify the weight of the tip, using the headers
            - Find the fork point to see where to start downloading blocks
            - Blacklist peers that provide invalid blocks
            - Sync blockchain up to heads (request blocks in batches)
        """
        log.info("Starting to perform sync with peers.")
        log.info("Waiting to receive tips from peers.")
        # TODO: better way to tell that we have finished receiving tips
        await asyncio.sleep(5)
        highest_weight: uint64 = uint64(0)
        tip_block: FullBlock
        tip_height = 0
        sync_start_time = time.time()

        # Based on responses from peers about the current heads, see which head is the heaviest
        # (similar to longest chain rule).

        potential_tips: List[
            Tuple[bytes32, FullBlock]
        ] = self.store.get_potential_tips_tuples()
        log.info(f"Have collected {len(potential_tips)} potential tips")
        for header_hash, potential_tip_block in potential_tips:
            if potential_tip_block.header_block.challenge is None:
                raise ValueError(
                    f"Invalid tip block {potential_tip_block.header_hash} received"
                )
            if potential_tip_block.header_block.challenge.total_weight > highest_weight:
                highest_weight = potential_tip_block.header_block.challenge.total_weight
                tip_block = potential_tip_block
                tip_height = potential_tip_block.header_block.challenge.height
        if highest_weight <= max(
            [t.weight for t in self.blockchain.get_current_tips()]
        ):
            log.info("Not performing sync, already caught up.")
            return

        assert tip_block
        log.info(f"Tip block {tip_block.header_hash} tip height {tip_block.height}")

        for height in range(0, tip_block.height + 1):
            self.store.set_potential_headers_received(uint32(height), Event())
            self.store.set_potential_blocks_received(uint32(height), Event())
            self.store.set_potential_hashes_received(Event())

        timeout = 200
        sleep_interval = 10
        total_time_slept = 0

        while True:
            if total_time_slept > timeout:
                raise TimeoutError("Took too long to fetch header hashes.")
            if self._shut_down:
                return
            # Download all the header hashes and find the fork point
            request = peer_protocol.RequestAllHeaderHashes(tip_block.header_hash)
            yield OutboundMessage(
                NodeType.FULL_NODE,
                Message("request_all_header_hashes", request),
                Delivery.RANDOM,
            )
            try:
                phr = self.store.get_potential_hashes_received()
                assert phr is not None
                await asyncio.wait_for(
                    phr.wait(), timeout=sleep_interval,
                )
                break
            except concurrent.futures.TimeoutError:
                total_time_slept += sleep_interval
                log.warning("Did not receive desired header hashes")

        # Finding the fork point allows us to only download headers and blocks from the fork point
        header_hashes = self.store.get_potential_hashes()
        fork_point_height: uint32 = self.blockchain.find_fork_point(header_hashes)
        fork_point_hash: bytes32 = header_hashes[fork_point_height]
        log.info(f"Fork point: {fork_point_hash} at height {fork_point_height}")

        # Now, we download all of the headers in order to verify the weight, in batches
        headers: List[HeaderBlock] = []

        # Download headers in batches. We download a few batches ahead in case there are delays or peers
        # that don't have the headers that we need.
        last_request_time: float = 0
        highest_height_requested: uint32 = uint32(0)
        request_made: bool = False
        for height_checkpoint in range(
            fork_point_height + 1, tip_height + 1, self.config["max_headers_to_send"]
        ):
            end_height = min(
                height_checkpoint + self.config["max_headers_to_send"], tip_height + 1
            )

            total_time_slept = 0
            while True:
                if self._shut_down:
                    return
                if total_time_slept > timeout:
                    raise TimeoutError("Took too long to fetch blocks")

                # Request batches that we don't have yet
                for batch in range(0, self.config["num_sync_batches"]):
                    batch_start = (
                        height_checkpoint + batch * self.config["max_headers_to_send"]
                    )
                    batch_end = min(
                        batch_start + self.config["max_headers_to_send"], tip_height + 1
                    )

                    if batch_start > tip_height:
                        # We have asked for all blocks
                        break

                    blocks_missing = any(
                        [
                            not (
                                self.store.get_potential_headers_received(uint32(h))
                            ).is_set()
                            for h in range(batch_start, batch_end)
                        ]
                    )
                    if (
                        time.time() - last_request_time > sleep_interval
                        and blocks_missing
                    ) or (batch_end - 1) > highest_height_requested:
                        # If we are missing header blocks in this batch and we haven't made
                        # a request in a while, request this batch. Also make the request if
                        # we have never requested this batch before
                        if batch_end - 1 > highest_height_requested:
                            highest_height_requested = batch_end - 1

                        request_made = True
                        request_hb = peer_protocol.RequestHeaderBlocks(
                            tip_block.header_block.header.get_hash(),
                            [uint32(h) for h in range(batch_start, batch_end)],
                        )
                        log.info(f"Requesting header blocks {batch_start, batch_end}.")
                        yield OutboundMessage(
                            NodeType.FULL_NODE,
                            Message("request_header_blocks", request_hb),
                            Delivery.RANDOM,
                        )
                if request_made:
                    # Reset the timer for requests, so we don't overload other peers with requests
                    last_request_time = time.time()
                    request_made = False

                # Wait for the first batch (the next "max_blocks_to_send" blocks to arrive)
                awaitables = [
                    (self.store.get_potential_headers_received(uint32(height))).wait()
                    for height in range(height_checkpoint, end_height)
                ]
                future = asyncio.gather(*awaitables, return_exceptions=True)
                try:
                    await asyncio.wait_for(future, timeout=sleep_interval)
                    break
                except concurrent.futures.TimeoutError:
                    try:
                        await future
                    except asyncio.CancelledError:
                        pass
                    total_time_slept += sleep_interval
                    log.info(f"Did not receive desired header blocks")

        for h in range(fork_point_height + 1, tip_height + 1):
            header = self.store.get_potential_header(uint32(h))
            assert header is not None
            headers.append(header)

        log.info(f"Downloaded headers up to tip height: {tip_height}")
        if not verify_weight(
            tip_block.header_block, headers, self.blockchain.headers[fork_point_hash],
        ):
            raise errors.InvalidWeight(
                f"Weight of {tip_block.header_block.header.get_hash()} not valid."
            )

        log.info(
            f"Validated weight of headers. Downloaded {len(headers)} headers, tip height {tip_height}"
        )
        assert tip_height == fork_point_height + len(headers)
        self.store.clear_potential_headers()
        headers.clear()

        # Download blocks in batches, and verify them as they come in. We download a few batches ahead,
        # in case there are delays.
        last_request_time = 0
        highest_height_requested = uint32(0)
        request_made = False
        for height_checkpoint in range(
            fork_point_height + 1, tip_height + 1, self.config["max_blocks_to_send"]
        ):
            end_height = min(
                height_checkpoint + self.config["max_blocks_to_send"], tip_height + 1
            )

            total_time_slept = 0
            while True:
                if self._shut_down:
                    return
                if total_time_slept > timeout:
                    raise TimeoutError("Took too long to fetch blocks")

                # Request batches that we don't have yet
                for batch in range(0, self.config["num_sync_batches"]):
                    batch_start = (
                        height_checkpoint + batch * self.config["max_blocks_to_send"]
                    )
                    batch_end = min(
                        batch_start + self.config["max_blocks_to_send"], tip_height + 1
                    )

                    if batch_start > tip_height:
                        # We have asked for all blocks
                        break

                    blocks_missing = any(
                        [
                            not (
                                self.store.get_potential_blocks_received(uint32(h))
                            ).is_set()
                            for h in range(batch_start, batch_end)
                        ]
                    )
                    if (
                        time.time() - last_request_time > sleep_interval
                        and blocks_missing
                    ) or (batch_end - 1) > highest_height_requested:
                        # If we are missing blocks in this batch and we haven't made a
                        # request in a while, request this batch. Also make the request if
                        # we have never requested this batch before
                        log.info(
                            f"Requesting sync blocks {[i for i in range(batch_start, batch_end)]}"
                        )
                        if batch_end - 1 > highest_height_requested:
                            highest_height_requested = batch_end - 1
                        request_made = True
                        request_sync = peer_protocol.RequestSyncBlocks(
                            tip_block.header_block.header.header_hash,
                            [
                                uint32(height)
                                for height in range(batch_start, batch_end)
                            ],
                        )
                        yield OutboundMessage(
                            NodeType.FULL_NODE,
                            Message("request_sync_blocks", request_sync),
                            Delivery.RANDOM,
                        )
                if request_made:
                    # Reset the timer for requests, so we don't overload other peers with requests
                    last_request_time = time.time()
                    request_made = False

                # Wait for the first batch (the next "max_blocks_to_send" blocks to arrive)
                awaitables = [
                    (self.store.get_potential_blocks_received(uint32(height))).wait()
                    for height in range(height_checkpoint, end_height)
                ]
                future = asyncio.gather(*awaitables, return_exceptions=True)
                try:
                    await asyncio.wait_for(future, timeout=sleep_interval)
                    break
                except concurrent.futures.TimeoutError:
                    try:
                        await future
                    except asyncio.CancelledError:
                        pass
                    total_time_slept += sleep_interval
                    log.info("Did not receive desired blocks")

            # Verifies this batch, which we are guaranteed to have (since we broke from the above loop)
            blocks = []
            for height in range(height_checkpoint, end_height):
                b: Optional[FullBlock] = await self.store.get_potential_block(
                    uint32(height)
                )
                assert b is not None
                blocks.append(b)

            validation_start_time = time.time()
            prevalidate_results = await self.blockchain.pre_validate_blocks(blocks)
            index = 0
            for height in range(height_checkpoint, end_height):
                if self._shut_down:
                    return
                block: Optional[FullBlock] = await self.store.get_potential_block(
                    uint32(height)
                )
                assert block is not None

                prev_block: Optional[FullBlock] = await self.store.get_potential_block(
                    uint32(height - 1)
                )
                if prev_block is None:
                    prev_block = await self.store.get_block(block.prev_header_hash)
                assert prev_block is not None

                # The block gets permanently added to the blockchain
                validated, pos = prevalidate_results[index]
                index += 1

                async with self.store.lock:
                    result = await self.blockchain.receive_block(
                        block, prev_block.header_block, validated, pos
                    )
                    if (
                        result == ReceiveBlockResult.INVALID_BLOCK
                        or result == ReceiveBlockResult.DISCONNECTED_BLOCK
                    ):
                        raise RuntimeError(f"Invalid block {block.header_hash}")

                    # Always immediately add the block to the database, after updating blockchain state
                    await self.store.add_block(block)

                assert (
                    max([h.height for h in self.blockchain.get_current_tips()])
                    >= height
                )
                self.store.set_proof_of_time_estimate_ips(
                    self.blockchain.get_next_ips(block.header_block)
                )
            log.info(
                f"Took {time.time() - validation_start_time} seconds to validate and add blocks "
                f"{height_checkpoint} to {end_height}."
            )
        assert max([h.height for h in self.blockchain.get_current_tips()]) == tip_height
        log.info(
            f"Finished sync up to height {tip_height}. Total time: "
            f"{round((time.time() - sync_start_time)/60, 2)} minutes."
        )
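
Both download loops above walk the same batch windows; the range computation can be sketched on its own (the batch size is an assumed config value):

# Hedged sketch of the [start, end) request windows used during sync.
def batch_ranges(start_height: int, tip_height: int, batch_size: int):
    for lo in range(start_height, tip_height + 1, batch_size):
        yield lo, min(lo + batch_size, tip_height + 1)

assert list(batch_ranges(1, 10, 4)) == [(1, 5), (5, 9), (9, 11)]
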
Code example #6
    async def validate_block(
        self,
        block: FullBlock,
        prev_full_block: Optional[HeaderBlock] = None,
        genesis: bool = False,
        pre_validated: bool = False,
        pos_quality: Optional[bytes32] = None,
    ) -> bool:
        """
        Block validation algorithm. Returns true iff the candidate block is fully valid,
        and extends something in the blockchain.
        """
        # 1. Validate unfinished block (check the rest of the conditions)
        if not (
            await self.validate_unfinished_block(
                block, genesis, pre_validated, pos_quality
            )
        ):
            return False

        difficulty: uint64
        ips: uint64
        if not genesis:
            difficulty = self.get_next_difficulty(block.prev_header_hash)
            assert prev_full_block is not None
            ips = self.get_next_ips(prev_full_block)
        else:
            difficulty = uint64(self.constants["DIFFICULTY_STARTING"])
            ips = uint64(self.constants["VDF_IPS_STARTING"])

        # 2. Check proof of space hash
        if not pre_validated:
            if not block.header_block.challenge or not block.header_block.proof_of_time:
                return False
            if (
                block.header_block.proof_of_space.get_hash()
                != block.header_block.challenge.proof_of_space_hash
            ):
                return False

        # 3. Check number of iterations on PoT is correct, based on prev block and PoS
        if pos_quality is None:
            pos_quality = block.header_block.proof_of_space.verify_and_get_quality()

        if pos_quality is None:
            return False

        number_of_iters: uint64 = calculate_iterations_quality(
            pos_quality,
            block.header_block.proof_of_space.size,
            difficulty,
            ips,
            self.constants["MIN_BLOCK_TIME"],
        )

        if block.header_block.proof_of_time is None:
            return False

        if number_of_iters != block.header_block.proof_of_time.number_of_iterations:
            return False

        # 4. Check PoT
        if not pre_validated:
            if not block.header_block.proof_of_time.is_valid(
                self.constants["DISCRIMINANT_SIZE_BITS"]
            ):
                return False

        if block.header_block.challenge is None:
            return False

        if block.body.coinbase.height != block.header_block.challenge.height:
            return False

        if not genesis:
            prev_block: Optional[SmallHeaderBlock] = self.headers.get(
                block.prev_header_hash, None
            )
            if not prev_block or not prev_block.challenge:
                return False

            # 5. Check that the PoT challenge_hash matches the previous block's challenge
            if (
                block.header_block.proof_of_time.challenge_hash
                != prev_block.challenge.get_hash()
            ):
                return False

            # 6a. Check challenge height = parent height + 1
            if block.header_block.challenge.height != prev_block.challenge.height + 1:
                return False

            # 7a. Check challenge total_weight = parent total_weight + difficulty
            if (
                block.header_block.challenge.total_weight
                != prev_block.challenge.total_weight + difficulty
            ):
                return False

            # 8a. Check challenge total_iters = parent total_iters + number_iters
            if (
                block.header_block.challenge.total_iters
                != prev_block.challenge.total_iters + number_of_iters
            ):
                return False
        else:
            # 6b. If genesis, the challenge height must be 0
            if block.header_block.challenge.height != 0:
                return False

            # 7b. If genesis, the total weight must equal the starting difficulty
            if block.header_block.challenge.total_weight != difficulty:
                return False

            # 8b. If genesis, the total iters must equal number_of_iters
            if block.header_block.challenge.total_iters != number_of_iters:
                return False

        return True
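
Checks 6a-8a reduce to three accumulator invariants between parent and child; stated directly in a hedged sketch, using plain dicts rather than the project's types:

# Hedged sketch of the chain-extension invariants validated above.
def extends(parent: dict, child: dict, difficulty: int, number_of_iters: int) -> bool:
    return (
        child["height"] == parent["height"] + 1
        and child["total_weight"] == parent["total_weight"] + difficulty
        and child["total_iters"] == parent["total_iters"] + number_of_iters
    )

parent = {"height": 9, "total_weight": 900, "total_iters": 5000}
child = {"height": 10, "total_weight": 1000, "total_iters": 5600}
assert extends(parent, child, difficulty=100, number_of_iters=600)
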
Code example #7
    async def respond_to_offer(
        self, file_path: Path
    ) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
        has_wallets = await self.maybe_create_wallets_for_offer(file_path)
        if not has_wallets:
            return False, None, "Unknown Error"
        trade_offer_hex = file_path.read_text()
        trade_offer: TradeRecord = TradeRecord.from_bytes(
            bytes.fromhex(trade_offer_hex)
        )
        offer_spend_bundle = trade_offer.spend_bundle

        coinsols: List[CoinSolution] = []
        cc_coinsol_outamounts: Dict[bytes32, List[Tuple[Any, int]]] = dict()
        # Used for generating auditor solution, key is colour
        auditees: Dict[bytes32, List[Tuple[bytes32, bytes32, Any, int]]] = dict()
        aggsig = offer_spend_bundle.aggregated_signature
        cc_discrepancies: Dict[bytes32, int] = dict()
        chia_discrepancy = None
        wallets: Dict[bytes32, Any] = dict()  # colour to wallet dict

        for coinsol in offer_spend_bundle.coin_solutions:
            puzzle = coinsol.solution.first()
            solution = coinsol.solution.rest().first()

            # work out the deficits between coin amount and expected output for each
            if cc_wallet_puzzles.check_is_cc_puzzle(puzzle):
                parent_info = binutils.disassemble(solution.rest().first()).split(" ")

                if len(parent_info) > 1:
                    # Calculate output amounts
                    colour = cc_wallet_puzzles.get_genesis_from_puzzle(
                        binutils.disassemble(puzzle)
                    )
                    if colour not in wallets:
                        wallets[
                            colour
                        ] = await self.wallet_state_manager.get_wallet_for_colour(
                            colour
                        )
                    unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(
                        wallets[colour].wallet_info.id
                    )
                    if coinsol.coin in [record.coin for record in unspent]:
                        return False, None, "can't respond to own offer"
                    innerpuzzlereveal = solution.rest().rest().rest().first()
                    innersol = solution.rest().rest().rest().rest().first()
                    out_amount = cc_wallet_puzzles.get_output_amount_for_puzzle_and_solution(
                        innerpuzzlereveal, innersol
                    )

                    if colour in cc_discrepancies:
                        cc_discrepancies[colour] += coinsol.coin.amount - out_amount
                    else:
                        cc_discrepancies[colour] = coinsol.coin.amount - out_amount
                    # Store coinsol and output amount for later
                    if colour in cc_coinsol_outamounts:
                        cc_coinsol_outamounts[colour].append((coinsol, out_amount))
                    else:
                        cc_coinsol_outamounts[colour] = [(coinsol, out_amount)]

                    # auditees should be (primary_input, innerpuzhash, coin_amount, output_amount)
                    if colour in auditees:
                        auditees[colour].append(
                            (
                                coinsol.coin.parent_coin_info,
                                Program(innerpuzzlereveal).get_tree_hash(),
                                coinsol.coin.amount,
                                out_amount,
                            )
                        )
                    else:
                        auditees[colour] = [
                            (
                                coinsol.coin.parent_coin_info,
                                Program(innerpuzzlereveal).get_tree_hash(),
                                coinsol.coin.amount,
                                out_amount,
                            )
                        ]
                else:
                    coinsols.append(coinsol)
            else:
                # standard chia coin
                unspent = await self.wallet_state_manager.get_spendable_coins_for_wallet(
                    1
                )
                if coinsol.coin in [record.coin for record in unspent]:
                    return False, None, "can't respond to own offer"
                if chia_discrepancy is None:
                    chia_discrepancy = cc_wallet_puzzles.get_output_discrepancy_for_puzzle_and_solution(
                        coinsol.coin, puzzle, solution
                    )
                else:
                    chia_discrepancy += cc_wallet_puzzles.get_output_discrepancy_for_puzzle_and_solution(
                        coinsol.coin, puzzle, solution
                    )
                coinsols.append(coinsol)

        chia_spend_bundle: Optional[SpendBundle] = None
        if chia_discrepancy is not None:
            chia_spend_bundle = await self.wallet_state_manager.main_wallet.create_spend_bundle_relative_chia(
                chia_discrepancy, []
            )

        zero_spend_list: List[SpendBundle] = []
        # create coloured coin
        self.log.info(cc_discrepancies)
        for colour in cc_discrepancies.keys():
            if cc_discrepancies[colour] < 0:
                my_cc_spends = await wallets[colour].select_coins(
                    abs(cc_discrepancies[colour])
                )
            else:
                if chia_spend_bundle is None:
                    to_exclude: List = []
                else:
                    to_exclude = chia_spend_bundle.removals()
                my_cc_spends = await wallets[colour].select_coins(0)
                if my_cc_spends is None or my_cc_spends == set():
                    zero_spend_bundle: SpendBundle = await wallets[
                        colour
                    ].generate_zero_val_coin(False, to_exclude)
                    if zero_spend_bundle is None:
                        return (
                            False,
                            None,
                            "Unable to generate zero value coin. Confirm that you have chia available",
                        )
                    zero_spend_list.append(zero_spend_bundle)

                    additions = zero_spend_bundle.additions()
                    removals = zero_spend_bundle.removals()
                    my_cc_spends = set()
                    for add in additions:
                        if add not in removals and add.amount == 0:
                            my_cc_spends.add(add)

            if my_cc_spends == set() or my_cc_spends is None:
                return False, None, "insufficient funds"

            auditor = my_cc_spends.pop()
            auditor_inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(
                auditor.puzzle_hash
            )
            assert auditor_inner_puzzle is not None
            inner_hash = auditor_inner_puzzle.get_tree_hash()

            auditor_info = (
                auditor.parent_coin_info,
                inner_hash,
                auditor.amount,
            )
            auditor_formatted = (
                f"(0x{auditor.parent_coin_info} 0x{inner_hash} {auditor.amount})"
            )
            core = cc_wallet_puzzles.cc_make_core(colour)
            parent_info = await wallets[colour].get_parent_for_coin(auditor)

            for coloured_coin in my_cc_spends:
                inner_solution = self.wallet_state_manager.main_wallet.make_solution(
                    consumed=[auditor.name()]
                )
                sig = await wallets[colour].get_sigs_for_innerpuz_with_innersol(
                    await self.get_inner_puzzle_for_puzzle_hash(
                        coloured_coin.puzzle_hash
                    ),
                    inner_solution,
                )
                aggsig = AugSchemeMPL.aggregate([sig, aggsig])
                inner_puzzle = await self.get_inner_puzzle_for_puzzle_hash(
                    coloured_coin.puzzle_hash
                )
                assert inner_puzzle is not None
                # auditees should be (primary_input, innerpuzhash, coin_amount, output_amount)
                auditees[colour].append(
                    (
                        coloured_coin.parent_coin_info,
                        inner_puzzle.get_tree_hash(),
                        coloured_coin.amount,
                        0,
                    )
                )

                solution = cc_wallet_puzzles.cc_make_solution(
                    core,
                    (
                        parent_info.parent_name,
                        parent_info.inner_puzzle_hash,
                        parent_info.amount,
                    ),
                    coloured_coin.amount,
                    binutils.disassemble(inner_puzzle),
                    binutils.disassemble(inner_solution),
                    auditor_info,
                    None,
                )
                coin_spend = CoinSolution(
                    coloured_coin,
                    Program.to(
                        [
                            cc_wallet_puzzles.cc_make_puzzle(
                                inner_puzzle.get_tree_hash(), core,
                            ),
                            solution,
                        ]
                    ),
                )
                coinsols.append(coin_spend)

                ephemeral = cc_wallet_puzzles.create_spend_for_ephemeral(
                    coloured_coin, auditor, 0
                )
                coinsols.append(ephemeral)

                auditor = cc_wallet_puzzles.create_spend_for_auditor(
                    auditor, coloured_coin
                )
                coinsols.append(auditor)

            # Tweak the offer's solution to include the new auditor
            for cc_coinsol_out in cc_coinsol_outamounts[colour]:
                cc_coinsol = cc_coinsol_out[0]
                offer_sol = binutils.disassemble(cc_coinsol.solution)
                # auditor is (primary_input, innerpuzzlehash, amount)
                offer_sol = offer_sol.replace(
                    "))) ()) () ()))", f"))) ()) {auditor_formatted} ()))"
                )
                new_coinsol = CoinSolution(
                    cc_coinsol.coin, binutils.assemble(offer_sol)
                )
                coinsols.append(new_coinsol)

                eph = cc_wallet_puzzles.create_spend_for_ephemeral(
                    cc_coinsol.coin, auditor, cc_coinsol_out[1]
                )
                coinsols.append(eph)

                aud = cc_wallet_puzzles.create_spend_for_auditor(
                    auditor, cc_coinsol.coin
                )
                coinsols.append(aud)

            # Finish the auditor CoinSolution with new information
            newinnerpuzhash = await wallets[colour].get_new_inner_hash()
            outputamount = (
                sum([c.amount for c in my_cc_spends])
                + cc_discrepancies[colour]
                + auditor.amount
            )
            innersol = self.wallet_state_manager.main_wallet.make_solution(
                primaries=[{"puzzlehash": newinnerpuzhash, "amount": outputamount}]
            )
            parent_info = await wallets[colour].get_parent_for_coin(auditor)

            auditees[colour].append(
                (
                    auditor.parent_coin_info,
                    auditor_inner_puzzle.get_tree_hash(),
                    auditor.amount,
                    outputamount,
                )
            )

            sigs: List[G2Element] = await wallets[colour].get_sigs(
                auditor_inner_puzzle, innersol
            )
            aggsig = AugSchemeMPL.aggregate(sigs + [aggsig])

            solution = cc_wallet_puzzles.cc_make_solution(
                core,
                (
                    parent_info.parent_name,
                    parent_info.inner_puzzle_hash,
                    parent_info.amount,
                ),
                auditor.amount,
                binutils.disassemble(auditor_inner_puzzle),
                binutils.disassemble(innersol),
                auditor_info,
                auditees[colour],
            )

            cs = CoinSolution(
                auditor,
                Program.to(
                    [
                        cc_wallet_puzzles.cc_make_puzzle(
                            auditor_inner_puzzle.get_tree_hash(), core
                        ),
                        solution,
                    ]
                ),
            )
            coinsols.append(cs)

            cs_eph = cc_wallet_puzzles.create_spend_for_ephemeral(
                auditor, auditor, outputamount
            )
            coinsols.append(cs_eph)

            cs_aud = cc_wallet_puzzles.create_spend_for_auditor(auditor, auditor)
            coinsols.append(cs_aud)

        spend_bundle = SpendBundle(coinsols, aggsig)
        my_tx_records = []

        if len(zero_spend_list) > 0:
            zero_spend_list.append(spend_bundle)
            spend_bundle = SpendBundle.aggregate(zero_spend_list)

        # Add transaction history for this trade
        now = uint64(int(time.time()))
        if chia_spend_bundle is not None:
            spend_bundle = SpendBundle.aggregate([spend_bundle, chia_spend_bundle])
            if chia_discrepancy < 0:
                tx_record = TransactionRecord(
                    confirmed_at_index=uint32(0),
                    created_at_time=now,
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(chia_discrepancy)),
                    fee_amount=uint64(0),
                    incoming=False,
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=chia_spend_bundle,
                    additions=chia_spend_bundle.additions(),
                    removals=chia_spend_bundle.removals(),
                    wallet_id=uint32(1),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                )
            else:
                tx_record = TransactionRecord(
                    confirmed_at_index=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(chia_discrepancy)),
                    fee_amount=uint64(0),
                    incoming=True,
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=chia_spend_bundle,
                    additions=chia_spend_bundle.additions(),
                    removals=chia_spend_bundle.removals(),
                    wallet_id=uint32(1),
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                )
            my_tx_records.append(tx_record)

        for colour, amount in cc_discrepancies.items():
            wallet = wallets[colour]
            if chia_discrepancy is not None and chia_discrepancy > 0:
                tx_record = TransactionRecord(
                    confirmed_at_index=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(amount)),
                    fee_amount=uint64(0),
                    incoming=False,
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=spend_bundle,
                    additions=spend_bundle.additions(),
                    removals=spend_bundle.removals(),
                    wallet_id=wallet.wallet_info.id,
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                )
            else:
                tx_record = TransactionRecord(
                    confirmed_at_index=uint32(0),
                    created_at_time=uint64(int(time.time())),
                    to_puzzle_hash=token_bytes(),
                    amount=uint64(abs(amount)),
                    fee_amount=uint64(0),
                    incoming=True,
                    confirmed=False,
                    sent=uint32(10),
                    spend_bundle=spend_bundle,
                    additions=spend_bundle.additions(),
                    removals=spend_bundle.removals(),
                    wallet_id=wallet.wallet_info.id,
                    sent_to=[],
                    trade_id=std_hash(spend_bundle.name() + bytes(now)),
                )
            my_tx_records.append(tx_record)

        tx_record = TransactionRecord(
            confirmed_at_index=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=token_bytes(),
            amount=uint64(0),
            fee_amount=uint64(0),
            incoming=False,
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=uint32(0),
            sent_to=[],
            trade_id=std_hash(spend_bundle.name() + bytes(now)),
        )

        now = uint64(int(time.time()))
        trade_record: TradeRecord = TradeRecord(
            confirmed_at_index=uint32(0),
            accepted_at_time=now,
            created_at_time=now,
            my_offer=False,
            sent=uint32(0),
            spend_bundle=offer_spend_bundle,
            tx_spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            trade_id=std_hash(spend_bundle.name() + bytes(now)),
            status=uint32(TradeStatus.PENDING_CONFIRM.value),
            sent_to=[],
        )

        await self.save_trade(trade_record)

        await self.wallet_state_manager.add_pending_transaction(tx_record)
        for tx in my_tx_records:
            await self.wallet_state_manager.add_transaction(tx)

        return True, trade_record, None
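
The per-colour bookkeeping that drives the whole routine is compact enough to show on its own (a hedged sketch; a positive total means the offer supplies more of that colour than it pays out):

# Hedged sketch of the discrepancy accumulation used above.
from collections import defaultdict

cc_discrepancies = defaultdict(int)  # colour -> running deficit
for coin_amount, out_amount, colour in [(100, 60, "red"), (10, 30, "red")]:
    cc_discrepancies[colour] += coin_amount - out_amount
assert cc_discrepancies["red"] == 20
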
Code example #8
def get_next_difficulty(
    constants: ConsensusConstants,
    sub_blocks: BlockchainInterface,
    prev_header_hash: bytes32,
    height: uint32,
    current_difficulty: uint64,
    deficit: uint8,
    new_slot: bool,
    signage_point_total_iters: uint128,
    skip_epoch_check=False,
) -> uint64:
    """
    Returns the difficulty of the next sub-block that extends this sub-block.
    Used to calculate the number of iterations. When changing this, also change the implementation
    in wallet_state_manager.py.

    Args:
        constants: consensus constants being used for this chain
        sub_blocks: blockchain interface mapping header hashes to sub-block records (SBRs)
        prev_header_hash: header hash of the previous sub-block
        height: the sub-block height of the sub-block to look at
        current_difficulty: difficulty at the infusion point of the sub_block at height
        deficit: deficit of the sub_block at height
        new_slot: whether or not there is a new slot after height
        signage_point_total_iters: signage point iters of the sub_block at height
        skip_epoch_check: don't check correct epoch
    """
    next_height: uint32 = uint32(height + 1)

    if next_height < (constants.EPOCH_SUB_BLOCKS -
                      constants.MAX_SUB_SLOT_SUB_BLOCKS):
        # We are in the first epoch
        return uint64(constants.DIFFICULTY_STARTING)

    if not sub_blocks.contains_sub_block(prev_header_hash):
        raise ValueError(f"Header hash {prev_header_hash} not in sub blocks")

    prev_sb: SubBlockRecord = sub_blocks.sub_block_record(prev_header_hash)

    # If we are in the same slot as previous sub-block, return same difficulty
    if not skip_epoch_check:
        _, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants, height, deficit, sub_blocks, prev_header_hash, False)
        if not new_slot or not can_finish_epoch:
            return current_difficulty

    last_block_prev: SubBlockRecord = _get_last_block_in_previous_epoch(
        constants, sub_blocks, prev_sb)

    # Ensure we get a block for the last block as well, and that it is before the signage point
    last_block_curr = prev_sb
    while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_block:
        last_block_curr = sub_blocks.sub_block_record(
            last_block_curr.prev_hash)

    assert last_block_curr.timestamp is not None
    assert last_block_prev.timestamp is not None
    actual_epoch_time: uint64 = uint64(last_block_curr.timestamp -
                                       last_block_prev.timestamp)

    old_difficulty = uint64(
        prev_sb.weight - sub_blocks.sub_block_record(prev_sb.prev_hash).weight)

    # Terms are rearranged so there is only one division.
    new_difficulty_precise = (
        (last_block_curr.weight - last_block_prev.weight) *
        constants.SUB_SLOT_TIME_TARGET //
        (constants.SLOT_SUB_BLOCKS_TARGET * actual_epoch_time))
    # Take only DIFFICULTY_SIGNIFICANT_BITS significant bits
    new_difficulty = uint64(
        truncate_to_significant_bits(new_difficulty_precise,
                                     constants.SIGNIFICANT_BITS))
    assert count_significant_bits(new_difficulty) <= constants.SIGNIFICANT_BITS

    # Only change by a max factor, to prevent attacks (as in the greenpaper), and keep difficulty at least 1
    max_diff = uint64(
        truncate_to_significant_bits(
            constants.DIFFICULTY_FACTOR * old_difficulty,
            constants.SIGNIFICANT_BITS,
        ))
    min_diff = uint64(
        truncate_to_significant_bits(
            old_difficulty // constants.DIFFICULTY_FACTOR,
            constants.SIGNIFICANT_BITS,
        ))
    if new_difficulty >= old_difficulty:
        return min(new_difficulty, max_diff)
    else:
        return max([uint64(1), new_difficulty, min_diff])
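
A worked instance of the adjustment formula, with made-up numbers (all constants here are assumptions for illustration only):

# Hedged sketch: new_difficulty_precise = weight_delta * SUB_SLOT_TIME_TARGET
#                                         // (SLOT_SUB_BLOCKS_TARGET * actual_epoch_time)
SUB_SLOT_TIME_TARGET = 600   # assumed value, target seconds per slot
SLOT_SUB_BLOCKS_TARGET = 32  # assumed value, target sub-blocks per slot
weight_delta = 64000         # last_block_curr.weight - last_block_prev.weight
actual_epoch_time = 1200     # seconds the epoch actually took

# Same rearranged form as above: a single integer division at the end.
new_difficulty_precise = (
    weight_delta * SUB_SLOT_TIME_TARGET
    // (SLOT_SUB_BLOCKS_TARGET * actual_epoch_time)
)
assert new_difficulty_precise == 1000  # 38400000 // 38400
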
Code example #9
    async def test_assert_time_exceeds(self, two_nodes):

        num_blocks = 10
        wallet_a = WALLET_A
        coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
        receiver_puzzlehash = BURN_PUZZLE_HASH

        # Farm blocks
        blocks = bt.get_consecutive_blocks(
            num_blocks,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            guarantee_transaction_block=True)
        full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
        full_node_1 = full_node_api_1.full_node

        for block in blocks:
            await full_node_api_1.full_node.respond_block(
                full_node_protocol.RespondBlock(block))

        # Coinbase that gets spent
        block1 = blocks[2]
        spend_coin_block_1 = None
        for coin in list(block1.get_included_reward_coins()):
            if coin.puzzle_hash == coinbase_puzzlehash:
                spend_coin_block_1 = coin
        assert spend_coin_block_1 is not None

        # This condition requires the block1 coinbase to be spent more than
        # 30 seconds after the latest block's timestamp
        current_time_plus30 = uint64(
            blocks[-1].foliage_transaction_block.timestamp + 30)
        block1_cvp = ConditionVarPair(
            ConditionOpcode.ASSERT_SECONDS_NOW_EXCEEDS,
            [int_to_bytes(current_time_plus30)])
        block1_dic = {block1_cvp.opcode: [block1_cvp]}
        block1_spend_bundle = wallet_a.generate_signed_transaction(
            1000, receiver_puzzlehash, spend_coin_block_1, block1_dic)

        # Spend bundle that will be sent too early
        assert block1_spend_bundle is not None
        invalid_new_blocks = bt.get_consecutive_blocks(
            1,
            blocks,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            transaction_data=block1_spend_bundle,
            time_per_block=20,
            guarantee_transaction_block=True,
        )

        # Try to validate the block before the 30 seconds have elapsed
        res, err, _ = await full_node_1.blockchain.receive_block(
            invalid_new_blocks[-1])
        assert res == ReceiveBlockResult.INVALID_BLOCK
        assert err == Err.ASSERT_SECONDS_NOW_EXCEEDS_FAILED

        valid_new_blocks = bt.get_consecutive_blocks(
            1,
            blocks,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            transaction_data=block1_spend_bundle,
            guarantee_transaction_block=True,
            time_per_block=31,
        )
        res, err, _ = await full_node_1.blockchain.receive_block(
            valid_new_blocks[-1])
        assert err is None
        assert res == ReceiveBlockResult.NEW_PEAK
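
The two outcomes hinge on simple timestamp arithmetic; a hedged sketch of the ASSERT_SECONDS_NOW_EXCEEDS comparison the test exercises (illustrative, not the node's code):

# Hedged sketch of the time-lock condition.
def seconds_now_exceeds(block_timestamp: int, threshold: int) -> bool:
    return block_timestamp > threshold

base = 1000000
threshold = base + 30  # lock: spendable only after base + 30
assert not seconds_now_exceeds(base + 20, threshold)  # INVALID_BLOCK case
assert seconds_now_exceeds(base + 31, threshold)      # NEW_PEAK case
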
Code example #10
async def validate_unfinished_block_header(
    constants: ConsensusConstants,
    headers: Dict[bytes32, Header],
    height_to_hash: Dict[uint32, bytes32],
    block_header: Header,
    proof_of_space: ProofOfSpace,
    prev_header_block: Optional[HeaderBlock],
    pre_validated: bool = False,
    pos_quality_string: Optional[bytes32] = None,
) -> Tuple[Optional[Err], Optional[uint64]]:
    """
    Block validation algorithm. Returns the number of VDF iterations that this block's
    proof of time must have, if the candidate block is fully valid (except for proof of
    time). The same as validate_block, but without proof of time and challenge validation.
    If the block is invalid, an error code is returned.

    Does NOT validate transactions and fees.
    """
    if not pre_validated:
        # 1. The hash of the proof of space must match header_data.proof_of_space_hash
        if proof_of_space.get_hash() != block_header.data.proof_of_space_hash:
            return (Err.INVALID_POSPACE_HASH, None)

        # 2. The coinbase signature must be valid, according to the pool public key
        # TODO: change numbers

        # 3. Check harvester signature of header data is valid based on harvester key
        validates = blspy.AugSchemeMPL.verify(
            proof_of_space.plot_public_key,
            block_header.data.get_hash(),
            block_header.plot_signature,
        )
        if not validates:
            return (Err.INVALID_PLOT_SIGNATURE, None)

    # 4. If not genesis, the previous block must exist
    if prev_header_block is not None and block_header.prev_header_hash not in headers:
        return (Err.DOES_NOT_EXTEND, None)

    # 5. If not genesis, the timestamp must be >= the average timestamp of the last 11 blocks
    # and less than 2 hours in the future (if block height < 11, average all previous blocks).
    # The average is the sum, integer-divided by the number of timestamps
    if prev_header_block is not None:
        last_timestamps: List[uint64] = []
        curr = prev_header_block.header
        while len(last_timestamps) < constants["NUMBER_OF_TIMESTAMPS"]:
            last_timestamps.append(curr.data.timestamp)
            fetched = headers.get(curr.prev_header_hash, None)
            if not fetched:
                break
            curr = fetched
        if len(last_timestamps) != constants["NUMBER_OF_TIMESTAMPS"]:
            # For blocks 1 to 10, average timestamps of all previous blocks
            assert curr.height == 0
        prev_time: uint64 = uint64(
            int(sum(last_timestamps) // len(last_timestamps)))
        if block_header.data.timestamp < prev_time:
            return (Err.TIMESTAMP_TOO_FAR_IN_PAST, None)
        if block_header.data.timestamp > time.time() + constants["MAX_FUTURE_TIME"]:
            return (Err.TIMESTAMP_TOO_FAR_IN_FUTURE, None)

    # 7. Extension data must be valid, if any is present

    # Compute challenge of parent
    challenge_hash: bytes32
    if prev_header_block is not None:
        challenge: Challenge = prev_header_block.challenge
        challenge_hash = challenge.get_hash()
        # 8. Check challenge hash of prev is the same as in pos
        if challenge_hash != proof_of_space.challenge_hash:
            return (Err.INVALID_POSPACE_CHALLENGE, None)

    # 10. The proof of space must be valid on the challenge
    if pos_quality_string is None:
        pos_quality_string = proof_of_space.verify_and_get_quality_string(
            constants["NUMBER_ZERO_BITS_CHALLENGE_SIG"])
        if not pos_quality_string:
            return (Err.INVALID_POSPACE, None)

    if prev_header_block is not None:
        # 11. If not genesis, the height on the previous block must be one less than on this block
        if block_header.height != prev_header_block.height + 1:
            return (Err.INVALID_HEIGHT, None)
    else:
        # 12. If genesis, the height must be 0
        if block_header.height != 0:
            return (Err.INVALID_HEIGHT, None)

    # 13. The pool max height must be valid
    if (block_header.data.pool_target.max_height != 0 and
            block_header.data.pool_target.max_height < block_header.height):
        return (Err.INVALID_POOL_TARGET, None)

    difficulty: uint64
    if prev_header_block is not None:
        difficulty = get_next_difficulty(constants, headers, height_to_hash,
                                         prev_header_block.header)
        min_iters = get_next_min_iters(constants, headers, height_to_hash,
                                       prev_header_block)
    else:
        difficulty = uint64(constants["DIFFICULTY_STARTING"])
        min_iters = uint64(constants["MIN_ITERS_STARTING"])

    number_of_iters: uint64 = calculate_iterations_quality(
        pos_quality_string,
        proof_of_space.size,
        difficulty,
        min_iters,
    )

    assert count_significant_bits(difficulty) <= constants["SIGNIFICANT_BITS"]
    assert count_significant_bits(min_iters) <= constants["SIGNIFICANT_BITS"]

    if prev_header_block is not None:
        # 17. If not genesis, the total weight must be the parent weight + difficulty
        if block_header.weight != prev_header_block.weight + difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 18. If not genesis, the total iters must be parent iters + number_iters
        if (block_header.data.total_iters !=
                prev_header_block.header.data.total_iters + number_of_iters):
            return (Err.INVALID_TOTAL_ITERS, None)
    else:
        # 19. If genesis, the total weight must be starting difficulty
        if block_header.weight != difficulty:
            return (Err.INVALID_WEIGHT, None)

        # 20. If genesis, the total iters must be number iters
        if block_header.data.total_iters != number_of_iters:
            return (Err.INVALID_TOTAL_ITERS, None)

    return (None, number_of_iters)
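Step 5 above is easy to get wrong: the lower bound is the integer average of the last NUMBER_OF_TIMESTAMPS timestamps, and the upper bound is the local clock plus MAX_FUTURE_TIME. A minimal standalone sketch of that window check (assumed constant values, not the real consensus constants):

import time

NUMBER_OF_TIMESTAMPS = 11       # assumed value, for illustration only
MAX_FUTURE_TIME = 2 * 60 * 60   # "less than 2 hours in the future", per step 5

def timestamp_in_window(new_timestamp: int, last_timestamps: list) -> bool:
    # Integer average of up to the last NUMBER_OF_TIMESTAMPS timestamps
    window = last_timestamps[-NUMBER_OF_TIMESTAMPS:]
    prev_time = sum(window) // len(window)
    return prev_time <= new_timestamp <= time.time() + MAX_FUTURE_TIME

history = [1_600_000_000 + 20 * i for i in range(11)]
assert timestamp_in_window(history[-1] + 5, history)
assert not timestamp_in_window(history[0] - 1, history)  # below the average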
Code example #11
def get_next_sub_slot_iters(
    constants: ConsensusConstants,
    sub_blocks: BlockchainInterface,
    prev_header_hash: bytes32,
    height: uint32,
    curr_sub_slot_iters: uint64,
    deficit: uint8,
    new_slot: bool,
    signage_point_total_iters: uint128,
    skip_epoch_check=False,
) -> uint64:
    """
    Returns the slot iterations required for the next block after the one at height, where new_slot is true
    iff the next block will be in the next slot.

    Args:
        constants: consensus constants being used for this chain
        sub_blocks: interface from header hash to the sub-block records (SBR) of all included sub-blocks
        prev_header_hash: header hash of the previous sub-block
        height: the sub-block height of the sub-block to look at
        curr_sub_slot_iters: sub-slot iters at the infusion point of the sub_block at height
        deficit: deficit of the sub_block at height
        new_slot: whether or not there is a new slot after height
        signage_point_total_iters: signage point iters of the sub_block at height
        skip_epoch_check: don't check correct epoch
    """
    next_height: uint32 = uint32(height + 1)

    if next_height < (constants.EPOCH_SUB_BLOCKS -
                      constants.MAX_SUB_SLOT_SUB_BLOCKS):
        return uint64(constants.SUB_SLOT_ITERS_STARTING)

    if not sub_blocks.contains_sub_block(prev_header_hash):
        raise ValueError(f"Header hash {prev_header_hash} not in sub blocks")

    prev_sb: SubBlockRecord = sub_blocks.sub_block_record(prev_header_hash)

    # If we are in the same epoch, return same ssi
    if not skip_epoch_check:
        _, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants, height, deficit, sub_blocks, prev_header_hash, False)
        if not new_slot or not can_finish_epoch:
            return curr_sub_slot_iters

    last_block_prev: SubBlockRecord = _get_last_block_in_previous_epoch(
        constants, sub_blocks, prev_sb)

    # Ensure we get a block for the last block as well, and that it is before the signage point
    last_block_curr = prev_sb
    while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_block:
        last_block_curr = sub_blocks.sub_block_record(
            last_block_curr.prev_hash)
    assert last_block_curr.timestamp is not None and last_block_prev.timestamp is not None

    # This is computed as the iterations per second in last epoch, times the target number of seconds per slot
    new_ssi_precise: uint64 = uint64(
        constants.SUB_SLOT_TIME_TARGET *
        (last_block_curr.total_iters - last_block_prev.total_iters) //
        (last_block_curr.timestamp - last_block_prev.timestamp))
    new_ssi = uint64(
        truncate_to_significant_bits(new_ssi_precise,
                                     constants.SIGNIFICANT_BITS))

    # Only change by a max factor as a sanity check
    max_ssi = uint64(
        truncate_to_significant_bits(
            constants.DIFFICULTY_FACTOR * last_block_curr.sub_slot_iters,
            constants.SIGNIFICANT_BITS,
        ))
    min_ssi = uint64(
        truncate_to_significant_bits(
            last_block_curr.sub_slot_iters // constants.DIFFICULTY_FACTOR,
            constants.SIGNIFICANT_BITS,
        ))
    if new_ssi >= last_block_curr.sub_slot_iters:
        new_ssi = min(new_ssi, max_ssi)
    else:
        new_ssi = uint64(max([constants.NUM_SPS_SUB_SLOT, new_ssi, min_ssi]))

    new_ssi = uint64(
        new_ssi -
        new_ssi % constants.NUM_SPS_SUB_SLOT)  # Must divide the sub slot
    assert count_significant_bits(new_ssi) <= constants.SIGNIFICANT_BITS
    return new_ssi
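The adjustment above reduces to: iterations per second over the last epoch, times the sub-slot time target, clamped to within DIFFICULTY_FACTOR of the current value and rounded down to a multiple of NUM_SPS_SUB_SLOT (the truncation to significant bits is omitted here). A toy recomputation with made-up constants and numbers:

SUB_SLOT_TIME_TARGET = 600  # assumed: seconds per sub-slot
NUM_SPS_SUB_SLOT = 64       # assumed: signage points per sub-slot
DIFFICULTY_FACTOR = 3       # assumed: max change per adjustment

def next_ssi(curr_ssi: int, delta_iters: int, delta_seconds: int) -> int:
    new_ssi = SUB_SLOT_TIME_TARGET * delta_iters // delta_seconds
    if new_ssi >= curr_ssi:
        new_ssi = min(new_ssi, DIFFICULTY_FACTOR * curr_ssi)  # cap increases
    else:
        new_ssi = max(NUM_SPS_SUB_SLOT, new_ssi, curr_ssi // DIFFICULTY_FACTOR)
    return new_ssi - new_ssi % NUM_SPS_SUB_SLOT  # must divide the sub-slot evenly

# Epoch ran ~10% fast: more iterations in the same wall-clock time raises the ssi
print(next_ssi(100_000_000, delta_iters=110_000_000_000, delta_seconds=600_000))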
Code example #12
 # We override this value based on the chain being run (testnet0, testnet1, mainnet, etc)
 # Default used for tests is std_hash(b'')
 "GENESIS_CHALLENGE": bytes.fromhex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"),
 "GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex(
     "d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc"
 ),
 "GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex(
     "3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af"
 ),
 "MAX_VDF_WITNESS_SIZE": 64,
 # Target tx count per sec
 "TX_PER_SEC": 20,
 # Size of mempool = 10x the size of block
 "MEMPOOL_BLOCK_BUFFER": 10,
 # Max coin amount, fits into 64 bits
 "MAX_COIN_AMOUNT": uint64((1 << 64) - 1),
 # Targeting twice bitcoin's block size of 1.3MB per block
 # Raw size per block target = 1,300,000 * 47 / 600 = approx 100 KB
 # Raw TX (single in, single out) = 219 bytes (not compressed)
 # TX = 457 vBytes
 # floor(100 * 1024 * 457 / 219) = 213684 (size in vBytes)
 # Max block cost in virtual bytes
 "MAX_BLOCK_COST": 213684,
 # MAX block cost in clvm cost units = MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT
 # 1 vByte = 108 clvm cost units
 "CLVM_COST_RATIO_CONSTANT": 108,
 # Max block cost in clvm cost units (MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT)
 # "MAX_BLOCK_COST_CLVM": 23077872,
 "MAX_BLOCK_COST_CLVM": 40000000,  # Based on arvid analysis
 "WEIGHT_PROOF_THRESHOLD": 2,
 "BLOCKS_CACHE_SIZE": 4608 + (128 * 4),
Code example #13
    async def test1(self, simulation):
        test_rpc_port = uint16(21522)
        test_rpc_port_2 = uint16(21523)
        harvester, farmer_api = simulation

        def stop_node_cb():
            pass

        def stop_node_cb_2():
            pass

        config = bt.config
        hostname = config["self_hostname"]
        daemon_port = config["daemon_port"]

        farmer_rpc_api = FarmerRpcApi(farmer_api.farmer)
        harvester_rpc_api = HarvesterRpcApi(harvester)

        rpc_cleanup = await start_rpc_server(
            farmer_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port,
            stop_node_cb,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )
        rpc_cleanup_2 = await start_rpc_server(
            harvester_rpc_api,
            hostname,
            daemon_port,
            test_rpc_port_2,
            stop_node_cb_2,
            bt.root_path,
            config,
            connect_to_daemon=False,
        )

        try:
            client = await FarmerRpcClient.create(self_hostname, test_rpc_port, bt.root_path, config)
            client_2 = await HarvesterRpcClient.create(self_hostname, test_rpc_port_2, bt.root_path, config)

            async def have_connections():
                return len(await client.get_connections()) > 0

            await time_out_assert(15, have_connections, True)

            assert (await client.get_signage_point(std_hash(b"2"))) is None
            assert len(await client.get_signage_points()) == 0

            async def have_signage_points():
                return len(await client.get_signage_points()) > 0

            sp = farmer_protocol.NewSignagePoint(
                std_hash(b"1"), std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(2)
            )
            await farmer_api.new_signage_point(sp)

            await time_out_assert(5, have_signage_points, True)
            assert (await client.get_signage_point(std_hash(b"2"))) is not None

            async def have_plots():
                return len((await client_2.get_plots())["plots"]) > 0

            await time_out_assert(5, have_plots, True)

            res = await client_2.get_plots()
            num_plots = len(res["plots"])
            assert num_plots > 0
            plot_dir = get_plot_dir() / "subdir"
            plot_dir.mkdir(parents=True, exist_ok=True)

            plot_dir_sub = get_plot_dir() / "subdir" / "subsubdir"
            plot_dir_sub.mkdir(parents=True, exist_ok=True)

            plotter = DiskPlotter()
            filename = "test_farmer_harvester_rpc_plot.plot"
            filename_2 = "test_farmer_harvester_rpc_plot2.plot"
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename,
                18,
                stream_plot_info_pk(bt.pool_pk, bt.farmer_pk, AugSchemeMPL.key_gen(bytes([4] * 32))),
                token_bytes(32),
                128,
                0,
                2000,
                0,
                False,
            )

            # Making a plot with a puzzle hash encoded into it instead of pk
            plot_id_2 = token_bytes(32)
            plotter.create_plot_disk(
                str(plot_dir),
                str(plot_dir),
                str(plot_dir),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            # Making the same plot, in a different dir. This should not be farmed
            plotter.create_plot_disk(
                str(plot_dir_sub),
                str(plot_dir_sub),
                str(plot_dir_sub),
                filename_2,
                18,
                stream_plot_info_ph(std_hash(b"random ph"), bt.farmer_pk, AugSchemeMPL.key_gen(bytes([5] * 32))),
                plot_id_2,
                128,
                0,
                2000,
                0,
                False,
            )

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots

            assert len(await client_2.get_plot_directories()) == 1

            await client_2.add_plot_directory(str(plot_dir))
            await client_2.add_plot_directory(str(plot_dir_sub))

            assert len(await client_2.get_plot_directories()) == 3

            res_2 = await client_2.get_plots()
            assert len(res_2["plots"]) == num_plots + 2

            await client_2.delete_plot(str(plot_dir / filename))
            await client_2.delete_plot(str(plot_dir / filename_2))
            res_3 = await client_2.get_plots()
            assert len(res_3["plots"]) == num_plots

            await client_2.remove_plot_directory(str(plot_dir))
            assert len(await client_2.get_plot_directories()) == 2

        finally:
            # Checks that the RPC manages to stop the node
            client.close()
            client_2.close()
            await client.await_closed()
            await client_2.await_closed()
            await rpc_cleanup()
            await rpc_cleanup_2()
Code example #14
    async def test_short_sync_with_transactions_wallet(self, wallet_node):
        full_node_1, wallet_node, server_1, server_2 = wallet_node
        wallet_a = wallet_node.wallet_state_manager.main_wallet
        wallet_a_dummy = WalletTool()
        wallet_b = WalletTool()
        coinbase_puzzlehash = await wallet_a.get_new_puzzlehash()
        coinbase_puzzlehash_rest = wallet_b.get_new_puzzlehash()
        puzzle_hashes = [
            await wallet_a.get_new_puzzlehash() for _ in range(10)
        ]
        puzzle_hashes.append(wallet_b.get_new_puzzlehash())

        blocks = bt.get_consecutive_blocks(test_constants, 3, [], 10, b"",
                                           coinbase_puzzlehash)
        for block in blocks:
            [
                _ async for _ in full_node_1.respond_block(
                    full_node_protocol.RespondBlock(block))
            ]
        await server_2.start_client(
            PeerInfo("localhost", uint16(server_1._port)), None)
        await time_out_assert(60, wallet_height_at_least, True, wallet_node, 1)

        server_2.global_connections.close_all_connections()

        dic_h = {}
        prev_coin = blocks[1].get_coinbase()
        for i in range(11):
            pk, sk = await wallet_a.wallet_state_manager.get_keys(
                prev_coin.puzzle_hash)
            transaction_unsigned = wallet_a_dummy.generate_unsigned_transaction(
                1000, puzzle_hashes[i], prev_coin, {}, 0, secretkey=sk)
            spend_bundle = await wallet_a.sign_transaction(transaction_unsigned)
            block_spendbundle = SpendBundle.aggregate([spend_bundle])
            program = best_solution_program(block_spendbundle)
            aggsig = block_spendbundle.aggregated_signature
            prev_coin = Coin(prev_coin.name(), puzzle_hashes[i], uint64(1000))
            dic_h[i + 4] = (program, aggsig)

        blocks = bt.get_consecutive_blocks(test_constants, 13, blocks, 10, b"",
                                           coinbase_puzzlehash_rest, dic_h)
        # Move chain to height 16, with consecutive transactions in blocks 4 to 14
        for block in blocks:
            async for _ in full_node_1.respond_block(
                    full_node_protocol.RespondBlock(block)):
                pass

        # Do a short sync from 0 to 14
        await server_2.start_client(
            PeerInfo("localhost", uint16(server_1._port)), None)
        await time_out_assert(60, wallet_height_at_least, True, wallet_node,
                              14)

        server_2.global_connections.close_all_connections()

        # 2 block rewards and 3 fees
        assert await wallet_a.get_confirmed_balance() == (
            blocks[1].get_coinbase().amount *
            2) + (blocks[1].get_fees_coin().amount * 3)
        # All of our coins are spent and puzzle hashes present
        for puzzle_hash in puzzle_hashes[:-1]:
            records = await wallet_node.wallet_state_manager.wallet_store.get_coin_records_by_puzzle_hash(
                puzzle_hash)
            assert len(records) == 1
            assert records[0].spent and not records[0].coinbase

        # Then do the same but in a reorg chain
        dic_h = {}
        prev_coin = blocks[1].get_coinbase()
        for i in range(11):
            pk, sk = await wallet_a.wallet_state_manager.get_keys(
                prev_coin.puzzle_hash)
            transaction_unsigned = wallet_a_dummy.generate_unsigned_transaction(
                1000, puzzle_hashes[i], prev_coin, {}, 0, secretkey=sk)
            spend_bundle = await wallet_a.sign_transaction(transaction_unsigned)
            block_spendbundle = SpendBundle.aggregate([spend_bundle])
            program = best_solution_program(block_spendbundle)
            aggsig = block_spendbundle.aggregated_signature
            prev_coin = Coin(prev_coin.name(), puzzle_hashes[i], uint64(1000))
            dic_h[i + 4] = (program, aggsig)

        blocks = bt.get_consecutive_blocks(
            test_constants,
            31,
            blocks[:4],
            10,
            b"this is a reorg",
            coinbase_puzzlehash_rest,
            dic_h,
        )

        # Move chain to height 34, with consecutive transactions in blocks 4 to 14
        for block in blocks:
            async for _ in full_node_1.respond_block(
                    full_node_protocol.RespondBlock(block)):
                pass

        # Do a longer sync from 0 on the reorg chain
        await server_2.start_client(
            PeerInfo("localhost", uint16(server_1._port)), None)

        await time_out_assert(60, wallet_height_at_least, True, wallet_node,
                              28)
        server_2.global_connections.close_all_connections()

        # 2 block rewards and 3 fees
        assert await wallet_a.get_confirmed_balance() == (
            blocks[1].get_coinbase().amount *
            2) + (blocks[1].get_fees_coin().amount * 3)
        # All of our coins are spent and puzzle hashes present
        for puzzle_hash in puzzle_hashes[:-1]:
            records = await wallet_node.wallet_state_manager.wallet_store.get_coin_records_by_puzzle_hash(
                puzzle_hash)
            assert len(records) == 1
            assert records[0].spent and not records[0].coinbase

        # Test spending the rewards earned in reorg
        new_coinbase_puzzlehash = await wallet_a.get_new_puzzlehash()
        another_puzzlehash = await wallet_a.get_new_puzzlehash()

        dic_h = {}
        pk, sk = await wallet_a.wallet_state_manager.get_keys(
            new_coinbase_puzzlehash)
        coinbase_coin = create_coinbase_coin(uint32(25),
                                             new_coinbase_puzzlehash,
                                             uint64(14000000000000))
        transaction_unsigned = wallet_a_dummy.generate_unsigned_transaction(
            7000000000000,
            another_puzzlehash,
            coinbase_coin, {},
            0,
            secretkey=sk)
        spend_bundle = await wallet_a.sign_transaction(transaction_unsigned)
        block_spendbundle = SpendBundle.aggregate([spend_bundle])
        program = best_solution_program(block_spendbundle)
        aggsig = block_spendbundle.aggregated_signature
        dic_h[26] = (program, aggsig)

        # Farm a block (25) to ourselves
        blocks = bt.get_consecutive_blocks(
            test_constants,
            1,
            blocks[:25],
            10,
            b"this is yet another reorg",
            new_coinbase_puzzlehash,
        )

        # Brings height up to 40, with block 31 having half our reward spent to us
        blocks = bt.get_consecutive_blocks(
            test_constants,
            15,
            blocks,
            10,
            b"this is yet another reorg more blocks",
            coinbase_puzzlehash_rest,
            dic_h,
        )
        for block in blocks:
            async for _ in full_node_1.respond_block(
                    full_node_protocol.RespondBlock(block)):
                pass

        await server_2.start_client(
            PeerInfo("localhost", uint16(server_1._port)), None)
        await time_out_assert(60, wallet_height_at_least, True, wallet_node,
                              38)

        # 2 block rewards and 4 fees, plus 7000000000000 coins
        assert (await wallet_a.get_confirmed_balance() ==
                (blocks[1].get_coinbase().amount * 2) +
                (blocks[1].get_fees_coin().amount * 4) + 7000000000000)
        records = await wallet_node.wallet_state_manager.wallet_store.get_coin_records_by_puzzle_hash(
            new_coinbase_puzzlehash)
        # Fee and coinbase
        assert len(records) == 2
        print(records)
        assert records[0].spent != records[1].spent
        assert records[0].coinbase == records[1].coinbase
        records = await wallet_node.wallet_state_manager.wallet_store.get_coin_records_by_puzzle_hash(
            another_puzzlehash)
        assert len(records) == 1
        assert not records[0].spent
        assert not records[0].coinbase
Code example #15
    def get_next_ips(self, header_block: HeaderBlock) -> uint64:
        """
        Returns the VDF speed in iterations per second, to be used for the next block. This depends on
        the number of iterations of the last epoch, and changes at the same block as the difficulty.
        """
        block: SmallHeaderBlock = self.headers[header_block.header_hash]
        assert block.challenge is not None

        next_height: uint32 = uint32(block.height + 1)
        if next_height < self.constants["DIFFICULTY_EPOCH"]:
            # First epoch has a hardcoded vdf speed
            return self.constants["VDF_IPS_STARTING"]

        prev_block: SmallHeaderBlock = self.headers[block.prev_header_hash]
        assert prev_block.challenge is not None

        proof_of_space = header_block.proof_of_space
        difficulty = self.get_next_difficulty(prev_block.header_hash)
        iterations = uint64(
            block.challenge.total_iters - prev_block.challenge.total_iters
        )
        prev_ips = calculate_ips_from_iterations(
            proof_of_space, difficulty, iterations, self.constants["MIN_BLOCK_TIME"]
        )

        if (
            next_height % self.constants["DIFFICULTY_EPOCH"]
            != self.constants["DIFFICULTY_DELAY"]
        ):
            # Not at a point where ips would change, so return the previous ips
            # TODO: cache this for efficiency
            return prev_ips

        # ips (along with difficulty) will change in this block, so we need to calculate the new one.
        # The calculation is (iters_2 - iters_1) // (timestamp_2 - timestamp_1).
        # 1 and 2 correspond to height_1 and height_2, being the last block of the second to last, and last
        # block of the last epochs. Basically, it's total iterations over time, of previous epoch.

        # Height1 is the last block 2 epochs ago, so we can include the iterations taken for mining first block in epoch
        height1 = uint32(
            next_height
            - self.constants["DIFFICULTY_EPOCH"]
            - self.constants["DIFFICULTY_DELAY"]
            - 1
        )
        # Height2 is the last block in the previous epoch
        height2 = uint32(next_height - self.constants["DIFFICULTY_DELAY"] - 1)

        block1: Optional[SmallHeaderBlock] = None
        block2: Optional[SmallHeaderBlock] = None
        if block not in self.get_current_tips() or height2 not in self.height_to_hash:
            # This means we are either on a fork, or on one of the chains, but after the LCA,
            # so we manually backtrack.
            curr: Optional[SmallHeaderBlock] = block
            assert curr is not None
            while (
                curr.height not in self.height_to_hash
                or self.height_to_hash[curr.height] != curr.header_hash
            ):
                if curr.height == height1:
                    block1 = curr
                elif curr.height == height2:
                    block2 = curr
                curr = self.headers.get(curr.prev_header_hash, None)
                assert curr is not None
        # Once we are before the fork point (and before the LCA), we can use the height_to_hash map
        if block1 is None and height1 >= 0:
            # height1 could be -1, for the first difficulty calculation
            block1 = self.headers.get(self.height_to_hash[height1], None)
        if block2 is None:
            block2 = self.headers.get(self.height_to_hash[height2], None)
        assert block2 is not None
        assert block2.challenge is not None

        if block1 is not None:
            assert block1.challenge is not None
            timestamp1 = block1.header.data.timestamp
            iters1 = block1.challenge.total_iters
        else:
            # In the case of height == -1, there is no timestamp here, so assume the genesis block
            # took constants["BLOCK_TIME_TARGET"] seconds to mine.
            genesis: SmallHeaderBlock = self.headers[self.height_to_hash[uint32(0)]]
            timestamp1 = (
                genesis.header.data.timestamp - self.constants["BLOCK_TIME_TARGET"]
            )
            assert genesis.challenge is not None
            iters1 = genesis.challenge.total_iters

        timestamp2 = block2.header.data.timestamp
        iters2 = block2.challenge.total_iters

        new_ips = uint64((iters2 - iters1) // (timestamp2 - timestamp1))

        # Only change by a max factor, and must be at least 1
        if new_ips >= prev_ips:
            return min(new_ips, uint64(self.constants["IPS_FACTOR"] * prev_ips))
        else:
            return max(
                [uint64(1), new_ips, uint64(prev_ips // self.constants["IPS_FACTOR"])]
            )
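Stripped of the fork-handling, the update above is total iterations over elapsed time for the previous epoch, clamped so it never moves by more than a factor of IPS_FACTOR per adjustment and never falls below 1. A compact restatement with a made-up factor:

IPS_FACTOR = 3  # assumed value, for illustration only

def clamp_next_ips(prev_ips: int, iters1: int, iters2: int, ts1: int, ts2: int) -> int:
    new_ips = (iters2 - iters1) // (ts2 - ts1)
    if new_ips >= prev_ips:
        return min(new_ips, IPS_FACTOR * prev_ips)  # cap the increase
    return max(1, new_ips, prev_ips // IPS_FACTOR)  # floor the decrease, stay >= 1

assert clamp_next_ips(2000, 0, 10_000_000, 0, 4000) == 2500   # within the clamp
assert clamp_next_ips(2000, 0, 100_000_000, 0, 4000) == 6000  # capped at 3x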
Code example #16
    async def test_basic_store(self):
        blocks = bt.get_consecutive_blocks(test_constants, 9, [], 9, b"0")

        db = await FullNodeStore.create("blockchain_test")
        db_2 = await FullNodeStore.create("blockchain_test_2")
        try:
            await db._clear_database()

            genesis = FullBlock.from_bytes(constants["GENESIS_BLOCK"])

            # Save/get block
            for block in blocks:
                await db.add_block(block)
                assert block == await db.get_block(block.header_hash)

            # Save/get sync
            for sync_mode in (False, True):
                await db.set_sync_mode(sync_mode)
                assert sync_mode == await db.get_sync_mode()

            # clear sync info
            await db.clear_sync_info()

            # add/get potential tip, get potential tips num
            await db.add_potential_tip(blocks[6])
            assert blocks[6] == await db.get_potential_tip(
                blocks[6].header_hash)

            # add/get potential trunk
            header = genesis.header_block
            db.add_potential_header(header)
            assert db.get_potential_header(genesis.height) == header

            # Add potential block
            await db.add_potential_block(genesis)
            assert genesis == await db.get_potential_block(uint32(0))

            # Add/get candidate block
            assert await db.get_candidate_block(0) is None
            partial = (
                blocks[5].body,
                blocks[5].header_block.header.data,
                blocks[5].header_block.proof_of_space,
            )
            await db.add_candidate_block(blocks[5].header_hash, *partial)
            assert await db.get_candidate_block(blocks[5].header_hash) == partial
            await db.clear_candidate_blocks_below(uint32(8))
            assert await db.get_candidate_block(blocks[5].header_hash) is None

            # Add/get unfinished block
            i = 1
            for block in blocks:
                key = (block.header_hash, uint64(1000))

                # Different database should have different data
                await db_2.add_unfinished_block(key, block)

                assert await db.get_unfinished_block(key) is None
                await db.add_unfinished_block(key, block)
                assert await db.get_unfinished_block(key) == block
                assert len(await db.get_unfinished_blocks()) == i
                i += 1
            await db.clear_unfinished_blocks_below(uint32(5))
            assert len(await db.get_unfinished_blocks()) == 5

            # Set/get unf block leader
            assert db.get_unfinished_block_leader() == (0, (1 << 64) - 1)
            db.set_unfinished_block_leader(key)
            assert db.get_unfinished_block_leader() == key

            assert await db.get_disconnected_block(blocks[0].prev_header_hash) is None
            # Disconnected blocks
            for block in blocks:
                await db.add_disconnected_block(block)
                assert await db.get_disconnected_block(block.prev_header_hash) == block

            await db.clear_disconnected_blocks_below(uint32(5))
            assert await db.get_disconnected_block(blocks[4].prev_header_hash) is None

            h_hash_1 = bytes32(token_bytes(32))
            assert not db.seen_unfinished_block(h_hash_1)
            assert db.seen_unfinished_block(h_hash_1)
            db.clear_seen_unfinished_blocks()
            assert not db.seen_unfinished_block(h_hash_1)

        except Exception:
            await db.close()
            await db_2.close()
            raise

        # Different database should have different data
        db_3 = await FullNodeStore.create("blockchain_test_3")
        assert db_3.get_unfinished_block_leader() == (0, (1 << 64) - 1)

        await db.close()
        await db_2.close()
        await db_3.close()
Code example #17
    async def validate_unfinished_block(
        self,
        block: FullBlock,
        genesis: bool = False,
        pre_validated: bool = True,
        pos_quality: Optional[bytes32] = None,
    ) -> bool:
        """
        Block validation algorithm. Returns true if the candidate block is fully valid
        (except for proof of time). The same as validate_block, but without proof of time
        and challenge validation.
        """
        if not pre_validated:
            # 1. Check the proof of space hash is valid
            if (
                block.header_block.proof_of_space.get_hash()
                != block.header_block.header.data.proof_of_space_hash
            ):
                return False

            # 2. Check body hash
            if block.body.get_hash() != block.header_block.header.data.body_hash:
                return False

            # 3. Check coinbase amount
            if (
                calculate_block_reward(block.body.coinbase.height)
                != block.body.coinbase.amount
            ):
                return False

            # 4. Check coinbase signature with pool pk
            if not block.body.coinbase_signature.verify(
                [blspy.Util.hash256(bytes(block.body.coinbase))],
                [block.header_block.proof_of_space.pool_pubkey],
            ):
                return False

            # 5. Check harvester signature of header data is valid based on harvester key
            if not block.header_block.header.harvester_signature.verify(
                [blspy.Util.hash256(block.header_block.header.data.get_hash())],
                [block.header_block.proof_of_space.plot_pubkey],
            ):
                return False

        # 6. Check previous pointer(s) / flyclient
        if not genesis and block.prev_header_hash not in self.headers:
            return False

        # 7. Check Now+2hrs > timestamp > avg timestamp of last 11 blocks
        prev_block: Optional[SmallHeaderBlock] = None
        if not genesis:
            # TODO: do something about first 11 blocks
            last_timestamps: List[uint64] = []
            prev_block = self.headers.get(block.prev_header_hash, None)
            if not prev_block:
                return False
            curr = prev_block
            while len(last_timestamps) < self.constants["NUMBER_OF_TIMESTAMPS"]:
                last_timestamps.append(curr.header.data.timestamp)
                fetched = self.headers.get(curr.prev_header_hash, None)
                if not fetched:
                    break
                curr = fetched
            if (
                len(last_timestamps) != self.constants["NUMBER_OF_TIMESTAMPS"]
                and curr.height != 0
            ):
                return False
            prev_time: uint64 = uint64(int(sum(last_timestamps) / len(last_timestamps)))
            if block.header_block.header.data.timestamp < prev_time:
                return False
            if (
                block.header_block.header.data.timestamp
                > time.time() + self.constants["MAX_FUTURE_TIME"]
            ):
                return False

        # 8. Check filter hash is correct TODO

        # 9. Check extension data, if any is added

        # 10. Compute challenge of parent
        challenge_hash: bytes32
        if not genesis:
            assert prev_block
            assert prev_block.challenge
            challenge_hash = prev_block.challenge.get_hash()

            # 8. Check challenge hash of prev is the same as in pos
            if challenge_hash != block.header_block.proof_of_space.challenge_hash:
                return False
        else:
            assert block.header_block.proof_of_time
            challenge_hash = block.header_block.proof_of_time.challenge_hash

            if challenge_hash != block.header_block.proof_of_space.challenge_hash:
                return False

        # 11. Check proof of space based on challenge
        if pos_quality is None:
            pos_quality = block.header_block.proof_of_space.verify_and_get_quality()
            if not pos_quality:
                return False

        # 12. Check coinbase height = prev height + 1
        if not genesis:
            assert prev_block
            if block.body.coinbase.height != prev_block.height + 1:
                return False
        else:
            if block.body.coinbase.height != 0:
                return False

        # TODO: 14a. check transactions
        # TODO: 14b. Aggregate transaction results into signature
        if block.body.aggregated_signature:
            # TODO: 15. check that aggregate signature is valid, based on pubkeys, and messages
            pass
        # TODO: 16. check fees
        # TODO: 17. check cost
        return True
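Of the numbered checks above, the coinbase ones (3 and 12) are pure arithmetic: the amount must equal the protocol reward for that height, and the height must extend the parent by exactly one. A minimal sketch with a stand-in reward schedule (the real schedule lives in calculate_block_reward; the flat value here is hypothetical):

from typing import Callable

def coinbase_ok(coinbase_height: int, coinbase_amount: int,
                prev_height: int, reward_for: Callable[[int], int]) -> bool:
    # 3. Check coinbase amount
    if reward_for(coinbase_height) != coinbase_amount:
        return False
    # 12. Check coinbase height = prev height + 1
    return coinbase_height == prev_height + 1

def flat_reward(height: int) -> int:
    return 14_000_000_000_000  # hypothetical flat schedule, illustration only

assert coinbase_ok(5, 14_000_000_000_000, 4, flat_reward)
assert not coinbase_ok(5, 1, 4, flat_reward)  # wrong amount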
Code example #18
def get_signage_point_vdf_info(
    constants: ConsensusConstants,
    finished_sub_slots: List[EndOfSubSlotBundle],
    overflow: bool,
    prev_b: Optional[BlockRecord],
    blocks: BlockchainInterface,
    sp_total_iters: uint128,
    sp_iters: uint64,
):
    """
    Returns the following information, for the VDF of the signage point at sp_total_iters.
    cc and rc challenge hash
    cc and rc input
    cc and rc iterations
    """

    new_sub_slot: bool = len(finished_sub_slots) > 0
    genesis_block: bool = prev_b is None

    if new_sub_slot and not overflow:
        # Case 1: start from start of this slot. Case of no overflow slots. Also includes genesis block after empty
        # slot(s), but not overflowing
        rc_vdf_challenge: bytes32 = finished_sub_slots[-1].reward_chain.get_hash()
        cc_vdf_challenge = finished_sub_slots[-1].challenge_chain.get_hash()
        sp_vdf_iters = sp_iters
        cc_vdf_input = ClassgroupElement.get_default_element()
    elif new_sub_slot and overflow and len(finished_sub_slots) > 1:
        # Case 2: start from start of prev slot. This is a rare case of empty prev slot. Includes genesis block after
        # 2 empty slots
        rc_vdf_challenge = finished_sub_slots[-2].reward_chain.get_hash()
        cc_vdf_challenge = finished_sub_slots[-2].challenge_chain.get_hash()
        sp_vdf_iters = sp_iters
        cc_vdf_input = ClassgroupElement.get_default_element()
    elif genesis_block:
        # Case 3: Genesis block case, first challenge
        rc_vdf_challenge = constants.GENESIS_CHALLENGE
        cc_vdf_challenge = constants.GENESIS_CHALLENGE
        sp_vdf_iters = sp_iters
        cc_vdf_input = ClassgroupElement.get_default_element()
    elif new_sub_slot and overflow and len(finished_sub_slots) == 1:
        # Case 4: Starting at prev will put us in the previous sub-slot, since case 2 handled more empty slots
        assert prev_b is not None
        curr: BlockRecord = prev_b
        while not curr.first_in_sub_slot and curr.total_iters > sp_total_iters:
            curr = blocks.block_record(curr.prev_hash)
        if curr.total_iters < sp_total_iters:
            sp_vdf_iters = uint64(sp_total_iters - curr.total_iters)
            cc_vdf_input = curr.challenge_vdf_output
            rc_vdf_challenge = curr.reward_infusion_new_challenge
        else:
            assert curr.finished_reward_slot_hashes is not None
            sp_vdf_iters = sp_iters
            cc_vdf_input = ClassgroupElement.get_default_element()
            rc_vdf_challenge = curr.finished_reward_slot_hashes[-1]

        while not curr.first_in_sub_slot:
            curr = blocks.block_record(curr.prev_hash)
        assert curr.finished_challenge_slot_hashes is not None
        cc_vdf_challenge = curr.finished_challenge_slot_hashes[-1]
    elif not new_sub_slot and overflow:
        # Case 5: prev is in the same sub slot and also overflow. Starting at prev does not skip any sub slots
        assert prev_b is not None
        curr = prev_b

        # Collects the last two finished slots
        if curr.first_in_sub_slot:
            assert curr.finished_challenge_slot_hashes is not None
            assert curr.finished_reward_slot_hashes is not None
            found_sub_slots = list(
                reversed(
                    list(
                        zip(
                            curr.finished_challenge_slot_hashes,
                            curr.finished_reward_slot_hashes,
                        ))))
        else:
            found_sub_slots = []
        sp_pre_sb: Optional[BlockRecord] = None
        while len(found_sub_slots) < 2 and curr.height > 0:
            if sp_pre_sb is None and curr.total_iters < sp_total_iters:
                sp_pre_sb = curr
            curr = blocks.block_record(curr.prev_hash)
            if curr.first_in_sub_slot:
                assert curr.finished_challenge_slot_hashes is not None
                assert curr.finished_reward_slot_hashes is not None
                found_sub_slots += list(
                    reversed(
                        list(
                            zip(
                                curr.finished_challenge_slot_hashes,
                                curr.finished_reward_slot_hashes,
                            ))))
        if sp_pre_sb is None and curr.total_iters < sp_total_iters:
            sp_pre_sb = curr
        if sp_pre_sb is not None:
            sp_vdf_iters = uint64(sp_total_iters - sp_pre_sb.total_iters)
            cc_vdf_input = sp_pre_sb.challenge_vdf_output
            rc_vdf_challenge = sp_pre_sb.reward_infusion_new_challenge
        else:
            sp_vdf_iters = sp_iters
            cc_vdf_input = ClassgroupElement.get_default_element()
            rc_vdf_challenge = found_sub_slots[1][1]
        cc_vdf_challenge = found_sub_slots[1][0]

    elif not new_sub_slot and not overflow:
        # Case 6: prev is in the same sub slot. Starting at prev does not skip any sub slots. We do not need
        # to go back another sub slot, because it's not overflow, so the VDF to signage point is this sub-slot.
        assert prev_b is not None
        curr = prev_b
        while not curr.first_in_sub_slot and curr.total_iters > sp_total_iters:
            curr = blocks.block_record(curr.prev_hash)
        if curr.total_iters < sp_total_iters:
            sp_vdf_iters = uint64(sp_total_iters - curr.total_iters)
            cc_vdf_input = curr.challenge_vdf_output
            rc_vdf_challenge = curr.reward_infusion_new_challenge
        else:
            assert curr.finished_reward_slot_hashes is not None
            sp_vdf_iters = sp_iters
            cc_vdf_input = ClassgroupElement.get_default_element()
            rc_vdf_challenge = curr.finished_reward_slot_hashes[-1]

        while not curr.first_in_sub_slot:
            curr = blocks.block_record(curr.prev_hash)
        assert curr.finished_challenge_slot_hashes is not None
        cc_vdf_challenge = curr.finished_challenge_slot_hashes[-1]
    else:
        # All cases are handled above
        assert False

    return (
        cc_vdf_challenge,
        rc_vdf_challenge,
        cc_vdf_input,
        ClassgroupElement.get_default_element(),  # rc VDF input (always the default element)
        sp_vdf_iters,  # cc iterations
        sp_vdf_iters,  # rc iterations (both chains run the same count)
    )
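All six cases above end by choosing between the same two outcomes: if a predecessor block exists inside the relevant sub-slot and before the signage point, the VDF continues from that block's output for the remaining iterations; otherwise it starts fresh from the default class-group element for the full sp_iters. A schematic of that final choice (names and the tuple shape are placeholders, not chia types):

def sp_vdf_start(sp_total_iters: int, sp_iters: int, predecessor):
    # predecessor: a (total_iters, vdf_output) pair inside the sub-slot, or None
    if predecessor is not None and predecessor[0] < sp_total_iters:
        # continue from the predecessor's output for the remaining iterations
        return predecessor[1], sp_total_iters - predecessor[0]
    # otherwise start from the default element for the full signage-point iters
    return "default_element", sp_iters

print(sp_vdf_start(10_000, 4_000, (7_500, "prev_output")))  # ('prev_output', 2500)
print(sp_vdf_start(10_000, 4_000, None))                    # ('default_element', 4000)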
Code example #19
    async def _create_offer_for_ids(
        self, offer: Dict[int, int]
    ) -> Tuple[bool, Optional[TradeRecord], Optional[str]]:
        """
        Offer is dictionary of wallet ids and amount
        """
        spend_bundle = None
        try:
            for id in offer.keys():
                amount = offer[id]
                wallet_id = uint32(int(id))
                wallet = self.wallet_state_manager.wallets[wallet_id]
                if isinstance(wallet, CCWallet):
                    balance = await wallet.get_confirmed_balance()
                    if balance < abs(amount) and amount < 0:
                        raise Exception(f"insufficient funds in wallet {wallet_id}")
                    if amount > 0:
                        if spend_bundle is None:
                            to_exclude: List[Coin] = []
                        else:
                            to_exclude = spend_bundle.removals()
                        zero_spend_bundle: Optional[
                            SpendBundle
                        ] = await wallet.generate_zero_val_coin(False, to_exclude)

                        if zero_spend_bundle is None:
                            raise Exception(
                                "Failed to generate offer. Zero value coin not created."
                            )
                        if spend_bundle is None:
                            spend_bundle = zero_spend_bundle
                        else:
                            spend_bundle = SpendBundle.aggregate(
                                [spend_bundle, zero_spend_bundle]
                            )

                        additions = zero_spend_bundle.additions()
                        removals = zero_spend_bundle.removals()
                        zero_val_coin: Optional[Coin] = None
                        for add in additions:
                            if add not in removals and add.amount == 0:
                                zero_val_coin = add
                        new_spend_bundle = await wallet.create_spend_bundle_relative_amount(
                            amount, zero_val_coin
                        )
                    else:
                        new_spend_bundle = await wallet.create_spend_bundle_relative_amount(
                            amount
                        )
                elif isinstance(wallet, Wallet):
                    if spend_bundle is None:
                        to_exclude = []
                    else:
                        to_exclude = spend_bundle.removals()
                    new_spend_bundle = await wallet.create_spend_bundle_relative_chia(
                        amount, to_exclude
                    )
                else:
                    return False, None, "unsupported wallet type"
                if new_spend_bundle is None or new_spend_bundle.removals() == []:
                    raise Exception(f"Wallet {id} was unable to create offer.")
                if spend_bundle is None:
                    spend_bundle = new_spend_bundle
                else:
                    spend_bundle = SpendBundle.aggregate(
                        [spend_bundle, new_spend_bundle]
                    )

            if spend_bundle is None:
                return False, None, None

            now = uint64(int(time.time()))
            trade_offer: TradeRecord = TradeRecord(
                confirmed_at_index=uint32(0),
                accepted_at_time=None,
                created_at_time=now,
                my_offer=True,
                sent=uint32(0),
                spend_bundle=spend_bundle,
                tx_spend_bundle=None,
                additions=spend_bundle.additions(),
                removals=spend_bundle.removals(),
                trade_id=std_hash(spend_bundle.name() + bytes(now)),
                status=uint32(TradeStatus.PENDING_ACCEPT.value),
                sent_to=[],
            )
            return True, trade_offer, None
        except Exception as e:
            tb = traceback.format_exc()
            self.log.error(f"Error with creating trade offer: {type(e)}{tb}")
            return False, None, str(e)
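A hedged usage sketch of the method above (the wrapper name and wallet ids here are assumptions): amounts are keyed by wallet id, negative for what this side gives up and positive for what it requests, matching the {1: -10, 2: 30} offer in the test that follows.

async def make_example_offer(trade_manager):
    offer = {1: -10, 2: 30}  # wallet 1 gives 10; wallet 2 requests 30
    success, trade_record, error = await trade_manager._create_offer_for_ids(offer)
    if not success:
        raise RuntimeError(f"offer failed: {error}")
    return trade_record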
Code example #20
def next_sub_epoch_summary(
    constants: ConsensusConstants,
    blocks: BlockchainInterface,
    required_iters: uint64,
    block: Union[UnfinishedBlock, FullBlock],
    can_finish_soon: bool = False,
) -> Optional[SubEpochSummary]:
    """
    Returns the sub-epoch summary that can be included in the block after `block`, if one should be included.
    `block` must be eligible to be the last block in the epoch; otherwise returns None. Assumes that there is a
    new slot ending after `block`.

    Args:
        constants: consensus constants being used for this chain
        blocks: interface to cached SBR
        required_iters: required iters of the proof of space in block
        block: the (potentially) last block in the new epoch
        can_finish_soon: this is useful when sending SES to timelords. We might not be able to finish it, but we will
            soon (within MAX_SUB_SLOT_BLOCKS)

    Returns:
        object: the new sub-epoch summary
    """
    signage_point_index = block.reward_chain_block.signage_point_index
    prev_b: Optional[BlockRecord] = blocks.try_block_record(
        block.prev_header_hash)
    if prev_b is None or prev_b.height == 0:
        return None

    if (len(block.finished_sub_slots) > 0
            and block.finished_sub_slots[0].challenge_chain.new_difficulty is not None):
        # We just included a sub-epoch summary
        return None

    assert prev_b is not None
    # This is the ssi of the current block

    sub_slot_iters = get_next_sub_slot_iters_and_difficulty(
        constants,
        len(block.finished_sub_slots) > 0, prev_b, blocks)[0]
    overflow = is_overflow_block(constants, signage_point_index)

    if (len(block.finished_sub_slots) > 0 and
            block.finished_sub_slots[0].challenge_chain.subepoch_summary_hash
            is not None):
        return None

    if can_finish_soon:
        deficit: uint8 = uint8(0)  # Assume that our deficit will go to zero soon
        can_finish_se = True
        if height_can_be_first_in_epoch(constants, uint32(prev_b.height + 2)):
            can_finish_epoch = True
            if (prev_b.height + 2) % constants.SUB_EPOCH_BLOCKS > 1:
                curr: BlockRecord = prev_b
                while curr.height % constants.SUB_EPOCH_BLOCKS > 0:
                    if (curr.sub_epoch_summary_included is not None
                            and curr.sub_epoch_summary_included.new_difficulty
                            is not None):
                        can_finish_epoch = False
                    curr = blocks.block_record(curr.prev_hash)

                if (curr.sub_epoch_summary_included is not None
                        and curr.sub_epoch_summary_included.new_difficulty
                        is not None):
                    can_finish_epoch = False
        elif height_can_be_first_in_epoch(
                constants,
                uint32(prev_b.height + constants.MAX_SUB_SLOT_BLOCKS + 2)):
            can_finish_epoch = True
        else:
            can_finish_epoch = False
    else:
        deficit = calculate_deficit(
            constants,
            uint32(prev_b.height + 1),
            prev_b,
            overflow,
            len(block.finished_sub_slots),
        )
        can_finish_se, can_finish_epoch = can_finish_sub_and_full_epoch(
            constants,
            blocks,
            uint32(prev_b.height + 1),
            prev_b.header_hash if prev_b is not None else None,
            deficit,
            False,
        )

    # can't finish se, no summary
    if not can_finish_se:
        return None

    next_difficulty = None
    next_sub_slot_iters = None

    # if can finish epoch, new difficulty and ssi
    if can_finish_epoch:
        sp_iters = calculate_sp_iters(constants, sub_slot_iters,
                                      signage_point_index)
        ip_iters = calculate_ip_iters(constants, sub_slot_iters,
                                      signage_point_index, required_iters)

        next_difficulty = _get_next_difficulty(
            constants,
            blocks,
            block.prev_header_hash,
            uint32(prev_b.height + 1),
            uint64(prev_b.weight -
                   blocks.block_record(prev_b.prev_hash).weight),
            deficit,
            False,  # Already checked above
            True,
            uint128(block.total_iters - ip_iters + sp_iters -
                    (sub_slot_iters if overflow else 0)),
            True,
        )
        next_sub_slot_iters = _get_next_sub_slot_iters(
            constants,
            blocks,
            block.prev_header_hash,
            uint32(prev_b.height + 1),
            sub_slot_iters,
            deficit,
            False,  # Already checked above
            True,
            uint128(block.total_iters - ip_iters + sp_iters -
                    (sub_slot_iters if overflow else 0)),
            True,
        )

    return make_sub_epoch_summary(
        constants,
        blocks,
        uint32(prev_b.height + 2),
        prev_b,
        next_difficulty,
        next_sub_slot_iters,
    )
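The eligibility checks above reduce to modulo arithmetic on heights: a summary only becomes possible when an upcoming height lands on a sub-epoch boundary (height_can_be_first_in_epoch wraps this kind of test, with a MAX_SUB_SLOT_BLOCKS allowance in the can_finish_soon path). A rough sketch under an assumed boundary size:

SUB_EPOCH_BLOCKS = 384  # assumed value, for illustration only

def height_can_start_sub_epoch(height: int) -> bool:
    # a sub-epoch summary is only possible right after a boundary
    return height % SUB_EPOCH_BLOCKS == 0

assert height_can_start_sub_epoch(384)
assert not height_can_start_sub_epoch(385)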
Code example #21
    async def test_create_offer_with_zero_val(self, two_wallet_nodes):
        num_blocks = 10
        full_nodes, wallets = two_wallet_nodes
        full_node_1, server_1 = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        wallet_node_2, server_3 = wallets[1]
        wallet = wallet_node.wallet_state_manager.main_wallet
        wallet2 = wallet_node_2.wallet_state_manager.main_wallet

        ph = await wallet.get_new_puzzlehash()
        ph2 = await wallet2.get_new_puzzlehash()

        await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
        await server_3.start_client(PeerInfo("localhost", uint16(server_1._port)), None)

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        funds = sum(
            [
                calculate_base_fee(uint32(i)) + calculate_block_reward(uint32(i))
                for i in range(1, num_blocks - 2)
            ]
        )

        await self.time_out_assert(15, wallet.get_confirmed_balance, funds)

        cc_wallet: CCWallet = await CCWallet.create_new_cc(
            wallet_node.wallet_state_manager, wallet, uint64(100)
        )

        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        await self.time_out_assert(15, cc_wallet.get_unconfirmed_balance, 100)
        await self.time_out_assert(15, cc_wallet.get_confirmed_balance, 100)

        assert cc_wallet.cc_info.my_core
        colour = cc_wallet_puzzles.get_genesis_from_core(cc_wallet.cc_info.my_core)

        cc_wallet_2: CCWallet = await CCWallet.create_wallet_for_cc(
            wallet_node_2.wallet_state_manager, wallet2, colour
        )

        assert cc_wallet.cc_info.my_core == cc_wallet_2.cc_info.my_core

        await full_node_1.farm_new_block(FarmNewBlockProtocol(ph2))
        for i in range(1, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(ph))

        trade_manager_1 = await TradeManager.create(wallet_node.wallet_state_manager)
        trade_manager_2 = await TradeManager.create(wallet_node_2.wallet_state_manager)

        file = "test_offer_file.offer"
        file_path = Path(file)

        if file_path.exists():
            file_path.unlink()

        offer_dict = {1: -10, 2: 30}

        success, spend_bundle, error = await trade_manager_2.create_offer_for_ids(offer_dict)

        assert success is True
        assert spend_bundle is not None
        trade_manager_2.write_offer_to_disk(file_path, spend_bundle)

        success, offer, error = await trade_manager_1.get_discrepancies_for_offer(
            file_path
        )

        assert error is None
        assert success is True
        assert offer is not None

        assert offer["chia"] == 10
        assert offer[colour] == -30

        success, reason = await trade_manager_1.respond_to_offer(file_path)

        assert success is True

        for i in range(0, num_blocks):
            await full_node_1.farm_new_block(FarmNewBlockProtocol(token_bytes()))

        await self.time_out_assert(15, cc_wallet_2.get_confirmed_balance, 30)
        await self.time_out_assert(15, cc_wallet_2.get_unconfirmed_balance, 30)
Code example #22
    async def generate_zero_val_coin(self,
                                     send: bool = True,
                                     exclude: Optional[List[Coin]] = None
                                     ) -> SpendBundle:
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        if exclude is None:
            exclude = []
        coins = await self.standard_wallet.select_coins(0, exclude)

        assert coins != set()

        origin = coins.copy().pop()
        origin_id = origin.name()

        cc_inner = await self.get_new_inner_hash()
        cc_puzzle_hash: bytes32 = cc_puzzle_hash_for_inner_puzzle_hash(
            CC_MOD, self.cc_info.my_genesis_checker, cc_inner)

        tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins)
        assert tx.spend_bundle is not None
        full_spend: SpendBundle = tx.spend_bundle
        self.log.info(
            f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")

        # generate eve coin so we can add future lineage_proofs even if we don't eve spend
        eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))

        await self.add_lineage(
            eve_coin.name(),
            Program.to((
                1,
                [eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
            )),
        )
        await self.add_lineage(eve_coin.parent_coin_info,
                               Program.to((0, [origin.as_list(), 1])))

        if send:
            regular_record = TransactionRecord(
                confirmed_at_sub_height=uint32(0),
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(10),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=uint32(1),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=token_bytes(),
            )
            cc_record = TransactionRecord(
                confirmed_at_sub_height=uint32(0),
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(0),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=self.id(),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=full_spend.name(),
            )
            await self.wallet_state_manager.add_transaction(regular_record)
            await self.wallet_state_manager.add_pending_transaction(cc_record)

        return full_spend
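A usage sketch, assuming cc_wallet is an initialised CCWallet backed by a funded standard wallet, and reserved_coin is any Coin to withhold from selection (both names are hypothetical; setup not shown):

# Mint a zero-value coloured coin and queue the transactions for broadcast:
spend_bundle = await cc_wallet.generate_zero_val_coin()

# Or build the spend without queueing it, excluding specific coins:
spend_bundle = await cc_wallet.generate_zero_val_coin(send=False,
                                                      exclude=[reserved_coin])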
Code Example #23
File: wallet.py  Project: bleepster/chia-blockchain
    async def generate_unsigned_transaction(
        self,
        amount: uint64,
        newpuzzlehash: bytes32,
        fee: uint64 = uint64(0),
        origin_id: Optional[bytes32] = None,
        coins: Optional[Set[Coin]] = None,
    ) -> List[Tuple[Program, CoinSolution]]:
        """
        Generates an unsigned transaction as a list of (Puzzle, CoinSolution) pairs.
        """
        if coins is None:
            coins = await self.select_coins(amount + fee)
        if coins is None:
            self.log.info("coins is None")
            return []

        self.log.info(f"coins is not None {coins}")
        spend_value = sum([coin.amount for coin in coins])
        change = spend_value - amount - fee

        spends: List[Tuple[Program, CoinSolution]] = []
        output_created = False

        for coin in coins:
            self.log.info(f"coin from coins {coin}")
            # Get keys for puzzle_hash
            puzzle_hash = coin.puzzle_hash
            maybe = await self.wallet_state_manager.get_keys(puzzle_hash)
            if not maybe:
                self.log.error(
                    f"Wallet couldn't find keys for puzzle_hash {puzzle_hash}")
                return []

            # Get puzzle for pubkey
            pubkey, secretkey = maybe
            puzzle: Program = puzzle_for_pk(bytes(pubkey))

            # Only one coin creates outputs: the first coin encountered when no
            # origin is specified, otherwise the designated origin coin.
            if not output_created and (origin_id is None
                                       or origin_id == coin.name()):
                primaries = [{"puzzlehash": newpuzzlehash, "amount": amount}]
                if change > 0:
                    changepuzzlehash = await self.get_new_puzzlehash()
                    primaries.append({
                        "puzzlehash": changepuzzlehash,
                        "amount": change
                    })

                if fee > 0:
                    solution = self.make_solution(primaries=primaries, fee=fee)
                else:
                    solution = self.make_solution(primaries=primaries)
                output_created = True
            else:
                solution = self.make_solution()

            spends.append((puzzle, CoinSolution(coin, solution)))

        self.log.info(f"Spends is {spends}")
        return spends
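The change arithmetic above is simple bookkeeping: whatever the selected coins carry beyond amount + fee comes back as a change output to a fresh puzzle hash. A worked example with assumed values:

# Assumed values, for illustration only:
coin_amounts = [600, 500]              # amounts of the selected coins
spend_value = sum(coin_amounts)        # 1100
amount, fee = 1000, 50
change = spend_value - amount - fee    # 1100 - 1000 - 50 = 50
# primaries would then hold the 1000 payment plus a 50 change output,
# and only the first (or origin) coin's solution creates them.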
Code Example #24
    async def generate_signed_transaction(
        self,
        amounts: List[uint64],
        puzzle_hashes: List[bytes32],
        fee: uint64 = uint64(0),
        origin_id: Optional[bytes32] = None,
        coins: Optional[Set[Coin]] = None,
    ) -> TransactionRecord:
        sigs: List[G2Element] = []

        # Get coins and calculate amount of change required
        outgoing_amount = uint64(sum(amounts))

        if coins is None:
            selected_coins: Set[Coin] = await self.select_coins(
                uint64(outgoing_amount + fee))
        else:
            selected_coins = coins

        total_amount = sum([x.amount for x in selected_coins])
        change = total_amount - outgoing_amount - fee
        primaries = []
        for amount, puzzle_hash in zip(amounts, puzzle_hashes):
            primaries.append({"puzzlehash": puzzle_hash, "amount": amount})

        if change > 0:
            changepuzzlehash = await self.get_new_inner_hash()
            primaries.append({
                "puzzlehash": changepuzzlehash,
                "amount": change
            })

        if fee > 0:
            innersol = self.standard_wallet.make_solution(primaries=primaries,
                                                          fee=fee)
        else:
            innersol = self.standard_wallet.make_solution(primaries=primaries)

        coin = selected_coins.pop()
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)

        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")

        genesis_id = genesis_coin_id_for_genesis_coin_checker(
            self.cc_info.my_genesis_checker)
        innersol_list = [innersol]

        sigs = sigs + await self.get_sigs(inner_puzzle, innersol, coin.name())
        lineage_proof = await self.get_lineage_proof_for_coin(coin)
        assert lineage_proof is not None
        spendable_cc_list = [
            SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof)
        ]
        assert self.cc_info.my_genesis_checker is not None

        for coin in selected_coins:
            coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(
                coin.puzzle_hash)
            innersol = self.standard_wallet.make_solution()
            innersol_list.append(innersol)
            sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol,
                                              coin.name())
        spend_bundle = spend_bundle_for_spendable_ccs(
            CC_MOD,
            self.cc_info.my_genesis_checker,
            spendable_cc_list,
            innersol_list,
            sigs,
        )
        # TODO add support for array in stored records
        return TransactionRecord(
            confirmed_at_sub_height=uint32(0),
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=puzzle_hashes[0],
            amount=uint64(outgoing_amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=spend_bundle.name(),
        )
Code Example #25
    async def request_header_hash(
        self, request: farmer_protocol.RequestHeaderHash
    ) -> OutboundMessageGenerator:
        """
        Creates a block body and header, with the proof of space, coinbase, and fee targets provided
        by the farmer, and sends the hash of the header data back to the farmer.
        """
        plot_seed: bytes32 = request.proof_of_space.get_plot_seed()

        # Checks that the proof of space is valid
        quality_string: bytes = Verifier().validate_proof(
            plot_seed,
            request.proof_of_space.size,
            request.challenge_hash,
            bytes(request.proof_of_space.proof),
        )
        assert quality_string

        # Retrieves the correct head for the challenge
        tips: List[SmallHeaderBlock] = self.blockchain.get_current_tips()
        target_tip: Optional[SmallHeaderBlock] = None
        for tip in tips:
            assert tip.challenge
            if tip.challenge.get_hash() == request.challenge_hash:
                target_tip = tip
        if target_tip is None:
            # TODO: should we still allow the farmer to farm?
            log.warning(
                f"Challenge hash: {request.challenge_hash} not in one of three heads"
            )
            return

        # TODO: use mempool to grab best transactions, for the selected head
        transactions_generator: bytes32 = sha256(b"").digest()
        # TODO: calculate the fees of these transactions
        fees: FeesTarget = FeesTarget(request.fees_target_puzzle_hash, uint64(0))
        aggregate_sig: Signature = PrivateKey.from_seed(b"12345").sign(b"anything")
        # TODO: calculate aggregate signature based on transactions
        # TODO: calculate cost of all transactions
        cost = uint64(0)

        # Creates a block with transactions, coinbase, and fees
        body: Body = Body(
            request.coinbase,
            request.coinbase_signature,
            fees,
            aggregate_sig,
            transactions_generator,
            cost,
        )

        # Creates the block header
        prev_header_hash: bytes32 = target_tip.header.get_hash()
        timestamp: uint64 = uint64(int(time.time()))

        # TODO: use a real BIP158 filter based on transactions
        filter_hash: bytes32 = token_bytes(32)
        proof_of_space_hash: bytes32 = request.proof_of_space.get_hash()
        body_hash: bytes32 = body.get_hash()
        extension_data: bytes32 = bytes32([0] * 32)
        block_header_data: HeaderData = HeaderData(
            prev_header_hash,
            timestamp,
            filter_hash,
            proof_of_space_hash,
            body_hash,
            extension_data,
        )

        block_header_data_hash: bytes32 = block_header_data.get_hash()

        # Stores this block so we can submit it to the blockchain once it is signed by the harvester
        self.store.add_candidate_block(
            proof_of_space_hash, body, block_header_data, request.proof_of_space
        )

        message = farmer_protocol.HeaderHash(
            proof_of_space_hash, block_header_data_hash
        )
        yield OutboundMessage(
            NodeType.FARMER, Message("header_hash", message), Delivery.RESPOND
        )
Code Example #26
    @staticmethod
    async def create_new_cc(
        wallet_state_manager: Any,
        wallet: Wallet,
        amount: uint64,
    ):
        self = CCWallet()
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        self.standard_wallet = wallet
        self.log = logging.getLogger(__name__)

        self.wallet_state_manager = wallet_state_manager

        self.cc_info = CCInfo(None, [])
        info_as_string = bytes(self.cc_info).hex()
        self.wallet_info = await wallet_state_manager.user_store.create_wallet(
            "CC Wallet", WalletType.COLOURED_COIN, info_as_string)
        if self.wallet_info is None:
            raise ValueError("Internal Error")

        try:
            spend_bundle = await self.generate_new_coloured_coin(amount)
        except Exception:
            await wallet_state_manager.user_store.delete_wallet(self.id())
            raise

        await self.wallet_state_manager.add_new_wallet(self, self.id())

        # Change and actual coloured coin
        non_ephemeral_spends: List[
            Coin] = spend_bundle.not_ephemeral_additions()
        cc_coin = None
        puzzle_store = self.wallet_state_manager.puzzle_store

        for c in non_ephemeral_spends:
            info = await puzzle_store.wallet_info_for_puzzle_hash(
                c.puzzle_hash)
            if info is None:
                raise ValueError("Internal Error")
            wallet_id, wallet_type = info
            if wallet_id == self.id():
                cc_coin = c

        if cc_coin is None:
            raise ValueError(
                "Internal Error, unable to generate new coloured coin")

        regular_record = TransactionRecord(
            confirmed_at_sub_height=uint32(0),
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.wallet_state_manager.main_wallet.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=token_bytes(),
        )
        cc_record = TransactionRecord(
            confirmed_at_sub_height=uint32(0),
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(10),
            spend_bundle=None,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.INCOMING_TX.value),
            name=token_bytes(),
        )
        await self.standard_wallet.push_transaction(regular_record)
        await self.standard_wallet.push_transaction(cc_record)
        return self
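A usage sketch, assuming an initialised wallet_state_manager and a funded standard wallet (both hypothetical names here; setup not shown):

# Create a coloured-coin wallet and mint 100 units of a brand-new colour:
cc_wallet = await CCWallet.create_new_cc(wallet_state_manager, wallet,
                                         uint64(100))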
Code Example #27
    async def add_spendbundle(
        self,
        new_spend: SpendBundle,
        to_pool: Mempool = None
    ) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
        """
        Tries to add spendbundle to either self.mempools or to_pool if it's specified.
        Returns true if it's added in any of pools, Returns error if it fails.
        """
        self.seen_bundle_hashes[new_spend.name()] = new_spend.name()
        self.maybe_pop_seen()

        # Calculate the cost and fees
        program = best_solution_program(new_spend)
        # npc contains names of the coins removed, puzzle_hashes and their spend conditions
        fail_reason, npc_list, cost = calculate_cost_of_program(program)
        if fail_reason:
            return None, MempoolInclusionStatus.FAILED, fail_reason

        # build removal list
        removal_names: List[bytes32] = new_spend.removal_names()

        additions = new_spend.additions()
        additions_dict: Dict[bytes32, Coin] = {}
        for add in additions:
            additions_dict[add.name()] = add

        addition_amount = uint64(0)

        # Check additions for max coin amount
        for coin in additions:
            if coin.amount >= uint64.from_bytes(
                    self.constants["MAX_COIN_AMOUNT"]):
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.COIN_AMOUNT_EXCEEDS_MAXIMUM,
                )
            addition_amount = uint64(addition_amount + coin.amount)

        # Check for duplicate outputs
        addition_counter = collections.Counter(_.name() for _ in additions)
        for k, v in addition_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DUPLICATE_OUTPUT

        # Check for duplicate inputs
        removal_counter = collections.Counter(removal_names)
        for k, v in removal_counter.items():
            if v > 1:
                return None, MempoolInclusionStatus.FAILED, Err.DOUBLE_SPEND

        # Spend might be valid for one pool but not for other
        added_count = 0
        errors: List[Err] = []
        targets: List[Mempool]

        # If the transaction is added to potential set (to be retried), this is set.
        added_to_potential: bool = False
        potential_error: Optional[Err] = None

        if to_pool is not None:
            targets = [to_pool]
        else:
            targets = list(self.mempools.values())

        for pool in targets:
            # Skip if already added
            if new_spend.name() in pool.spends:
                added_count += 1
                continue

            removal_record_dict: Dict[bytes32, CoinRecord] = {}
            removal_coin_dict: Dict[bytes32, Coin] = {}

            unknown_unspent_error: bool = False
            removal_amount = uint64(0)
            for name in removal_names:
                removal_record = await self.coin_store.get_coin_record(
                    name, pool.header)
                if removal_record is None and name not in additions_dict:
                    unknown_unspent_error = True
                    break
                elif name in additions_dict:
                    removal_coin = additions_dict[name]
                    removal_record = CoinRecord(
                        removal_coin,
                        uint32(pool.header.height + 1),
                        uint32(0),
                        False,
                        False,
                    )

                assert removal_record is not None
                removal_amount = uint64(removal_amount +
                                        removal_record.coin.amount)
                removal_record_dict[name] = removal_record
                removal_coin_dict[name] = removal_record.coin

            if unknown_unspent_error:
                errors.append(Err.UNKNOWN_UNSPENT)
                continue

            if addition_amount > removal_amount:
                return None, MempoolInclusionStatus.FAILED, Err.MINTING_COIN

            fees = removal_amount - addition_amount
            assert_fee_sum: uint64 = uint64(0)

            for npc in npc_list:
                if ConditionOpcode.ASSERT_FEE in npc.condition_dict:
                    fee_list: List[ConditionVarPair] = npc.condition_dict[
                        ConditionOpcode.ASSERT_FEE]
                    for cvp in fee_list:
                        fee = int_from_bytes(cvp.var1)
                        assert_fee_sum = assert_fee_sum + fee

            if fees < assert_fee_sum:
                return (
                    None,
                    MempoolInclusionStatus.FAILED,
                    Err.ASSERT_FEE_CONDITION_FAILED,
                )

            if cost == 0:
                return None, MempoolInclusionStatus.FAILED, Err.UNKNOWN

            fees_per_cost: float = fees / cost

            # If pool is at capacity check the fee, if not then accept even without the fee
            if pool.at_full_capacity():
                if fees == 0:
                    errors.append(Err.INVALID_FEE_LOW_FEE)
                    continue
                if fees_per_cost < pool.get_min_fee_rate():
                    errors.append(Err.INVALID_FEE_LOW_FEE)
                    continue

            # Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
            # Use this information later when constructing a block
            fail_reason, conflicts = await self.check_removals(
                removal_record_dict, pool)
            # If there is a mempool conflict check if this spendbundle has a higher fee per cost than all others
            tmp_error: Optional[Err] = None
            conflicting_pool_items: Dict[bytes32, MempoolItem] = {}
            if fail_reason is Err.MEMPOOL_CONFLICT:
                for conflicting in conflicts:
                    sb: MempoolItem = pool.removals[conflicting.name()]
                    conflicting_pool_items[sb.name] = sb
                for item in conflicting_pool_items.values():
                    if item.fee_per_cost >= fees_per_cost:
                        tmp_error = Err.MEMPOOL_CONFLICT
                        self.add_to_potential_tx_set(new_spend)
                        added_to_potential = True
                        potential_error = Err.MEMPOOL_CONFLICT
                        break
            elif fail_reason:
                errors.append(fail_reason)
                continue

            if tmp_error:
                errors.append(tmp_error)
                continue

            # Verify conditions, create hash_key list for aggsig check
            hash_key_pairs = []
            error: Optional[Err] = None
            for npc in npc_list:
                coin_record: CoinRecord = removal_record_dict[npc.coin_name]
                # Check that the revealed removal puzzles actually match the puzzle hash
                if npc.puzzle_hash != coin_record.coin.puzzle_hash:
                    log.warning(
                        f"Mempool rejecting transaction because of wrong puzzle_hash"
                    )
                    log.warning(
                        f"{npc.puzzle_hash} != {coin_record.coin.puzzle_hash}")
                    return None, MempoolInclusionStatus.FAILED, Err.WRONG_PUZZLE_HASH

                error = mempool_check_conditions_dict(coin_record, new_spend,
                                                      npc.condition_dict, pool)

                if error:
                    if (error is Err.ASSERT_BLOCK_INDEX_EXCEEDS_FAILED
                            or error is Err.ASSERT_BLOCK_AGE_EXCEEDS_FAILED):
                        self.add_to_potential_tx_set(new_spend)
                        added_to_potential = True
                        potential_error = error
                    break
                hash_key_pairs.extend(
                    hash_key_pairs_for_conditions_dict(npc.condition_dict,
                                                       npc.coin_name))
            if error:
                errors.append(error)
                continue

            # Verify aggregated signature
            if not new_spend.aggregated_signature.validate(hash_key_pairs):
                return None, MempoolInclusionStatus.FAILED, Err.BAD_AGGREGATE_SIGNATURE

            # Remove all conflicting Coins and SpendBundles
            if fail_reason:
                mitem: MempoolItem
                for mitem in conflicting_pool_items.values():
                    pool.remove_spend(mitem)

            new_item = MempoolItem(new_spend, fees_per_cost, uint64(fees),
                                   uint64(cost))
            pool.add_to_pool(new_item, additions, removal_coin_dict)

            added_count += 1

        if added_count > 0:
            return uint64(cost), MempoolInclusionStatus.SUCCESS, None
        elif added_to_potential:
            return uint64(
                cost), MempoolInclusionStatus.PENDING, potential_error
        else:
            return None, MempoolInclusionStatus.FAILED, errors[0]
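The mempool-conflict handling above reduces to a fee-rate comparison: the new spend may displace conflicting items only if it pays strictly more fee per unit of cost than each of them. A condensed sketch of that decision with hypothetical numbers:

# Illustration of the replacement rule, not part of the mempool code:
conflicting = [(10, 1000), (30, 1500)]   # (fees, cost) of conflicting items
new_fees, new_cost = 50, 2000
new_rate = new_fees / new_cost           # 0.025

# Replacement succeeds only if the new rate beats every conflicting rate:
can_replace = all(new_rate > fees / cost for fees, cost in conflicting)
# Here 0.025 > 0.01 and 0.025 > 0.02, so can_replace is True.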
Code Example #28
    def get_next_difficulty(self, header_hash: bytes32) -> uint64:
        """
        Returns the difficulty of the next block that extends onto header_hash.
        Used to calculate the number of iterations.
        """
        block: SmallHeaderBlock = self.headers[header_hash]

        next_height: uint32 = uint32(block.height + 1)
        if next_height < self.constants["DIFFICULTY_EPOCH"]:
            # We are in the first epoch
            return uint64(self.constants["DIFFICULTY_STARTING"])

        # Epochs are defined as intervals of DIFFICULTY_EPOCH blocks, inclusive and indexed at 0.
        # For example, [0-2047], [2048-4095], etc. The difficulty changes DIFFICULTY_DELAY into the
        # epoch, as opposed to the first block (as in Bitcoin).
        elif (
            next_height % self.constants["DIFFICULTY_EPOCH"]
            != self.constants["DIFFICULTY_DELAY"]
        ):
            # Not at a point where difficulty would change
            prev_block: Optional[SmallHeaderBlock] = self.headers.get(
                block.prev_header_hash, None
            )
            if prev_block is None:
                raise Exception("Previous block is invalid.")
            assert block.challenge is not None
            assert prev_block.challenge is not None
            return uint64(
                block.challenge.total_weight - prev_block.challenge.total_weight
            )

        #       old diff                  curr diff       new diff
        # ----------|-----|----------------------|-----|-----...
        #           h1    h2                     h3   i-1
        # Height1 is the last block 2 epochs ago, so we can include the time to mine 1st block in previous epoch
        height1 = uint32(
            next_height
            - self.constants["DIFFICULTY_EPOCH"]
            - self.constants["DIFFICULTY_DELAY"]
            - 1
        )
        # Height2 is the DIFFICULTY_DELAY-th block in the previous epoch
        height2 = uint32(next_height - self.constants["DIFFICULTY_EPOCH"] - 1)
        # Height3 is the last block in the previous epoch
        height3 = uint32(next_height - self.constants["DIFFICULTY_DELAY"] - 1)

        # Blocks between h1 and h2 were mined at the previous difficulty, while
        # blocks between h2 and h3 were mined at the current difficulty

        block1, block2, block3 = None, None, None
        if block not in self.get_current_tips() or height3 not in self.height_to_hash:
            # This means we are either on a fork, or on one of the chains, but after the LCA,
            # so we manually backtrack.
            curr: Optional[SmallHeaderBlock] = block
            assert curr is not None
            while (
                curr.height not in self.height_to_hash
                or self.height_to_hash[curr.height] != curr.header_hash
            ):
                if curr.height == height1:
                    block1 = curr
                elif curr.height == height2:
                    block2 = curr
                elif curr.height == height3:
                    block3 = curr
                curr = self.headers.get(curr.prev_header_hash, None)
                assert curr is not None
        # Once we are before the fork point (and before the LCA), we can use the height_to_hash map
        if not block1 and height1 >= 0:
            # height1 could be -1, for the first difficulty calculation
            block1 = self.headers[self.height_to_hash[height1]]
        if not block2:
            block2 = self.headers[self.height_to_hash[height2]]
        if not block3:
            block3 = self.headers[self.height_to_hash[height3]]
        assert block2 is not None and block3 is not None

        # Current difficulty parameter (diff of block h = i - 1)
        Tc = self.get_next_difficulty(block.prev_header_hash)

        # Previous difficulty parameter (diff of block h = i - 2048 - 1)
        Tp = self.get_next_difficulty(block2.prev_header_hash)
        if block1:
            timestamp1 = block1.header.data.timestamp  # i - 2048 - 512 - 1
        else:
            # In the case of height == -1, there is no timestamp here, so assume the genesis block
            # took constants["BLOCK_TIME_TARGET"] seconds to mine.
            genesis = self.headers[self.height_to_hash[uint32(0)]]
            timestamp1 = (
                genesis.header.data.timestamp - self.constants["BLOCK_TIME_TARGET"]
            )
        timestamp2 = block2.header.data.timestamp  # i - 2048 - 1
        timestamp3 = block3.header.data.timestamp  # i - 512 - 1

        # Numerator fits in 128 bits, so big int is not necessary
        # We multiply by the denominators here, so we only have one fraction in the end (avoiding floating point)
        term1 = (
            self.constants["DIFFICULTY_DELAY"]
            * Tp
            * (timestamp3 - timestamp2)
            * self.constants["BLOCK_TIME_TARGET"]
        )
        term2 = (
            (self.constants["DIFFICULTY_WARP_FACTOR"] - 1)
            * (self.constants["DIFFICULTY_EPOCH"] - self.constants["DIFFICULTY_DELAY"])
            * Tc
            * (timestamp2 - timestamp1)
            * self.constants["BLOCK_TIME_TARGET"]
        )

        # Round down after the division
        new_difficulty: uint64 = uint64(
            (term1 + term2)
            // (
                self.constants["DIFFICULTY_WARP_FACTOR"]
                * (timestamp3 - timestamp2)
                * (timestamp2 - timestamp1)
            )
        )

        # Only change by a max factor, to prevent attacks, as in greenpaper, and must be at least 1
        if new_difficulty >= Tc:
            return min(new_difficulty, uint64(self.constants["DIFFICULTY_FACTOR"] * Tc))
        else:
            return max(
                [
                    uint64(1),
                    new_difficulty,
                    uint64(Tc // self.constants["DIFFICULTY_FACTOR"]),
                ]
            )
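Concretely, the adjustment is a weighted comparison of the target block time against the observed time over the two windows, then clamped to a maximum factor of change. A worked sketch with toy constants (not the production parameters):

# Toy constants, for illustration only:
EPOCH, DELAY, WARP, TARGET, FACTOR = 8, 2, 4, 10, 3
Tc, Tp = 100, 90           # current and previous difficulty
t1, t2, t3 = 0, 70, 130    # the three sampled timestamps

term1 = DELAY * Tp * (t3 - t2) * TARGET                          # 108000
term2 = (WARP - 1) * (EPOCH - DELAY) * Tc * (t2 - t1) * TARGET   # 1260000
new_difficulty = (term1 + term2) // (WARP * (t3 - t2) * (t2 - t1))
# (108000 + 1260000) // (4 * 60 * 70) = 1368000 // 16800 = 81

# Clamp: change by at most a factor of FACTOR, and never drop below 1.
if new_difficulty >= Tc:
    new_difficulty = min(new_difficulty, FACTOR * Tc)
else:
    new_difficulty = max(1, new_difficulty, Tc // FACTOR)
# Result: 81, a modest decrease well within the allowed factor of 3.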
Code Example #29
    def get_consecutive_blocks(
        self,
        input_constants: Dict,
        num_blocks: int,
        block_list: Optional[List[FullBlock]] = None,
        seconds_per_block=constants["BLOCK_TIME_TARGET"],
        seed: bytes = b"",
    ) -> List[FullBlock]:
        test_constants: Dict[str, Any] = constants.copy()
        for key, value in input_constants.items():
            test_constants[key] = value

        # Avoid the mutable-default-argument pitfall: a shared default list
        # would accumulate blocks across calls.
        if block_list is None:
            block_list = []
        if len(block_list) == 0:
            if "GENESIS_BLOCK" in test_constants:
                block_list.append(
                    FullBlock.from_bytes(test_constants["GENESIS_BLOCK"]))
            else:
                block_list.append(
                    self.create_genesis_block(test_constants,
                                              sha256(seed).digest(), seed))
            prev_difficulty = test_constants["DIFFICULTY_STARTING"]
            curr_difficulty = prev_difficulty
            curr_ips = test_constants["VDF_IPS_STARTING"]
        elif len(block_list) < (test_constants["DIFFICULTY_EPOCH"] +
                                test_constants["DIFFICULTY_DELAY"]):
            # First epoch (+delay), so just get first difficulty
            prev_difficulty = block_list[0].weight
            curr_difficulty = block_list[0].weight
            assert test_constants["DIFFICULTY_STARTING"] == prev_difficulty
            curr_ips = test_constants["VDF_IPS_STARTING"]
        else:
            curr_difficulty = block_list[-1].weight - block_list[-2].weight
            prev_difficulty = (
                block_list[-1 - test_constants["DIFFICULTY_EPOCH"]].weight -
                block_list[-2 - test_constants["DIFFICULTY_EPOCH"]].weight)
            assert block_list[-1].header_block.proof_of_time
            curr_ips = calculate_ips_from_iterations(
                block_list[-1].header_block.proof_of_space,
                curr_difficulty,
                block_list[-1].header_block.proof_of_time.number_of_iterations,
                test_constants["MIN_BLOCK_TIME"],
            )

        starting_height = block_list[-1].height + 1
        timestamp = block_list[-1].header_block.header.data.timestamp
        for next_height in range(starting_height,
                                 starting_height + num_blocks):
            if (next_height > test_constants["DIFFICULTY_EPOCH"]
                    and next_height % test_constants["DIFFICULTY_EPOCH"]
                    == test_constants["DIFFICULTY_DELAY"]):
                # Calculates new difficulty
                # Plain ints here: height1 is legitimately -1 on the first
                # adjustment, which uint64 would reject.
                height1 = (next_height -
                           (test_constants["DIFFICULTY_EPOCH"] +
                            test_constants["DIFFICULTY_DELAY"]) - 1)
                height2 = next_height - test_constants["DIFFICULTY_EPOCH"] - 1
                height3 = next_height - test_constants["DIFFICULTY_DELAY"] - 1
                if height1 >= 0:
                    block1 = block_list[height1]
                    assert block1.header_block.challenge
                    iters1 = block1.header_block.challenge.total_iters
                    timestamp1 = block1.header_block.header.data.timestamp
                else:
                    block1 = block_list[0]
                    assert block1.header_block.challenge
                    timestamp1 = (block1.header_block.header.data.timestamp -
                                  test_constants["BLOCK_TIME_TARGET"])
                    iters1 = block1.header_block.challenge.total_iters
                timestamp2 = block_list[
                    height2].header_block.header.data.timestamp
                timestamp3 = block_list[
                    height3].header_block.header.data.timestamp

                block3 = block_list[height3]
                assert block3.header_block.challenge
                iters3 = block3.header_block.challenge.total_iters
                term1 = (test_constants["DIFFICULTY_DELAY"] * prev_difficulty *
                         (timestamp3 - timestamp2) *
                         test_constants["BLOCK_TIME_TARGET"])

                term2 = ((test_constants["DIFFICULTY_WARP_FACTOR"] - 1) *
                         (test_constants["DIFFICULTY_EPOCH"] -
                          test_constants["DIFFICULTY_DELAY"]) *
                         curr_difficulty * (timestamp2 - timestamp1) *
                         test_constants["BLOCK_TIME_TARGET"])

                # Round down after the division
                new_difficulty: uint64 = uint64(
                    (term1 + term2) //
                    (test_constants["DIFFICULTY_WARP_FACTOR"] *
                     (timestamp3 - timestamp2) * (timestamp2 - timestamp1)))

                if new_difficulty >= curr_difficulty:
                    new_difficulty = min(
                        new_difficulty,
                        uint64(test_constants["DIFFICULTY_FACTOR"] *
                               curr_difficulty),
                    )
                else:
                    new_difficulty = max([
                        uint64(1),
                        new_difficulty,
                        uint64(curr_difficulty //
                               test_constants["DIFFICULTY_FACTOR"]),
                    ])

                new_ips = uint64(
                    (iters3 - iters1) // (timestamp3 - timestamp1))
                if new_ips >= curr_ips:
                    curr_ips = min(
                        new_ips,
                        uint64(test_constants["IPS_FACTOR"] * new_ips))
                else:
                    curr_ips = max([
                        uint64(1),
                        new_ips,
                        uint64(curr_ips // test_constants["IPS_FACTOR"]),
                    ])

                prev_difficulty = curr_difficulty
                curr_difficulty = new_difficulty
            time_taken = seconds_per_block
            timestamp += time_taken
            block_list.append(
                self.create_next_block(
                    test_constants,
                    block_list[-1],
                    timestamp,
                    curr_difficulty,
                    curr_ips,
                    seed,
                ))
        return block_list
Code Example #30
    async def new_signage_point_harvester(
            self, new_challenge: harvester_protocol.NewSignagePointHarvester,
            peer: WSChiaConnection):
        """
        The harvester receives a new signage point from the farmer, this happens at the start of each slot.
        The harvester does a few things:
        1. The harvester applies the plot filter for each of the plots, to select the proportion which are eligible
        for this signage point and challenge.
        2. The harvester gets the qualities for each plot. This is approximately 7 reads per plot which qualifies.
        Note that each plot may have 0, 1, 2, etc qualities for that challenge: but on average it will have 1.
        3. Checks the required_iters for each quality and the given signage point, to see which are eligible for
        inclusion (required_iters < sp_interval_iters).
        4. Looks up the full proof of space in the plot for each quality, approximately 64 reads per quality
        5. Returns the proof of space to the farmer
        """
        if len(self.harvester.pool_public_keys) == 0 or len(
                self.harvester.farmer_public_keys) == 0:
            # This means that we have not received the handshake yet
            return

        start = time.time()
        assert len(new_challenge.challenge_hash) == 32

        # Refresh plots to see if there are any new ones
        if start - self.harvester.last_load_time > 120:
            await self.harvester.refresh_plots()
            self.harvester.last_load_time = time.time()

        loop = asyncio.get_running_loop()

        def blocking_lookup(
                filename: Path,
                plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
            # Uses the DiskProver object to lookup qualities. This is a blocking call,
            # so it should be run in a thread pool.
            try:
                sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
                    plot_info.prover.get_id(),
                    new_challenge.challenge_hash,
                    new_challenge.sp_hash,
                )
                try:
                    quality_strings = plot_info.prover.get_qualities_for_challenge(
                        sp_challenge_hash)
                except Exception as e:
                    self.harvester.log.error(f"Error using prover object {e}")
                    return []

                responses: List[Tuple[bytes32, ProofOfSpace]] = []
                if quality_strings is not None:
                    # Found proofs of space (on average 1 is expected per plot)
                    for index, quality_str in enumerate(quality_strings):
                        required_iters: uint64 = calculate_iterations_quality(
                            self.harvester.constants.
                            DIFFICULTY_CONSTANT_FACTOR,
                            quality_str,
                            plot_info.prover.get_size(),
                            new_challenge.difficulty,
                            new_challenge.sp_hash,
                        )
                        sp_interval_iters = calculate_sp_interval_iters(
                            self.harvester.constants,
                            new_challenge.sub_slot_iters)
                        if required_iters < sp_interval_iters:
                            # Found a very good proof of space! will fetch the whole proof from disk,
                            # then send to farmer
                            try:
                                proof_xs = plot_info.prover.get_full_proof(
                                    sp_challenge_hash, index)
                            except RuntimeError:
                                self.harvester.log.error(
                                    f"Exception fetching full proof for {filename}"
                                )
                                continue

                            plot_public_key = ProofOfSpace.generate_plot_public_key(
                                plot_info.local_sk.get_g1(),
                                plot_info.farmer_public_key)
                            responses.append((
                                quality_str,
                                ProofOfSpace(
                                    sp_challenge_hash,
                                    plot_info.pool_public_key,
                                    plot_info.pool_contract_puzzle_hash,
                                    plot_public_key,
                                    uint8(plot_info.prover.get_size()),
                                    proof_xs,
                                ),
                            ))
                return responses
            except Exception as e:
                self.harvester.log.error(f"Unknown error: {e}")
                return []

        async def lookup_challenge(
                filename: Path, plot_info: PlotInfo
        ) -> List[harvester_protocol.NewProofOfSpace]:
            # Executes a DiskProverLookup in a thread pool, and returns responses
            all_responses: List[harvester_protocol.NewProofOfSpace] = []
            if self.harvester._is_shutdown:
                return []
            proofs_of_space_and_q: List[Tuple[
                bytes32, ProofOfSpace]] = await loop.run_in_executor(
                    self.harvester.executor, blocking_lookup, filename,
                    plot_info)
            for quality_str, proof_of_space in proofs_of_space_and_q:
                all_responses.append(
                    harvester_protocol.NewProofOfSpace(
                        new_challenge.challenge_hash,
                        new_challenge.sp_hash,
                        quality_str.hex() + str(filename.resolve()),
                        proof_of_space,
                        new_challenge.signage_point_index,
                    ))
            return all_responses

        awaitables = []
        passed = 0
        total = 0
        for try_plot_filename, try_plot_info in self.harvester.provers.items():
            if try_plot_filename.exists():
                # Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
                # This is being executed at the beginning of the slot
                total += 1
                if ProofOfSpace.passes_plot_filter(
                        self.harvester.constants,
                        try_plot_info.prover.get_id(),
                        new_challenge.challenge_hash,
                        new_challenge.sp_hash,
                ):
                    passed += 1
                    awaitables.append(
                        lookup_challenge(try_plot_filename, try_plot_info))

        # Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
        total_proofs_found = 0
        for sublist_awaitable in asyncio.as_completed(awaitables):
            for response in await sublist_awaitable:
                total_proofs_found += 1
                msg = make_msg(ProtocolMessageTypes.new_proof_of_space,
                               response)
                await peer.send_message(msg)

        now = uint64(int(time.time()))
        farming_info = FarmingInfo(
            new_challenge.challenge_hash,
            new_challenge.sp_hash,
            now,
            uint32(passed),
            uint32(total_proofs_found),
            uint32(total),
        )
        pass_msg = make_msg(ProtocolMessageTypes.farming_info, farming_info)
        await peer.send_message(pass_msg)
        self.harvester.log.info(
            f"{len(awaitables)} plots were eligible for farming {new_challenge.challenge_hash.hex()[:10]}..."
            f" Found {total_proofs_found} proofs. Time: {time.time() - start:.5f} s. "
            f"Total {len(self.harvester.provers)} plots")