Example #1
    async def coins_of_interest_added(self, coins: List[Coin],
                                      height: uint32) -> List[Coin]:
        (
            trade_removals,
            trade_additions,
        ) = await self.trade_manager.get_coins_of_interest()
        trade_adds: List[Coin] = []
        block: Optional[BlockRecord] = await self.blockchain.get_block_record_from_db(
            self.blockchain.height_to_hash(height)
        )
        assert block is not None

        pool_rewards = set()
        farmer_rewards = set()

        prev = await self.blockchain.get_block_record_from_db(block.prev_hash)
        # [sub 1] [sub 2] [block 3] [sub 4] [sub 5] [block 6]
        # [block 6] will contain rewards for [sub 1] [sub 2] [block 3]
        while prev is not None:
            # step 1: walk back to the previous transaction block
            if prev.is_transaction_block:
                break
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        if prev is not None:
            # include last block
            pool_rewards.add(bytes32(prev.height.to_bytes(32, "big")))
            farmer_rewards.add(std_hash(std_hash(prev.height)))
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        while prev is not None:
            # step 2: collect rewards back to the transaction block before that
            pool_rewards.add(bytes32(prev.height.to_bytes(32, "big")))
            farmer_rewards.add(std_hash(std_hash(prev.height)))
            if prev.is_transaction_block:
                break
            prev = await self.blockchain.get_block_record_from_db(
                prev.prev_hash)

        for coin in coins:
            if coin.name() in trade_additions:
                trade_adds.append(coin)

            is_coinbase = False
            is_fee_reward = False
            if coin.parent_coin_info in pool_rewards:
                is_coinbase = True
            if coin.parent_coin_info in farmer_rewards:
                is_fee_reward = True

            info = await self.puzzle_store.wallet_info_for_puzzle_hash(
                coin.puzzle_hash)
            if info is not None:
                wallet_id, wallet_type = info
                await self.coin_added(coin, is_coinbase, is_fee_reward,
                                      uint32(wallet_id), wallet_type, height)

        return trade_adds
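A note on the two reward sets built above: pool and farmer reward coins are identified by synthetic parent ids derived from the block height. A minimal sketch of that convention as I read it from this code (the 4-byte height encoding stands in for bytes(uint32) and is an assumption):

from hashlib import sha256

def std_hash(b: bytes) -> bytes:  # same construction as chia's std_hash
    return sha256(b).digest()

height = 1000
# pool reward parent id: the height as a 32-byte big-endian value
pool_parent = height.to_bytes(32, "big")
# farmer reward parent id: a double hash of the serialized height
farmer_parent = std_hash(std_hash(height.to_bytes(4, "big")))
print(pool_parent.hex(), farmer_parent.hex())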
Example #2
def open_backup_file(file_path, private_key):
    backup_file_text = file_path.read_text()
    backup_file_json = json.loads(backup_file_text)
    meta_data = backup_file_json["meta_data"]
    meta_data_bytes = json.dumps(meta_data).encode()
    sig = backup_file_json["signature"]

    backup_pk = master_sk_to_backup_sk(private_key)
    my_pubkey = backup_pk.get_g1()
    key_base_64 = base64.b64encode(bytes(backup_pk))
    f = Fernet(key_base_64)

    encrypted_data = backup_file_json["data"].encode()
    msg = std_hash(encrypted_data) + std_hash(meta_data_bytes)

    signature = SignatureMPL.from_bytes(hexstr_to_bytes(sig))
    pubkey = PublicKeyMPL.from_bytes(hexstr_to_bytes(meta_data["pubkey"]))

    sig_match_my = AugSchemeMPL.verify(my_pubkey, msg, signature)
    sig_match_backup = AugSchemeMPL.verify(pubkey, msg, signature)

    assert sig_match_my is True
    assert sig_match_backup is True

    data_bytes = f.decrypt(encrypted_data)
    data_text = data_bytes.decode()
    data_json = json.loads(data_text)
    unencrypted = {}
    unencrypted["data"] = data_json
    unencrypted["meta_data"] = meta_data
    return unencrypted
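Worth noting in open_backup_file: Fernet requires a 32-byte key in base64 form, which is exactly what base64.b64encode(bytes(backup_pk)) yields for a 32-byte BLS secret key. A minimal self-contained sketch with a made-up key:

import base64
from cryptography.fernet import Fernet

secret_32_bytes = b"\x01" * 32  # hypothetical stand-in for bytes(backup_pk)
key_base_64 = base64.b64encode(secret_32_bytes)
f = Fernet(key_base_64)
token = f.encrypt(b"payload")
assert f.decrypt(token) == b"payload"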
Example #3
    def get_new_bucket(self, key: int, src_peer: Optional[PeerInfo] = None) -> int:
        if src_peer is None:
            src_peer = self.src
        assert src_peer is not None
        hash1 = int.from_bytes(
            bytes(
                std_hash(
                    key.to_bytes(32, byteorder="big")
                    + self.peer_info.get_group()
                    + src_peer.get_group()
                )[:8]
            ),
            byteorder="big",
        )
        hash1 = hash1 % NEW_BUCKETS_PER_SOURCE_GROUP
        hash2 = int.from_bytes(
            bytes(
                std_hash(
                    key.to_bytes(32, byteorder="big")
                    + src_peer.get_group()
                    + bytes([hash1])
                )[:8]
            ),
            byteorder="big",
        )
        return hash2 % NEW_BUCKET_COUNT
Example #4
    async def create_wallet_backup(self, file_path: Path):
        all_wallets = await self.get_all_wallet_info_entries()
        for wallet in all_wallets:
            if wallet.id == 1:
                all_wallets.remove(wallet)
                break

        backup_pk = master_sk_to_backup_sk(self.private_key)
        now = uint64(int(time.time()))
        wallet_backup = WalletInfoBackup(all_wallets)

        backup: Dict[str, Any] = {}

        data = wallet_backup.to_json_dict()
        data["version"] = __version__
        data["fingerprint"] = self.private_key.get_g1().get_fingerprint()
        data["timestamp"] = now
        data["start_height"] = await self.get_start_height()
        key_base_64 = base64.b64encode(bytes(backup_pk))
        f = Fernet(key_base_64)
        data_bytes = json.dumps(data).encode()
        encrypted = f.encrypt(data_bytes)

        meta_data: Dict[str, Any] = {"timestamp": now, "pubkey": bytes(backup_pk.get_g1()).hex()}

        meta_data_bytes = json.dumps(meta_data).encode()
        signature = bytes(AugSchemeMPL.sign(backup_pk, std_hash(encrypted) + std_hash(meta_data_bytes))).hex()

        backup["data"] = encrypted.decode()
        backup["meta_data"] = meta_data
        backup["signature"] = signature

        backup_file_text = json.dumps(backup)
        file_path.write_text(backup_file_text)
Example #5
    def create_next_block(
        self,
        input_constants: Dict,
        prev_block: FullBlock,
        timestamp: uint64,
        update_difficulty: bool,
        difficulty: uint64,
        min_iters: uint64,
        seed: bytes = b"",
        reward_puzzlehash: Optional[bytes32] = None,
        transactions: Optional[Program] = None,
        aggsig: Optional[BLSSignature] = None,
        fees: uint64 = uint64(0),
    ) -> FullBlock:
        """
        Creates the next block with the specified details.
        """
        test_constants: Dict[str, Any] = constants.copy()
        for key, value in input_constants.items():
            test_constants[key] = value
        assert prev_block.proof_of_time is not None
        challenge = Challenge(
            prev_block.proof_of_space.challenge_hash,
            std_hash(
                prev_block.proof_of_space.get_hash()
                + prev_block.proof_of_time.output.get_hash()
            ),
            difficulty if update_difficulty else None,
        )

        return self._create_block(
            test_constants,
            challenge.get_hash(),
            uint32(prev_block.height + 1),
            prev_block.header_hash,
            prev_block.header.data.total_iters,
            prev_block.weight,
            timestamp,
            uint64(difficulty),
            min_iters,
            seed,
            False,
            reward_puzzlehash,
            transactions,
            aggsig,
            fees,
        )
Example #6
    def get_tried_bucket(self, key: int) -> int:
        hash1 = int.from_bytes(
            bytes(std_hash(key.to_bytes(32, byteorder="big") + self.peer_info.get_key())[:8]),
            byteorder="big",
        )
        hash1 = hash1 % TRIED_BUCKETS_PER_GROUP
        hash2 = int.from_bytes(
            bytes(std_hash(key.to_bytes(32, byteorder="big") + self.peer_info.get_group() + bytes([hash1]))[:8]),
            byteorder="big",
        )
        return hash2 % TRIED_BUCKET_COUNT
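The two-stage hashing above mirrors Bitcoin-style address-manager bucketing: the first hash picks a slot within the peer's group, the second picks the final bucket. A runnable sketch with illustrative constants and made-up peer data (the real constants and key encodings are assumptions here):

from hashlib import sha256

TRIED_BUCKETS_PER_GROUP = 8   # illustrative, not necessarily the real value
TRIED_BUCKET_COUNT = 256      # illustrative, not necessarily the real value

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

key = 12345
peer_key = b"192.168.1.7/8444"  # hypothetical serialized peer key
group = b"\xc0\xa8\x01"         # hypothetical group (e.g. a /24 prefix)

hash1 = int.from_bytes(std_hash(key.to_bytes(32, "big") + peer_key)[:8], "big")
hash1 %= TRIED_BUCKETS_PER_GROUP
hash2 = int.from_bytes(std_hash(key.to_bytes(32, "big") + group + bytes([hash1]))[:8], "big")
print(hash2 % TRIED_BUCKET_COUNT)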
Example #7
    def test_win_percentage(self):
        """
        Tests that the percentage of blocks won is proportional to the space of each farmer,
        with the assumption that all farmers have access to the same VDF speed.
        """
        farmer_ks = {
            uint8(32): 100,
            uint8(33): 100,
            uint8(34): 100,
            uint8(35): 100,
            uint8(36): 100,
        }
        farmer_space = {
            k: _expected_plot_size(uint8(k)) * count
            for k, count in farmer_ks.items()
        }
        total_space = sum(farmer_space.values())
        percentage_space = {
            k: float(sp / total_space)
            for k, sp in farmer_space.items()
        }
        wins = {k: 0 for k in farmer_ks.keys()}
        total_slots = 50
        num_sps = 16
        sp_interval_iters = uint64(100000000 // 32)
        difficulty = uint64(500000000000)

        for slot_index in range(total_slots):
            total_wins_in_slot = 0
            for sp_index in range(num_sps):
                sp_hash = std_hash(
                    slot_index.to_bytes(4, "big") +
                    sp_index.to_bytes(4, "big"))
                for k, count in farmer_ks.items():
                    for farmer_index in range(count):
                        quality = std_hash(
                            slot_index.to_bytes(4, "big") +
                            k.to_bytes(1, "big") + bytes(farmer_index))
                        required_iters = calculate_iterations_quality(
                            2**25, quality, k, difficulty, sp_hash)
                        if required_iters < sp_interval_iters:
                            wins[k] += 1
                            total_wins_in_slot += 1

        win_percentage = {
            k: wins[k] / sum(wins.values())
            for k in farmer_ks.keys()
        }
        for k in farmer_ks.keys():
            # Win rate is proportional to percentage of space
            assert abs(win_percentage[k] - percentage_space[k]) < 0.01
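To make the proportionality claim concrete, here is a sketch of the space calculation using the plot size formula from chia's proof-of-space code (reproduced from memory, so treat it as an assumption):

def _expected_plot_size(k: int) -> int:
    return ((2 * k) + 1) * (2 ** (k - 1))

farmer_ks = {32: 100, 33: 100, 34: 100, 35: 100, 36: 100}
farmer_space = {k: _expected_plot_size(k) * count for k, count in farmer_ks.items()}
total_space = sum(farmer_space.values())
for k in farmer_ks:
    print(k, round(farmer_space[k] / total_space, 3))
# Each k step roughly doubles plot size, so the k=36 farmers hold about
# 16x the space of the k=32 farmers and should win about 16x as often.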
Example #8
    def create_next_block(
            self,
            test_constants: ConsensusConstants,
            prev_block: FullBlock,
            timestamp: uint64,
            update_difficulty: bool,
            difficulty: int,
            min_iters: int,
            seed: bytes = b"",
            reward_puzzlehash: Optional[bytes32] = None,
            transactions: Optional[Program] = None,
            aggsig: Optional[G2Element] = None,
            fees: uint64 = uint64(0),
    ) -> FullBlock:
        """
        Creates the next block with the specified details.
        """
        assert prev_block.proof_of_time is not None
        challenge = Challenge(
            prev_block.proof_of_space.challenge_hash,
            std_hash(prev_block.proof_of_space.get_hash() +
                     prev_block.proof_of_time.output.get_hash()),
            uint64(difficulty) if update_difficulty else None,
        )

        return self._create_block(
            test_constants,
            challenge.get_hash(),
            uint32(prev_block.height + 1),
            prev_block.header_hash,
            prev_block.header.data.total_iters,
            prev_block.weight,
            timestamp,
            uint64(difficulty),
            min_iters,
            seed,
            False,
            reward_puzzlehash,
            transactions,
            aggsig,
            fees,
        )
Example #9
    async def test_alert(self, get_daemon):
        daemon = get_daemon
        selected = daemon.net_config["selected_network"]
        assert daemon.net_config["network_overrides"]["constants"][selected][
            "GENESIS_CHALLENGE"] is None
        alert_file_path = daemon.root_path / "alert.txt"

        alert_server = await AlertServer.create_alert_server(
            alert_file_path, 59000)
        create_not_ready_alert_file(alert_file_path, master_sk)
        await alert_server.run()
        expected_genesis = None

        def check_genesis(expected):
            return daemon.net_config["network_overrides"]["constants"][
                selected]["GENESIS_CHALLENGE"] == expected

        await asyncio.sleep(10)
        await time_out_assert(15, check_genesis, True, expected_genesis)

        preimage = "This is test preimage!"
        expected_genesis = std_hash(bytes(preimage, "utf-8")).hex()
        alert_file_path.unlink()
        create_alert_file(alert_file_path, master_sk, preimage)

        await time_out_assert(15, check_genesis, True, expected_genesis)
Example #10
def test_1():
    from pprint import pprint

    puzzle_program_1 = puzzle_program_for_index(uint32(1))
    puzzle_program_2 = puzzle_program_for_index(uint32(2))

    conditions = Program.to([
        make_create_coin_condition(std_hash(bytes(pp)), amount)
        for pp, amount in [(puzzle_program_1, 1000), (puzzle_program_2, 2000)]
    ])

    assert conditions is not None
    puzzle_reveal = p2_delegated_puzzle.puzzle_reveal_for_conditions(
        conditions)
    solution = p2_delegated_puzzle.solution_for_conditions(conditions)

    error, output_conditions, cost = conditions_for_solution(
        puzzle_reveal, solution)
    assert error is None

    assert output_conditions is not None
    output_conditions_dict = conditions_by_opcode(output_conditions)
    pprint(output_conditions_dict)
    input_coin_info_hash = bytes([0] * 32)
    created_outputs_for_conditions_dict(output_conditions_dict,
                                        input_coin_info_hash)
    aggsigs = aggsig_in_conditions_dict(output_conditions_dict)
    pprint(aggsigs)
Example #11
    def init_plots(self, root_path):
        plot_dir = get_plot_dir()
        mkdir(plot_dir)
        temp_dir = plot_dir / "tmp"
        mkdir(temp_dir)
        args = Namespace()
        # Can't go much lower than 20, since plots start having no solutions and become buggier
        args.size = 22
        # Use many plots for testing, in order to guarantee proofs of space at every height
        args.num = 20
        args.buffer = 100
        args.farmer_public_key = bytes(self.farmer_pk).hex()
        args.pool_public_key = bytes(self.pool_pk).hex()
        args.tmp_dir = temp_dir
        args.tmp2_dir = plot_dir
        args.final_dir = plot_dir
        args.plotid = None
        args.memo = None
        args.buckets = 0
        args.stripe_size = 2000
        args.num_threads = 0
        args.nobitfield = False
        args.exclude_final_dir = False
        test_private_keys = [AugSchemeMPL.key_gen(std_hash(i.to_bytes(2, "big"))) for i in range(args.num)]
        try:
            # No datetime in the filename, to get deterministic filenames and avoid re-plotting
            create_plots(
                args,
                root_path,
                use_datetime=False,
                test_private_keys=test_private_keys,
            )
        except KeyboardInterrupt:
            shutil.rmtree(plot_dir, ignore_errors=True)
            sys.exit(1)
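The test_private_keys line is what makes the plots reproducible: hashing the plot index yields a stable 32-byte seed (AugSchemeMPL.key_gen requires at least 32 bytes). A small sketch, assuming blspy is installed:

from hashlib import sha256
from blspy import AugSchemeMPL

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

keys = [AugSchemeMPL.key_gen(std_hash(i.to_bytes(2, "big"))) for i in range(3)]
again = AugSchemeMPL.key_gen(std_hash((0).to_bytes(2, "big")))
assert bytes(keys[0]) == bytes(again)  # same seed, same key, same plot id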
Example #12
    async def test_sync_from_fork_point_and_weight_proof(self, three_nodes, default_1000_blocks, default_400_blocks):
        start = time.time()
        # Must be larger than "sync_block_behind_threshold" in the config
        num_blocks_initial = len(default_1000_blocks) - 50
        blocks_950 = default_1000_blocks[:num_blocks_initial]
        blocks_rest = default_1000_blocks[num_blocks_initial:]
        blocks_400 = default_400_blocks
        full_node_1, full_node_2, full_node_3 = three_nodes
        server_1 = full_node_1.full_node.server
        server_2 = full_node_2.full_node.server
        server_3 = full_node_3.full_node.server

        for block in blocks_950:
            await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))

        # Node 2 syncs from halfway
        for i in range(int(len(default_1000_blocks) / 2)):
            await full_node_2.full_node.respond_block(full_node_protocol.RespondBlock(default_1000_blocks[i]))

        # Node 3 syncs from a different blockchain
        for block in blocks_400:
            await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))

        await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_3.full_node.on_connect)

        # Also test request proof of weight
        # Have the request header hash
        res = await full_node_1.request_proof_of_weight(
            full_node_protocol.RequestProofOfWeight(blocks_950[-1].height + 1, blocks_950[-1].header_hash)
        )
        assert res is not None
        validated, _ = full_node_1.full_node.weight_proof_handler.validate_weight_proof(
            full_node_protocol.RespondProofOfWeight.from_bytes(res.data).wp
        )
        assert validated

        # Don't have the request header hash
        res = await full_node_1.request_proof_of_weight(
            full_node_protocol.RequestProofOfWeight(blocks_950[-1].height + 1, std_hash(b"12"))
        )
        assert res is None

        # The second node should eventually catch up to the first one, and have the
        # same tip at height num_blocks - 1 (or at least num_blocks - 3, in case we sync to below the tip)
        await time_out_assert(180, node_height_exactly, True, full_node_2, num_blocks_initial - 1)
        await time_out_assert(180, node_height_exactly, True, full_node_3, num_blocks_initial - 1)

        cons = list(server_1.all_connections.values())[:]
        for con in cons:
            await con.close()
        for block in blocks_rest:
            await full_node_3.full_node.respond_block(full_node_protocol.RespondBlock(block))
        await time_out_assert(120, node_height_exactly, True, full_node_3, 999)

        await server_2.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_2.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_1._port)), full_node_3.full_node.on_connect)
        await server_3.start_client(PeerInfo(self_hostname, uint16(server_2._port)), full_node_3.full_node.on_connect)
        await time_out_assert(180, node_height_exactly, True, full_node_1, 999)
        await time_out_assert(180, node_height_exactly, True, full_node_2, 999)
Example #13
    async def test_request_all_proof_hashes(self, two_nodes):
        full_node_1, full_node_2, server_1, server_2 = two_nodes
        blocks_list = await get_block_path(full_node_1)

        msgs = [
            _
            async for _ in full_node_1.request_all_proof_hashes(
                wallet_protocol.RequestAllProofHashes()
            )
        ]
        hashes = msgs[0].message.data.hashes
        assert len(hashes) >= len(blocks_list) - 2
        for i in range(len(hashes)):
            if i % test_constants.DIFFICULTY_EPOCH == test_constants.DIFFICULTY_DELAY:
                assert hashes[i][1] is not None
            elif i > 0:
                assert hashes[i][1] is None
            if (
                i % test_constants.DIFFICULTY_EPOCH
                == test_constants.DIFFICULTY_EPOCH - 1
            ):
                assert hashes[i][2] is not None
            else:
                assert hashes[i][2] is None
            assert hashes[i][0] == std_hash(
                blocks_list[i].proof_of_space.get_hash()
                + blocks_list[i].proof_of_time.output.get_hash()
            )
Example #14
async def setup_simulators_and_wallets(
    simulator_count: int, wallet_count: int, dic: Dict, starting_height=None, key_seed=None, starting_port=50000
):
    simulators: List[FullNodeAPI] = []
    wallets = []
    node_iters = []

    consensus_constants = constants_for_dic(dic)
    for index in range(0, simulator_count):
        port = starting_port + index
        db_name = f"blockchain_test_{port}.db"
        sim = setup_full_node(consensus_constants, db_name, port, BlockTools(consensus_constants), simulator=True)
        simulators.append(await sim.__anext__())
        node_iters.append(sim)

    for index in range(0, wallet_count):
        if key_seed is None:
            seed = std_hash(uint32(index))
        else:
            seed = key_seed
        port = starting_port + 5000 + index
        wlt = setup_wallet_node(
            port,
            consensus_constants,
            BlockTools(consensus_constants),
            None,
            key_seed=seed,
            starting_height=starting_height,
        )
        wallets.append(await wlt.__anext__())
        node_iters.append(wlt)

    yield simulators, wallets

    await _teardown_nodes(node_iters)
Example #15
def calculate_plot_id_ph(
    pool_contract_puzzle_hash: bytes32,
    plot_public_key: G1Element,
) -> bytes32:
    return bytes32(
        std_hash(
            bytes(pool_contract_puzzle_hash) + bytes(plot_public_key)))
Example #16
async def download_backup(host: str, private_key: PrivateKey):
    session = aiohttp.ClientSession()
    backup_privkey = master_sk_to_backup_sk(private_key)
    backup_pubkey = bytes(backup_privkey.get_g1()).hex()

    # Get nonce
    nonce_request = {"pubkey": backup_pubkey}
    nonce_url = f"{host}/get_download_nonce"
    nonce_response = await post(session, nonce_url, nonce_request)
    nonce = nonce_response["nonce"]

    # Sign nonce
    signature = bytes(
        AugSchemeMPL.sign(backup_privkey,
                          std_hash(hexstr_to_bytes(nonce)))).hex()
    # Request backup url
    get_backup_url = f"{host}/download_backup"
    backup_request = {"pubkey": backup_pubkey, "signature": signature}
    backup_response = await post(session, get_backup_url, backup_request)

    # Download from s3
    assert backup_response["success"] is True
    backup_url = backup_response["url"]
    backup_text = await get(session, backup_url)
    await session.close()
    return backup_text
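The nonce round trip above is a simple challenge-response: the client proves ownership of the backup key by signing the hash of the server-issued nonce. A self-contained sketch of the signing side and the verification the server would perform (made-up key and nonce, assuming blspy):

from hashlib import sha256
from blspy import AugSchemeMPL

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

backup_sk = AugSchemeMPL.key_gen(b"\x07" * 32)  # hypothetical backup key
nonce = bytes(16)                               # stand-in for the server nonce
signature = AugSchemeMPL.sign(backup_sk, std_hash(nonce))
assert AugSchemeMPL.verify(backup_sk.get_g1(), std_hash(nonce), signature)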
Example #17
def _map_summaries(
    sub_blocks_for_se: uint32,
    ses_hash: bytes32,
    sub_epoch_data: List[SubEpochData],
    curr_difficulty: uint64,
) -> Tuple[List[SubEpochSummary], uint128]:
    sub_epoch_data_weight: uint128 = uint128(0)
    summaries: List[SubEpochSummary] = []

    for idx, data in enumerate(sub_epoch_data):
        ses = SubEpochSummary(
            ses_hash,
            data.reward_chain_hash,
            data.num_sub_blocks_overflow,
            data.new_difficulty,
            data.new_sub_slot_iters,
        )

        if idx < len(sub_epoch_data) - 1:
            delta = 0
            if idx > 0:
                delta = sub_epoch_data[idx].num_sub_blocks_overflow
            sub_epoch_data_weight = sub_epoch_data_weight + uint128(  # type: ignore
                curr_difficulty *
                (sub_blocks_for_se +
                 sub_epoch_data[idx + 1].num_sub_blocks_overflow - delta))
        # if a new epoch starts here, update the difficulty
        if data.new_difficulty is not None:
            curr_difficulty = data.new_difficulty

        # append the summary to the list
        summaries.append(ses)
        ses_hash = std_hash(ses)
    return summaries, sub_epoch_data_weight
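A hedged arithmetic sketch of the weight accumulation above, with made-up numbers: each summary (except the last) contributes difficulty times the sub-epoch's block count, corrected by the overflow blocks that shift between neighbouring sub-epochs.

curr_difficulty = 100
sub_blocks_for_se = 384          # hypothetical sub-blocks per sub-epoch
overflows = [3, 5, 2]            # num_sub_blocks_overflow for each entry

weight = 0
for idx in range(len(overflows) - 1):
    delta = overflows[idx] if idx > 0 else 0
    weight += curr_difficulty * (sub_blocks_for_se + overflows[idx + 1] - delta)
print(weight)  # 100 * (384 + 5 - 0) + 100 * (384 + 2 - 5) = 77000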
Example #18
def bytes_from_mnemonic(mnemonic_str: str):
    mnemonic: List[str] = mnemonic_str.split(" ")
    if len(mnemonic) not in [12, 15, 18, 21, 24]:
        raise ValueError("Invalid mnemonic length")

    word_list = {
        word: i
        for i, word in enumerate(bip39_word_list().splitlines())
    }
    bit_array = BitArray()
    for i in range(0, len(mnemonic)):
        word = mnemonic[i]
        value = word_list[word]
        bit_array.append(BitArray(uint=value, length=11))

    CS: int = len(mnemonic) // 3
    ENT: int = len(mnemonic) * 11 - CS
    assert len(bit_array) == len(mnemonic) * 11
    assert ENT % 32 == 0

    entropy_bytes = bit_array[:ENT].bytes
    checksum_bytes = bit_array[ENT:]
    checksum = BitArray(std_hash(entropy_bytes))[:CS]

    assert len(checksum_bytes) == CS

    if checksum != checksum_bytes:
        raise ValueError("Invalid order of mnemonic words")

    return entropy_bytes
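The CS/ENT arithmetic follows BIP39: every 3 words carry 32 bits of entropy plus 1 checksum bit. A quick check of the numbers for each accepted mnemonic length:

for words in (12, 15, 18, 21, 24):
    CS = words // 3            # checksum bits
    ENT = words * 11 - CS      # entropy bits
    assert ENT % 32 == 0
    print(f"{words} words -> {ENT} entropy bits + {CS} checksum bits")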
Example #19
def hash_coin_list(coin_list: List[Coin]) -> bytes32:
    coin_list.sort(key=lambda x: x.name_str, reverse=True)
    buffer = bytearray()

    for coin in coin_list:
        buffer.extend(coin.name())

    return std_hash(buffer)
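The sort is what makes this hash order-independent: two lists containing the same coins in different order produce the same digest. A sketch of that property with stand-in 32-byte names:

from hashlib import sha256

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

def hash_names(names):
    buffer = bytearray()
    for n in sorted(names, reverse=True):
        buffer.extend(n)
    return std_hash(bytes(buffer))

names = [b"\xaa" * 32, b"\x01" * 32, b"\x7f" * 32]
assert hash_names(names) == hash_names(list(reversed(names)))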
Example #20
    def get_tree_hash(self) -> bytes32:
        if self.listp():
            left = self.to(self.first()).get_tree_hash()
            right = self.to(self.rest()).get_tree_hash()
            s = b"\2" + left + right
        else:
            atom = self.as_atom()
            s = b"\1" + atom
        return bytes32(std_hash(s))
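This is the CLVM tree-hash scheme: atoms hash with a \x01 prefix, pairs with \x02 plus the two child hashes, so tree structure and raw atoms can never collide. A minimal sketch of the same scheme outside clvm, using plain tuples for pairs:

from hashlib import sha256

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

def tree_hash(node) -> bytes:
    if isinstance(node, tuple):          # a pair: \x02 + left hash + right hash
        left, right = node
        return std_hash(b"\x02" + tree_hash(left) + tree_hash(right))
    return std_hash(b"\x01" + node)      # an atom: \x01 + raw bytes

print(tree_hash((b"hello", b"world")).hex())  # ("hello" . "world")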
Example #21
def main():
    """
    Script for checking all plots in the plots.yaml file. Specify the number of challenges to test for each plot.
    """

    parser = argparse.ArgumentParser(
        description="Exodus plot checking script.")
    parser.add_argument("-n",
                        "--num",
                        help="Number of challenges",
                        type=int,
                        default=100)
    args = parser.parse_args()

    root_path = DEFAULT_ROOT_PATH
    plot_config = load_config(root_path, plot_config_filename)
    config = load_config(root_path, config_filename)

    initialize_logging("%(name)-22s", {"log_stdout": True}, root_path)
    log = logging.getLogger(__name__)

    v = Verifier()
    log.info("Loading plots in plots.yaml using harvester loading code\n")
    provers, _, _ = load_plots(config["harvester"], plot_config, None,
                               root_path)
    log.info(
        f"\n\nStarting to test each plot with {args.num} challenges each\n")
    for plot_path, pr in provers.items():
        total_proofs = 0
        try:
            for i in range(args.num):
                challenge = std_hash(i.to_bytes(32, "big"))
                for index, quality_str in enumerate(
                        pr.get_qualities_for_challenge(challenge)):
                    proof = pr.get_full_proof(challenge, index)
                    total_proofs += 1
                    ver_quality_str = v.validate_proof(pr.get_id(),
                                                       pr.get_size(),
                                                       challenge, proof)
                    assert quality_str == ver_quality_str
        except KeyboardInterrupt:
            log.warning("Interrupted, closing")
            return
        except Exception as e:
            log.error(
                f"{type(e)}: {e} error in proving/verifying for plot {plot_path}"
            )
        if total_proofs > 0:
            log.info(
                f"{plot_path}: Proofs {total_proofs} / {args.num}, {round(total_proofs/float(args.num), 4)}"
            )
        else:
            log.error(
                f"{plot_path}: Proofs {total_proofs} / {args.num}, {round(total_proofs/float(args.num), 4)}"
            )
Example #22
    def get_bucket_position(self, key: int, is_new: bool, nBucket: int) -> int:
        ch = "N" if is_new else "K"
        hash1 = int.from_bytes(
            bytes(
                std_hash(
                    key.to_bytes(32, byteorder="big") + ch.encode() +
                    nBucket.to_bytes(3, byteorder="big") +
                    self.peer_info.get_key())[:8]),
            byteorder="big",
        )
        return hash1 % BUCKET_SIZE
Example #23
    def __init__(
        self,
        constants: ConsensusConstants = test_constants,
        root_path: Optional[Path] = None,
    ):
        self._tempdir = None
        if root_path is None:
            self._tempdir = tempfile.TemporaryDirectory()
            root_path = Path(self._tempdir.name)

        self.root_path = root_path
        self.constants = constants
        create_default_chia_config(root_path)
        self.keychain = Keychain("testing-1.8.0", True)
        self.keychain.delete_all_keys()
        self.farmer_master_sk = self.keychain.add_private_key(
            bytes_to_mnemonic(std_hash(b"block_tools farmer key")), ""
        )
        self.pool_master_sk = self.keychain.add_private_key(bytes_to_mnemonic(std_hash(b"block_tools pool key")), "")
        self.farmer_pk = master_sk_to_farmer_sk(self.farmer_master_sk).get_g1()
        self.pool_pk = master_sk_to_pool_sk(self.pool_master_sk).get_g1()
        self.init_plots(root_path)

        initialize_ssl(root_path)
        self.farmer_ph: bytes32 = create_puzzlehash_for_pk(
            master_sk_to_wallet_sk(self.farmer_master_sk, uint32(0)).get_g1()
        )
        self.pool_ph: bytes32 = create_puzzlehash_for_pk(
            master_sk_to_wallet_sk(self.pool_master_sk, uint32(0)).get_g1()
        )

        self.all_sks: List[PrivateKey] = [sk for sk, _ in self.keychain.get_all_private_keys()]
        self.pool_pubkeys: List[G1Element] = [master_sk_to_pool_sk(sk).get_g1() for sk in self.all_sks]

        farmer_pubkeys: List[G1Element] = [master_sk_to_farmer_sk(sk).get_g1() for sk in self.all_sks]
        if len(self.pool_pubkeys) == 0 or len(farmer_pubkeys) == 0:
            raise RuntimeError("Keys not generated. Run `chia generate keys`")

        _, loaded_plots, _, _ = load_plots({}, {}, farmer_pubkeys, self.pool_pubkeys, None, root_path)
        self.plots: Dict[Path, PlotInfo] = loaded_plots
        self._config = load_config(self.root_path, "config.yaml")
Example #24
    async def _address_relay(self):
        while not self.is_closed:
            try:
                relay_peer, num_peers = await self.relay_queue.get()
                relay_peer_info = PeerInfo(relay_peer.host, relay_peer.port)
                if not relay_peer_info.is_valid():
                    continue
                # https://en.bitcoin.it/wiki/Satoshi_Client_Node_Discovery#Address_Relay
                connections = self.global_connections.get_full_node_connections()
                hashes = []
                cur_day = int(time.time()) // (24 * 60 * 60)
                for connection in connections:
                    peer_info = connection.get_peer_info()
                    cur_hash = int.from_bytes(
                        bytes(
                            std_hash(
                                self.key.to_bytes(32, byteorder="big") +
                                peer_info.get_key() +
                                cur_day.to_bytes(3, byteorder="big"))),
                        byteorder="big",
                    )
                    hashes.append((cur_hash, connection))
                hashes.sort(key=lambda x: x[0])
                for index, (_, connection) in enumerate(hashes):
                    if index >= num_peers:
                        break
                    peer_info = connection.get_peer_info()
                    pair = (peer_info.host, peer_info.port)
                    async with self.lock:
                        if (pair in self.neighbour_known_peers
                                and relay_peer.host
                                in self.neighbour_known_peers[pair]):
                            continue
                        if pair not in self.neighbour_known_peers:
                            self.neighbour_known_peers[pair] = set()
                        self.neighbour_known_peers[pair].add(relay_peer.host)
                    if connection.node_id is None:
                        continue
                    msg = OutboundMessage(
                        NodeType.FULL_NODE,
                        Message(
                            "respond_peers_full_node",
                            full_node_protocol.RespondPeers([relay_peer]),
                        ),
                        Delivery.SPECIFIC,
                        connection.node_id,
                    )
                    self.server.push_message(msg)
            except Exception as e:
                self.log.error(f"Exception in address relay: {e}")
                self.log.error(f"Traceback: {traceback.format_exc()}")
Example #25
def create_alert_file(alert_file_path: Path, key, genesis_challenge_preimage: str):
    bytes_preimage = bytes(genesis_challenge_preimage, "UTF-8")
    genesis_challenge = std_hash(bytes_preimage)
    file_dict = {
        "ready": True,
        "genesis_challenge": genesis_challenge.hex(),
        "genesis_challenge_preimage": genesis_challenge_preimage,
    }
    data: str = json.dumps(file_dict)
    signature = AugSchemeMPL.sign(key, bytes(data, "utf-8"))
    file_data = {"data": data, "signature": f"{signature}"}
    file_data_json = json.dumps(file_data)
    alert_file_path.write_text(file_data_json)
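The preimage is stored alongside its hash, so a consumer (like test_alert in Example #9) can recompute the genesis challenge independently of the signature check. A small sketch of that recomputation:

from hashlib import sha256

def std_hash(b: bytes) -> bytes:
    return sha256(b).digest()

preimage = "This is test preimage!"
file_dict = {
    "ready": True,
    "genesis_challenge": std_hash(bytes(preimage, "UTF-8")).hex(),
    "genesis_challenge_preimage": preimage,
}
# a reader recomputes the challenge from the stored preimage and compares
recomputed = std_hash(file_dict["genesis_challenge_preimage"].encode()).hex()
assert recomputed == file_dict["genesis_challenge"]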
Example #26
    def _tree_hash(self, precalculated: Set[bytes32]) -> bytes32:
        """
        Hash values in `precalculated` are presumed to have been hashed already.
        """
        if self.listp():
            left = self.to(self.first())._tree_hash(precalculated)
            right = self.to(self.rest())._tree_hash(precalculated)
            s = b"\2" + left + right
        else:
            atom = self.as_atom()
            if atom in precalculated:
                return bytes32(atom)
            s = b"\1" + atom
        return bytes32(std_hash(s))
Example #27
def _tree_hash(node: SExp, precalculated: Set[bytes32]) -> bytes32:
    """
    Hash values in `precalculated` are presumed to have been hashed already.
    """
    if node.listp():
        left = _tree_hash(node.first(), precalculated)
        right = _tree_hash(node.rest(), precalculated)
        s = b"\2" + left + right
    else:
        atom = node.as_atom()
        if atom in precalculated:
            return bytes32(atom)
        s = b"\1" + atom
    return bytes32(std_hash(s))
Example #28
    async def validate_block_body(self, block: FullBlock) -> Optional[Err]:
        """
        Validates the transactions and body of the block. Returns None if everything
        validates correctly, or an Err if something does not validate.
        """

        # 6. The compact block filter must be correct, according to the body (BIP158)
        if std_hash(block.transactions_filter) != block.header.data.filter_hash:
            return Err.INVALID_TRANSACTIONS_FILTER_HASH

        fee_base = calculate_base_fee(block.height)
        # target reward_fee = 1/8 coinbase reward + tx fees
        if block.transactions_generator is not None:
            # 14. Make sure transactions generator hash is valid (or all 0 if not present)
            if (
                block.transactions_generator.get_tree_hash()
                != block.header.data.generator_hash
            ):
                return Err.INVALID_TRANSACTIONS_GENERATOR_HASH

            # 15. If not genesis, the transactions must be valid and fee must be valid
            # Verifies that fee_base + TX fees = fee_coin.amount
            err = await self._validate_transactions(block, fee_base)
            if err is not None:
                return err
        else:
            # Make sure transactions generator hash is valid (or all 0 if not present)
            if block.header.data.generator_hash != bytes32(bytes([0] * 32)):
                return Err.INVALID_TRANSACTIONS_GENERATOR_HASH

            # 16. If genesis, the fee must be the base fee, agg_sig must be None, and merkle roots must be valid
            if fee_base != block.header.data.total_transaction_fees:
                return Err.INVALID_BLOCK_FEE_AMOUNT
            root_error = self._validate_merkle_root(block)
            if root_error:
                return root_error

            # 17. Verify the pool signature even if there are no transactions
            pool_target_m = bytes(block.header.data.pool_target)
            validates = AugSchemeMPL.verify(
                block.proof_of_space.pool_public_key,
                pool_target_m,
                block.header.data.aggregated_signature,
            )
            if not validates:
                return Err.BAD_AGGREGATE_SIGNATURE

        return None
Example #29
def handle_sexp(sexp_stack, op_stack, precalculated: Set[bytes32]) -> None:
    sexp = sexp_stack.pop()
    if sexp.pair:
        p0, p1 = sexp.pair
        sexp_stack.append(p0)
        sexp_stack.append(p1)
        op_stack.append(handle_pair)
        op_stack.append(handle_sexp)
        op_stack.append(roll)
        op_stack.append(handle_sexp)
    else:
        if sexp.atom in precalculated:
            r = sexp.atom
        else:
            r = std_hash(b"\1" + sexp.atom)
        sexp_stack.append(r)

def calculate_iterations_quality(
    quality_string: bytes32,
    size: int,
    difficulty: int,
    cc_sp_output_hash: bytes32,
) -> uint64:
    """
    Calculates the number of iterations from the quality. The quality is converted to a number
    between 0 and 1, then divided by expected plot size, and finally multiplied by the
    difficulty.
    """
    sp_quality_string: bytes32 = std_hash(quality_string + cc_sp_output_hash)
    iters = uint64(
        uint128(int(difficulty) << 25) //
        quality_str_to_quality(sp_quality_string, size))
    return max(iters, uint64(1))
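A worked numeric illustration of the formula above, with a made-up value standing in for quality_str_to_quality's output (that helper is not shown in this excerpt):

difficulty = 500_000_000_000
quality = 10 ** 13           # hypothetical: a larger quality means fewer iterations
iters = max((difficulty << 25) // quality, 1)
print(iters)                 # about 1.68 million iterations at this quality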