    def test_win_percentage(self):
        """
        Tests that the percentage of blocks won is proportional to the space of each farmer,
        with the assumption that all farmers have access to the same VDF speed.
        """
        farmer_ks = {
            uint8(32): 100,
            uint8(33): 100,
            uint8(34): 100,
            uint8(35): 100,
            uint8(36): 100,
        }
        farmer_space = {
            k: _expected_plot_size(uint8(k)) * count
            for k, count in farmer_ks.items()
        }
        total_space = sum(farmer_space.values())
        percentage_space = {
            k: float(sp / total_space)
            for k, sp in farmer_space.items()
        }
        wins = {k: 0 for k in farmer_ks.keys()}
        total_slots = 50
        num_sps = 16
        sp_interval_iters = uint64(100000000 // 32)
        difficulty = uint64(500000000000)

        for slot_index in range(total_slots):
            total_wins_in_slot = 0
            for sp_index in range(num_sps):
                sp_hash = std_hash(
                    slot_index.to_bytes(4, "big") +
                    sp_index.to_bytes(4, "big"))
                for k, count in farmer_ks.items():
                    for farmer_index in range(count):
                        quality = std_hash(
                            slot_index.to_bytes(4, "big") +
                            k.to_bytes(1, "big") + bytes(farmer_index))
                        required_iters = calculate_iterations_quality(
                            2**25, quality, k, difficulty, sp_hash)
                        if required_iters < sp_interval_iters:
                            wins[k] += 1
                            total_wins_in_slot += 1

        win_percentage = {
            k: wins[k] / sum(wins.values())
            for k in farmer_ks.keys()
        }
        for k in farmer_ks.keys():
            # Win rate is proportional to percentage of space
            assert abs(win_percentage[k] - percentage_space[k]) < 0.01
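The proportionality checked above follows from the iterations formula (see calculate_iterations_quality further down, Example #3): treating the hashed quality as a uniform 256-bit value, the chance that a single plot's required_iters falls below sp_interval_iters is roughly sp_interval_iters * plot_size / (difficulty * difficulty_constant_factor), i.e. linear in plot size. A minimal back-of-the-envelope sketch with the constants used in the test, assuming the plot-size formula ((2 * k) + 1) * 2 ** (k - 1) from chia's pos_quality module:

def expected_plot_size(k: int) -> int:
    # Assumed to match _expected_plot_size from chia's pos_quality module
    return ((2 * k) + 1) * (2 ** (k - 1))

sp_interval_iters = 100000000 // 32
difficulty = 500000000000
difficulty_constant_factor = 2 ** 25

for k in range(32, 37):
    # Approximate P(required_iters < sp_interval_iters) for one plot of size k
    p_win = sp_interval_iters * expected_plot_size(k) / (difficulty * difficulty_constant_factor)
    print(k, round(p_win, 4))  # grows linearly with expected_plot_size(k)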
    def test_win_percentage(self):
        """
        Tests that the percentage of blocks won is proportional to the space of each farmer,
        with the assumption that all farmers have access to the same VDF speed.
        """
        farmer_ks = [
            uint8(34),
            uint8(35),
            uint8(36),
            uint8(37),
            uint8(38),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(39),
            uint8(40),
            uint8(41),
        ]
        farmer_space = [_expected_plot_size(uint8(k)) for k in farmer_ks]
        total_space = sum(farmer_space)
        percentage_space = [float(sp / total_space) for sp in farmer_space]
        wins = [0 for _ in range(len(farmer_ks))]
        total_blocks = 5000

        for b_index in range(total_blocks):
            qualities = [
                std_hash(b_index.to_bytes(32, "big") + bytes(farmer_index))
                for farmer_index in range(len(farmer_ks))
            ]
            iters = [
                calculate_iterations_quality(
                    qualities[i],
                    farmer_ks[i],
                    uint64(50000000),
                    uint64(5000 * 30),
                ) for i in range(len(qualities))
            ]
            wins[iters.index(min(iters))] += 1

        win_percentage = [
            wins[w] / total_blocks for w in range(len(farmer_ks))
        ]
        for i in range(len(percentage_space)):
            # Win rate is proportional to percentage of space
            assert abs(win_percentage[i] - percentage_space[i]) < 0.01
Example #3
def calculate_iterations_quality(
    difficulty_constant_factor: uint128,
    quality_string: bytes32,
    size: int,
    difficulty: uint64,
    cc_sp_output_hash: bytes32,
) -> uint64:
    """
    Calculates the number of iterations from the quality. This is derived as the difficulty times the constant factor
    times a random number between 0 and 1 (based on quality string), divided by plot size.
    """
    sp_quality_string: bytes32 = std_hash(quality_string + cc_sp_output_hash)

    iters = uint64(
        int(difficulty) * int(difficulty_constant_factor) *
        int.from_bytes(sp_quality_string, "big", signed=False) //
        (int(pow(2, 256)) * int(_expected_plot_size(size))))
    return max(iters, uint64(1))
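Read as plain arithmetic, the function above computes difficulty * difficulty_constant_factor * r / plot_size, where r is the hash of quality_string + cc_sp_output_hash scaled into [0, 1) by the 2 ** 256 divisor. A self-contained sketch of that mapping, assuming std_hash is SHA-256 and the same plot-size formula noted earlier; the inputs in the demo loop are made up:

import hashlib

def iterations_from_quality_sketch(
    difficulty_constant_factor: int,
    quality_string: bytes,
    size: int,
    difficulty: int,
    cc_sp_output_hash: bytes,
) -> int:
    # r behaves like a uniform draw from [0, 2**256) for a well-mixed hash
    r = int.from_bytes(hashlib.sha256(quality_string + cc_sp_output_hash).digest(), "big")
    plot_size = ((2 * size) + 1) * (2 ** (size - 1))  # assumed _expected_plot_size formula
    return max(difficulty * difficulty_constant_factor * r // (2 ** 256 * plot_size), 1)

# Hypothetical inputs: a larger k (bigger plot) yields proportionally fewer iterations.
for k in (32, 33, 34):
    print(k, iterations_from_quality_sketch(2 ** 25, b"\x11" * 32, k, 500000000000, b"\x22" * 32))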
Example #4
def load_plots(
    provers: Dict[Path, PlotInfo],
    failed_to_open_filenames: Dict[Path, int],
    farmer_public_keys: Optional[List[G1Element]],
    pool_public_keys: Optional[List[G1Element]],
    match_str: Optional[str],
    root_path: Path,
    open_no_key_filenames=False,
) -> Tuple[bool, Dict[Path, PlotInfo], Dict[Path, int], Set[Path]]:
    start_time = time.time()
    config_file = load_config(root_path, "config.yaml", "harvester")
    changed = False
    no_key_filenames: Set[Path] = set()
    log.info(f'Searching directories {config_file["plot_directories"]}')

    plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(config_file)
    all_filenames: List[Path] = []
    for paths in plot_filenames.values():
        all_filenames += paths
    total_size = 0
    new_provers: Dict[Path, PlotInfo] = {}

    if match_str is not None:
        log.info(
            f'Only loading plots that contain "{match_str}" in the file or directory name'
        )

    for filename in all_filenames:
        filename_str = str(filename)
        if match_str is not None and match_str not in filename_str:
            continue
        if filename.exists():
            if filename in failed_to_open_filenames and (
                    time.time() - failed_to_open_filenames[filename]) < 1200:
                # Try once every 20 minutes to open the file
                continue
            if filename in provers:
                stat_info = filename.stat()
                if stat_info.st_mtime == provers[filename].time_modified:
                    total_size += stat_info.st_size
                    new_provers[filename] = provers[filename]
                    continue
            try:
                prover = DiskProver(str(filename))
                expected_size = _expected_plot_size(
                    prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR / 2.0
                stat_info = filename.stat()

                # TODO: consider checking if the file was just written to (which would mean that the file is still
                # being copied). A segfault might happen in this edge case.

                if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size:
                    log.warning(
                        f"Not farming plot {filename}. Size is {stat_info.st_size / (1024**3)} GiB, but expected"
                        f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied."
                    )
                    continue

                (
                    pool_public_key,
                    farmer_public_key,
                    local_master_sk,
                ) = parse_plot_info(prover.get_memo())
                # Only use plots that have the correct keys associated with them
                if farmer_public_keys is not None and farmer_public_key not in farmer_public_keys:
                    log.warning(
                        f"Plot {filename} has a farmer public key that is not in the farmer's pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                if pool_public_keys is not None and pool_public_key not in pool_public_keys:
                    log.warning(
                        f"Plot {filename} has a pool public key that is not in the farmer's pool pk list."
                    )
                    no_key_filenames.add(filename)
                    if not open_no_key_filenames:
                        continue

                stat_info = filename.stat()
                local_sk = master_sk_to_local_sk(local_master_sk)
                plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key(
                    local_sk.get_g1(), farmer_public_key)
                new_provers[filename] = PlotInfo(
                    prover,
                    pool_public_key,
                    farmer_public_key,
                    plot_public_key,
                    local_sk,
                    stat_info.st_size,
                    stat_info.st_mtime,
                )
                total_size += stat_info.st_size
                changed = True
            except Exception as e:
                tb = traceback.format_exc()
                log.error(f"Failed to open file {filename}. {e} {tb}")
                failed_to_open_filenames[filename] = int(time.time())
                continue
            log.info(
                f"Found plot {filename} of size {new_provers[filename].prover.get_size()}"
            )

    log.info(
        f"Loaded a total of {len(new_provers)} plots of size {total_size / (1024 ** 4)} TiB, in"
        f" {time.time()-start_time} seconds")
    return changed, new_provers, failed_to_open_filenames, no_key_filenames
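Two behaviours above are worth calling out: a plot whose st_mtime has not changed is reused from the existing provers dict without reopening it, and a file that previously failed to open is only retried after 20 minutes. A standalone sketch of that skip/retry decision, using hypothetical names rather than chia's API:

import time
from pathlib import Path
from typing import Dict

RETRY_SECONDS = 1200  # the same 20-minute back-off used above

def should_attempt_open(
    path: Path,
    cached_mtime: Dict[Path, float],  # path -> st_mtime recorded at the last successful load
    failed_at: Dict[Path, int],       # path -> unix timestamp of the last failed open
) -> bool:
    """Hypothetical helper mirroring load_plots' skip/retry logic."""
    if path in failed_at and time.time() - failed_at[path] < RETRY_SECONDS:
        return False  # failed recently: wait out the back-off before retrying
    if path in cached_mtime and path.stat().st_mtime == cached_mtime[path]:
        return False  # unchanged since the last load: keep the cached prover
    return True  # new or modified file: (re)open it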