async def request_signatures(self, request: harvester_protocol.RequestSignatures):
    """
    The farmer requests a signature on the header hash, for one of the proofs that we found.
    A signature is created on the header hash using the harvester private key. This can also
    be used for pooling.
    """
    plot_filename = Path(request.plot_identifier[64:]).resolve()
    try:
        plot_info = self.harvester.provers[plot_filename]
    except KeyError:
        self.harvester.log.warning(f"KeyError plot {plot_filename} does not exist.")
        return

    # Look up local_sk from plot to save locked memory
    (
        pool_public_key_or_puzzle_hash,
        farmer_public_key,
        local_master_sk,
    ) = parse_plot_info(plot_info.prover.get_memo())
    local_sk = master_sk_to_local_sk(local_master_sk)
    agg_pk = ProofOfSpace.generate_plot_public_key(local_sk.get_g1(), farmer_public_key)

    # This is only a partial signature. When combined with the farmer's half, it will
    # form a complete PrependSignature.
    message_signatures: List[Tuple[bytes32, G2Element]] = []
    for message in request.messages:
        signature: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk)
        message_signatures.append((message, signature))

    response: harvester_protocol.RespondSignatures = harvester_protocol.RespondSignatures(
        request.plot_identifier,
        request.challenge_hash,
        request.sp_hash,
        local_sk.get_g1(),
        farmer_public_key,
        message_signatures,
    )

    return make_msg(ProtocolMessageTypes.respond_signatures, response)
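
# A minimal sketch (not part of the original code) of how the harvester's partial
# signature above can be combined with the farmer's half and verified. It assumes
# blspy's AugSchemeMPL and that the plot public key is the simple sum of the local
# and farmer public keys, mirroring generate_plot_public_key in this version;
# local_sk, farmer_sk, and message are illustrative placeholders.
from blspy import AugSchemeMPL, G2Element, PrivateKey


def combine_signature_halves(local_sk: PrivateKey, farmer_sk: PrivateKey, message: bytes) -> G2Element:
    # The aggregate (plot) public key that both halves prepend when signing.
    agg_pk = local_sk.get_g1() + farmer_sk.get_g1()

    # Each party signs the same message with agg_pk prepended (third argument),
    # just as request_signatures does for the harvester's half.
    harvester_half: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk)
    farmer_half: G2Element = AugSchemeMPL.sign(farmer_sk, message, agg_pk)

    # Aggregating the two halves yields a signature that verifies under agg_pk.
    agg_sig: G2Element = AugSchemeMPL.aggregate([harvester_half, farmer_half])
    assert AugSchemeMPL.verify(agg_pk, message, agg_sig)
    return agg_sig
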
def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]:
    # Uses the DiskProver object to lookup qualities. This is a blocking call,
    # so it should be run in a thread pool.
    try:
        sp_challenge_hash = ProofOfSpace.calculate_pos_challenge(
            plot_info.prover.get_id(),
            new_challenge.challenge_hash,
            new_challenge.sp_hash,
        )
        try:
            quality_strings = plot_info.prover.get_qualities_for_challenge(sp_challenge_hash)
        except Exception as e:
            self.harvester.log.error(f"Error using prover object {e}")
            return []

        responses: List[Tuple[bytes32, ProofOfSpace]] = []
        if quality_strings is not None:
            # Found proofs of space (on average 1 is expected per plot)
            for index, quality_str in enumerate(quality_strings):
                required_iters: uint64 = calculate_iterations_quality(
                    self.harvester.constants.DIFFICULTY_CONSTANT_FACTOR,
                    quality_str,
                    plot_info.prover.get_size(),
                    new_challenge.difficulty,
                    new_challenge.sp_hash,
                )
                sp_interval_iters = calculate_sp_interval_iters(
                    self.harvester.constants, new_challenge.sub_slot_iters
                )
                if required_iters < sp_interval_iters:
                    # Found a very good proof of space! Will fetch the whole proof from disk,
                    # then send to farmer
                    try:
                        proof_xs = plot_info.prover.get_full_proof(sp_challenge_hash, index)
                    except RuntimeError:
                        self.harvester.log.error(f"Exception fetching full proof for {filename}")
                        continue

                    # Look up local_sk from plot to save locked memory
                    (
                        pool_public_key_or_puzzle_hash,
                        farmer_public_key,
                        local_master_sk,
                    ) = parse_plot_info(plot_info.prover.get_memo())
                    local_sk = master_sk_to_local_sk(local_master_sk)
                    plot_public_key = ProofOfSpace.generate_plot_public_key(
                        local_sk.get_g1(), farmer_public_key
                    )
                    responses.append(
                        (
                            quality_str,
                            ProofOfSpace(
                                sp_challenge_hash,
                                plot_info.pool_public_key,
                                plot_info.pool_contract_puzzle_hash,
                                plot_public_key,
                                uint8(plot_info.prover.get_size()),
                                proof_xs,
                            ),
                        )
                    )
        return responses
    except Exception as e:
        self.harvester.log.error(f"Unknown error: {e}")
        return []
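
# The comment in blocking_lookup notes that it is a blocking call and should run in
# a thread pool. A minimal sketch of that dispatch using the standard library's
# asyncio and concurrent.futures; the executor size and the lookup_in_thread name
# are illustrative, not taken from the original code.
import asyncio
import concurrent.futures


async def lookup_in_thread(executor: concurrent.futures.ThreadPoolExecutor, filename, plot_info):
    loop = asyncio.get_running_loop()
    # run_in_executor schedules blocking_lookup on a worker thread and returns an
    # awaitable, so other coroutines (e.g. handling new signage points) are not
    # stalled while the disk lookup runs.
    return await loop.run_in_executor(executor, blocking_lookup, filename, plot_info)
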
def check_plots(root_path, num, challenge_start, grep_string, list_duplicates, debug_show_memo):
    config = load_config(root_path, "config.yaml")
    if num is not None:
        if num == 0:
            log.warning("Not opening plot files")
        else:
            if num < 5:
                log.warning(f"{num} challenges is too low, setting it to the minimum of 5")
                num = 5
            if num < 30:
                log.warning("Use 30 challenges (our default) for balance of speed and accurate results")
    else:
        num = 30

    if challenge_start is not None:
        num_start = challenge_start
        num_end = num_start + num
    else:
        num_start = 0
        num_end = num
    challenges = num_end - num_start

    if grep_string is not None:
        match_str = grep_string
    else:
        match_str = None
    if list_duplicates:
        log.warning("Checking for duplicate Plot IDs")
        log.info("Plot filenames expected to end with -[64 char plot ID].plot")
    show_memo: bool = debug_show_memo

    if list_duplicates:
        plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(config["harvester"])
        all_filenames: List[Path] = []
        for paths in plot_filenames.values():
            all_filenames += paths
        find_duplicate_plot_IDs(all_filenames)

    if num == 0:
        return

    v = Verifier()
    log.info("Loading plots in config.yaml using plot_tools loading code\n")
    kc: Keychain = Keychain()
    pks = [master_sk_to_farmer_sk(sk).get_g1() for sk, _ in kc.get_all_private_keys()]
    pool_public_keys = [
        G1Element.from_bytes(bytes.fromhex(pk)) for pk in config["farmer"]["pool_public_keys"]
    ]
    _, provers, failed_to_open_filenames, no_key_filenames = load_plots(
        {},
        {},
        pks,
        pool_public_keys,
        match_str,
        show_memo,
        root_path,
        open_no_key_filenames=True,
    )
    if len(provers) > 0:
        log.info("")
        log.info("")
        log.info(f"Starting to test each plot with {num} challenges each\n")
    total_good_plots: Counter = Counter()
    total_bad_plots = 0
    total_size = 0
    bad_plots_list: List[Path] = []

    for plot_path, plot_info in provers.items():
        pr = plot_info.prover
        log.info(f"Testing plot {plot_path} k={pr.get_size()}")
        log.info(f"\tPool public key: {plot_info.pool_public_key}")

        # Look up local_sk from plot to save locked memory
        (
            pool_public_key_or_puzzle_hash,
            farmer_public_key,
            local_master_sk,
        ) = parse_plot_info(pr.get_memo())
        local_sk = master_sk_to_local_sk(local_master_sk)
        log.info(f"\tFarmer public key: {farmer_public_key}")
        log.info(f"\tLocal sk: {local_sk}")
        total_proofs = 0
        caught_exception: bool = False
        for i in range(num_start, num_end):
            challenge = std_hash(i.to_bytes(32, "big"))
            # Some plot errors cause get_qualities_for_challenge to throw a RuntimeError
            try:
                for index, quality_str in enumerate(pr.get_qualities_for_challenge(challenge)):
                    # Other plot errors cause get_full_proof or validate_proof to throw an AssertionError
                    try:
                        proof = pr.get_full_proof(challenge, index)
                        total_proofs += 1
                        ver_quality_str = v.validate_proof(pr.get_id(), pr.get_size(), challenge, proof)
                        assert quality_str == ver_quality_str
                    except AssertionError as e:
                        log.error(f"{type(e)}: {e} error in proving/verifying for plot {plot_path}")
                        caught_exception = True
            except BaseException as e:
                if isinstance(e, KeyboardInterrupt):
                    log.warning("Interrupted, closing")
                    return
                log.error(f"{type(e)}: {e} error in getting challenge qualities for plot {plot_path}")
                caught_exception = True
            if caught_exception is True:
                break
        if total_proofs > 0 and caught_exception is False:
            log.info(f"\tProofs {total_proofs} / {challenges}, {round(total_proofs/float(challenges), 4)}")
            total_good_plots[pr.get_size()] += 1
            total_size += plot_path.stat().st_size
        else:
            total_bad_plots += 1
            log.error(f"\tProofs {total_proofs} / {challenges}, {round(total_proofs/float(challenges), 4)}")
            bad_plots_list.append(plot_path)

    log.info("")
    log.info("")
    log.info("Summary")
    total_plots: int = sum(list(total_good_plots.values()))
    log.info(f"Found {total_plots} valid plots, total size {total_size / (1024 * 1024 * 1024 * 1024):.5f} TiB")
    for (k, count) in sorted(dict(total_good_plots).items()):
        log.info(f"{count} plots of size {k}")
    grand_total_bad = total_bad_plots + len(failed_to_open_filenames)
    if grand_total_bad > 0:
        log.warning(f"{grand_total_bad} invalid plots found:")
        for bad_plot_path in bad_plots_list:
            log.warning(f"{bad_plot_path}")
    if len(no_key_filenames) > 0:
        log.warning(
            f"There are {len(no_key_filenames)} plots with a farmer or pool public key that "
            f"is not on this machine. The farmer private key must be in the keychain in order to "
            f"farm them, use 'chia keys' to transfer keys. The pool public keys must be in the config.yaml"
        )
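
# The per-plot loop in check_plots boils down to: hash the challenge index, ask the
# prover for qualities, fetch each full proof, and confirm the verifier reproduces
# the same quality string. A minimal standalone sketch of that core, assuming the
# chiapos bindings (DiskProver, Verifier) used above; plot_path and num_challenges
# are illustrative placeholders.
from hashlib import sha256

from chiapos import DiskProver, Verifier


def check_one_plot(plot_path: str, num_challenges: int = 30) -> float:
    pr = DiskProver(plot_path)
    v = Verifier()
    total_proofs = 0
    for i in range(num_challenges):
        # Same challenge derivation as check_plots: std_hash(i.to_bytes(32, "big"))
        challenge = sha256(i.to_bytes(32, "big")).digest()
        for index, quality_str in enumerate(pr.get_qualities_for_challenge(challenge)):
            proof = pr.get_full_proof(challenge, index)
            # validate_proof recomputes the quality string from the proof; a healthy
            # plot returns the same value the prover reported.
            assert v.validate_proof(pr.get_id(), pr.get_size(), challenge, proof) == quality_str
            total_proofs += 1
    # On average roughly one proof per challenge is expected, so this ratio should
    # be close to 1.0 for a good plot.
    return total_proofs / num_challenges
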