def aggregate_pub_keys(_keys):
    try:
        keys = [PublicKey.from_bytes(k) for k in _keys]
    except RuntimeError:
        raise ValueError("Bad public key")
    return PublicKey.aggregate(keys).serialize()
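# Minimal usage sketch for aggregate_pub_keys, assuming the legacy blspy
# bindings (PrivateKey, PublicKey) that these snippets use; the seeds are
# illustrative only.
from blspy import PrivateKey

serialized_keys = [
    PrivateKey.from_seed(b"first illustrative seed").get_public_key().serialize(),
    PrivateKey.from_seed(b"second illustrative seed").get_public_key().serialize(),
]
agg_pk_bytes = aggregate_pub_keys(serialized_keys)
assert len(agg_pk_bytes) == 48  # serialized public keys are 48 bytes (see the tests below)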
def reply_commit(blockchain, logger, values):
    logger.debug("in reply_commit fxn")
    signature = values.get('tc_signed')
    node_address = values.get('address')
    if signature is None or node_address is None:
        return jsonify("Error: invalid json received, Bad request"), 400
    if node_address not in blockchain.public_key_list:
        return jsonify("Bad request"), 400
    signature = BLS.deserialize(signature, Signature)
    hash_of_priority_block = blockchain.proposed_block.get_hash()
    temp_array = []
    for c in hash_of_priority_block:
        temp_array.append(ord(c))
    msg = bytes(temp_array)
    signature.set_aggregation_info(
        AggregationInfo.from_msg(
            PublicKey.from_bytes(
                bytes(blockchain.public_key_list[node_address], "ISO-8859-1")),
            msg))
    verify_sign = signature.verify()
    if verify_sign:
        logger.debug("reply commit signature verified")
        blockchain.commit_accepted[node_address] = signature
        # print("commit accepted by ", len(blockchain.commit_accepted))
        return jsonify("True"), 200
    else:
        logger.warning("reply commit signature tampered")
        return jsonify("False"), 300
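# Hedged helper sketch: reply_commit above (and verified_commit further down)
# turn a hash string into bytes one code point at a time; for ASCII hex digests
# this is equivalent to a single-byte encoding. hash_to_msg is a hypothetical
# name, not part of the original handlers.
def hash_to_msg(block_hash: str) -> bytes:
    # ord() of each character gives the byte value for ASCII/Latin-1 text.
    return bytes(ord(c) for c in block_hash)

assert hash_to_msg("ab12") == b"ab12"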
async def harvester_handshake(
        self, harvester_handshake: harvester_protocol.HarvesterHandshake):
    """
    Handshake between the harvester and farmer. The harvester receives the pool public keys,
    which must be put into the plots, before the plotting process begins. We cannot
    use any plots which don't have one of the pool keys.
    """
    for partial_filename, plot_config in self.plot_config["plots"].items():
        if "plot_root" in self.config:
            filename = os.path.join(self.config["plot_root"], partial_filename)
        else:
            filename = os.path.join(ROOT_DIR, "plots", partial_filename)
        pool_pubkey = PublicKey.from_bytes(
            bytes.fromhex(plot_config["pool_pk"]))

        # Only use plots that have the correct pools associated with them
        if pool_pubkey in harvester_handshake.pool_pubkeys:
            if os.path.isfile(filename):
                self.provers[partial_filename] = DiskProver(filename)
            else:
                log.warning(f"Plot at {filename} does not exist.")
        else:
            log.warning(
                f"Plot {filename} has a pool key that is not in the farmer's pool_pk list."
            )
async def get_derivation_record(
        self, index: uint32, wallet_id: uint32) -> Optional[DerivationRecord]:
    """
    Returns the derivation record by index and wallet id.
    """
    cursor = await self.db_connection.execute(
        "SELECT * FROM derivation_paths WHERE derivation_index=? and wallet_id=?;",
        (
            index,
            wallet_id,
        ),
    )
    row = await cursor.fetchone()
    await cursor.close()

    if row is not None and row[0] is not None:
        return DerivationRecord(
            row[0],
            bytes.fromhex(row[2]),
            PublicKey.from_bytes(bytes.fromhex(row[1])),
            row[3],
            row[4],
        )
    return None
async def get_pool_pks_hack(self) -> List[Tuple[uint32, PublicKey]]:
    # TODO: this API call is a hack to allow us to see block winners. Replace with coin/UTXO set.
    cursor = await self.db.execute("SELECT * from small_header_blocks")
    rows = await cursor.fetchall()
    return [(
        SmallHeaderBlock.from_bytes(row[3]).height,
        PublicKey.from_bytes(bytes.fromhex(row[2])),
    ) for row in rows]
async def add_plot(self, request: Dict) -> Dict:
    filename = request["filename"]
    if "pool_pk" in request:
        pool_pk = PublicKey.from_bytes(bytes.fromhex(request["pool_pk"]))
    else:
        pool_pk = None
    plot_sk = PrivateKey.from_bytes(bytes.fromhex(request["plot_sk"]))
    success = self.service._add_plot(filename, plot_sk, pool_pk)
    return {"success": success}
def main():
    """
    Script for checking all plots in the plots.yaml file. Specify a number of
    challenges to test for each plot.
    """
    parser = argparse.ArgumentParser(description="Chia plot checking script.")
    parser.add_argument("-n", "--num", help="Number of challenges", type=int,
                        default=1000)
    args = parser.parse_args()

    v = Verifier()
    if os.path.isfile(plot_config_filename):
        plot_config = safe_load(open(plot_config_filename, "r"))
        for plot_filename, plot_info in plot_config["plots"].items():
            plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
                PublicKey.from_bytes(bytes.fromhex(plot_info["pool_pk"])),
                PrivateKey.from_bytes(bytes.fromhex(
                    plot_info["sk"])).get_public_key(),
            )
            if not os.path.isfile(plot_filename):
                # Tries relative path
                full_path: str = os.path.join(plot_root, plot_filename)
                if not os.path.isfile(full_path):
                    # Tries absolute path
                    full_path = plot_filename
                    if not os.path.isfile(full_path):
                        print(f"Plot file {full_path} not found.")
                        continue
                pr = DiskProver(full_path)
            else:
                pr = DiskProver(plot_filename)

            total_proofs = 0
            try:
                for i in range(args.num):
                    challenge = sha256(i.to_bytes(32, "big")).digest()
                    for index, quality in enumerate(
                            pr.get_qualities_for_challenge(challenge)):
                        proof = pr.get_full_proof(challenge, index)
                        total_proofs += 1
                        ver_quality = v.validate_proof(plot_seed, pr.get_size(),
                                                       challenge, proof)
                        assert quality == ver_quality
            except BaseException as e:
                print(
                    f"{type(e)}: {e} error in proving/verifying for plot {plot_filename}"
                )
            print(
                f"{plot_filename}: Proofs {total_proofs} / {args.num}, "
                f"{round(total_proofs/float(args.num), 4)}"
            )
    else:
        print(f"No plot file found at {plot_config_filename}")
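# Small sketch of the challenge derivation the checker above uses: challenge i
# is the SHA-256 digest of the 32-byte big-endian encoding of i.
from hashlib import sha256

challenge_0 = sha256((0).to_bytes(32, "big")).digest()
assert len(challenge_0) == 32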
def set_sender_values(self, AP_puzzlehash, a_pubkey_used):
    if isinstance(AP_puzzlehash, str):
        self.AP_puzzlehash = puzzlehash_from_string(AP_puzzlehash)
    else:
        self.AP_puzzlehash = AP_puzzlehash

    if isinstance(a_pubkey_used, str):
        a_pubkey = PublicKey.from_bytes(bytes.fromhex(a_pubkey_used))
        self.a_pubkey = a_pubkey
    else:
        self.a_pubkey = a_pubkey_used
def __init__(
    self,
    farmer_config: Dict,
    pool_config: Dict,
    keychain: Keychain,
    override_constants={},
):
    self.config = farmer_config
    self.harvester_responses_header_hash: Dict[bytes32, bytes32] = {}
    self.harvester_responses_challenge: Dict[bytes32, bytes32] = {}
    self.harvester_responses_proofs: Dict[bytes32, ProofOfSpace] = {}
    self.harvester_responses_proof_hash_to_qual: Dict[bytes32, bytes32] = {}
    self.challenges: Dict[
        uint128, List[farmer_protocol.ProofOfSpaceFinalized]] = {}
    self.challenge_to_weight: Dict[bytes32, uint128] = {}
    self.challenge_to_height: Dict[bytes32, uint32] = {}
    self.challenge_to_best_iters: Dict[bytes32, uint64] = {}
    self.challenge_to_estimates: Dict[bytes32, List[float]] = {}
    self.seen_challenges: Set[bytes32] = set()
    self.unfinished_challenges: Dict[uint128, List[bytes32]] = {}
    self.current_weight: uint128 = uint128(0)
    self.proof_of_time_estimate_ips: uint64 = uint64(10000)
    self.constants = consensus_constants.copy()
    self._shut_down = False
    self.server = None
    self.keychain = keychain
    self.state_changed_callback: Optional[Callable] = None

    # This is the farmer configuration
    self.wallet_target = bytes.fromhex(
        self.config["xch_target_puzzle_hash"])
    self.pool_public_keys = [
        PublicKey.from_bytes(bytes.fromhex(pk))
        for pk in self.config["pool_public_keys"]
    ]

    # This is the pool configuration, which should be moved out to the pool once it exists
    self.pool_target = bytes.fromhex(pool_config["xch_target_puzzle_hash"])
    self.pool_sks = [
        sk.get_private_key()
        for (sk, _) in self.keychain.get_all_private_keys()
    ]
    self.pool_sks_map: Dict = {}
    for key in self.pool_sks:
        self.pool_sks_map[bytes(key.get_public_key())] = key

    assert len(self.wallet_target) == 32
    assert len(self.pool_target) == 32
    if len(self.pool_sks) == 0:
        error_str = "No keys exist. Please run 'chia keys generate' or open the UI."
        raise RuntimeError(error_str)

    for key, value in override_constants.items():
        self.constants[key] = value
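# Sketch of the pool_sks_map pattern built in __init__ above: private keys are
# keyed by their serialized public key so a pool_pk seen later can be mapped
# back to the matching signing key. Assumes the legacy blspy bindings; the seed
# is illustrative.
from blspy import PrivateKey

pool_sk = PrivateKey.from_seed(b"illustrative farmer pool seed")
pool_sks_map = {bytes(pool_sk.get_public_key()): pool_sk}

reported_pool_pk = pool_sk.get_public_key()   # e.g. taken from a proof of space
assert pool_sks_map[bytes(reported_pool_pk)] is pool_sk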
def verified_commit(blockchain, logger, values):
    block_hash = values.get('block')
    if block_hash is None:
        return jsonify("tampered data"), 401
    block = blockchain.commit_verified_list.get(block_hash)
    if block is None:
        return jsonify("verification block missing"), 402
    signers = values.get('n_list')
    co_sig = values.get('co_sig')
    if (signers is None) or (co_sig is None):
        return jsonify("tampered block data"), 403
    co_sig = BLS.deserialize(co_sig, Signature)
    flag = block.verify_block(blockchain)
    if not flag:
        return jsonify("invalid block!"), 301
    node_address = block.harvester
    block_hash_hexdigest = block.get_hash()
    if node_address in blockchain.public_key_list:
        if len(signers) / len(blockchain.public_key_list) > 0.66:
            temp_array = []
            for c in block_hash_hexdigest:
                temp_array.append(ord(c))
            msg = bytes(temp_array)
            agg_info_list = []
            for node in signers:
                if node in blockchain.public_key_list:
                    agg_info = AggregationInfo.from_msg(
                        PublicKey.from_bytes(
                            bytes(blockchain.public_key_list[node],
                                  "ISO-8859-1")), msg)
                    agg_info_list.append(agg_info)
                else:
                    return jsonify("Blockchain couldn't be updated"), 302
            agg_public_key = AggregationInfo.merge_infos(agg_info_list)
            co_sig.set_aggregation_info(agg_public_key)
            verify_signature = co_sig.verify()
            if verify_signature:
                logger.debug("verified commit block " + block.get_hash())
                block.signers = signers
                block.signature = values.get('co_sig')
                blockchain.update_blockchain(block)
                return jsonify("Blockchain updated"), 200
            else:
                return jsonify("Blockchain couldn't be updated"), 303
        else:
            logger.warning("didn't get a majority of signers")
            return jsonify("Blockchain couldn't be updated"), 304
    logger.debug("node address doesn't exist")
    return jsonify("Blockchain couldn't be updated"), 305
async def request_proof_of_space(
    self, request: harvester_protocol.RequestProofOfSpace
):
    """
    The farmer requests a signature on the header hash, for one of the proofs that we found.
    We look up the correct plot based on the quality, lookup the proof, and return it.
    """
    response: Optional[harvester_protocol.RespondProofOfSpace] = None
    try:
        # Using the quality string, find the right plot and index from our solutions
        challenge_hash, filename, index = self.challenge_hashes[
            request.quality_string
        ]
    except KeyError:
        log.warning(f"Quality string {request.quality_string} not found")
        return
    if index is not None:
        proof_xs: bytes
        try:
            try:
                proof_xs = self.provers[filename].get_full_proof(
                    challenge_hash, index
                )
            except RuntimeError:
                self.provers[filename] = DiskProver(str(filename))
                proof_xs = self.provers[filename].get_full_proof(
                    challenge_hash, index
                )
        except KeyError:
            log.warning(f"KeyError plot {filename} does not exist.")
            return

        pool_pubkey = PublicKey.from_bytes(
            bytes.fromhex(self.plot_config["plots"][filename]["pool_pk"])
        )
        plot_pubkey = PrivateKey.from_bytes(
            bytes.fromhex(self.plot_config["plots"][filename]["sk"])
        ).get_public_key()
        proof_of_space: ProofOfSpace = ProofOfSpace(
            challenge_hash,
            pool_pubkey,
            plot_pubkey,
            uint8(self.provers[filename].get_size()),
            proof_xs,
        )

        response = harvester_protocol.RespondProofOfSpace(
            request.quality_string, proof_of_space
        )
    if response:
        yield OutboundMessage(
            NodeType.FARMER,
            Message("respond_proof_of_space", response),
            Delivery.RESPOND,
        )
def verify_sig(msg, _pub_key, _sig):
    try:
        sig = Signature.from_bytes(_sig)
    except RuntimeError:
        raise ValueError("Bad signature")
    try:
        pub_key = PublicKey.from_bytes(_pub_key)
    except RuntimeError:
        raise ValueError("Bad public key")
    sig.set_aggregation_info(AggregationInfo.from_msg(pub_key, msg))
    ok = sig.verify()
    return ok
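# Round-trip sketch for verify_sig, reusing only calls that appear in the test
# functions in this file (from_seed, sign, serialize); the seed and message are
# illustrative. Assumes the legacy blspy bindings.
from blspy import PrivateKey

sk = PrivateKey.from_seed(b"verify_sig example seed")
msg = b"example message"
pk_bytes = sk.get_public_key().serialize()
sig_bytes = sk.sign(msg).serialize()
assert verify_sig(msg, pk_bytes, sig_bytes)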
def verify_signature(self):
    temp_array = []
    msg = self.sender + self.to + str(self.amount) + str(self.timestamp)
    for c in msg:
        temp_array.append(ord(c))
    msg = bytes(temp_array)
    _signature = bytes(self.signature, "ISO-8859-1")
    _signature = Signature.from_bytes(_signature)
    public_key = PublicKey.from_bytes(bytes(self.sender, "ISO-8859-1"))
    _signature.set_aggregation_info(
        AggregationInfo.from_msg(public_key, msg))
    return _signature.verify()
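# Hedged sketch of the signing side implied by verify_signature above: the
# sender field is assumed to hold the ISO-8859-1 decoded serialized public key,
# and the signature field the serialized signature in the same encoding.
# sign_transaction_fields is a hypothetical helper, not part of the original
# class. Assumes the legacy blspy bindings.
from blspy import PrivateKey

def sign_transaction_fields(sk: PrivateKey, to: str, amount, timestamp) -> dict:
    # Serialized key/signature bytes survive a round trip through ISO-8859-1 strings.
    sender = sk.get_public_key().serialize().decode("ISO-8859-1")
    msg = bytes(ord(c) for c in sender + to + str(amount) + str(timestamp))
    sig = sk.sign(msg)
    return {
        "sender": sender,
        "signature": sig.serialize().decode("ISO-8859-1"),
    }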
def load_plots(
    config_file: Dict,
    plot_config_file: Dict,
    pool_pubkeys: Optional[List[PublicKey]],
    root_path: Path,
) -> Tuple[Dict[str, DiskProver], List[str], List[str]]:
    provers: Dict[str, DiskProver] = {}
    failed_to_open_filenames: List[str] = []
    not_found_filenames: List[str] = []
    for partial_filename_str, plot_config in plot_config_file["plots"].items():
        plot_root = path_from_root(root_path, config_file.get("plot_root", "."))
        partial_filename = plot_root / partial_filename_str
        potential_filenames = [
            partial_filename,
            path_from_root(plot_root, partial_filename_str),
        ]
        pool_pubkey = PublicKey.from_bytes(
            bytes.fromhex(plot_config["pool_pk"]))

        # Only use plots that have the correct pools associated with them
        if pool_pubkeys is not None and pool_pubkey not in pool_pubkeys:
            log.warning(
                f"Plot {partial_filename} has a pool key that is not in the farmer's pool_pk list."
            )
            continue

        found = False
        failed_to_open = False
        for filename in potential_filenames:
            if filename.exists():
                try:
                    provers[partial_filename_str] = DiskProver(str(filename))
                except Exception as e:
                    log.error(f"Failed to open file {filename}. {e}")
                    failed_to_open = True
                    failed_to_open_filenames.append(partial_filename_str)
                    break
                log.info(
                    f"Loaded plot {filename} of size {provers[partial_filename_str].get_size()}"
                )
                found = True
                break
        if not found and not failed_to_open:
            log.warning(f"Plot at {potential_filenames} does not exist.")
            not_found_filenames.append(partial_filename_str)
    return (provers, failed_to_open_filenames, not_found_filenames)
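# Minimal sketch of the pool-key filter shared by the load_plots /
# harvester_handshake variants in this file, assuming the legacy blspy
# bindings. The plot_config dict mirrors a plots.yaml entry; the seed is
# illustrative and plot_belongs_to_pool is a hypothetical helper name.
from blspy import PrivateKey, PublicKey

def plot_belongs_to_pool(plot_config: dict, pool_pubkeys: list) -> bool:
    # Parse the plot's pool public key from its hex form, as load_plots does.
    pool_pubkey = PublicKey.from_bytes(bytes.fromhex(plot_config["pool_pk"]))
    return pool_pubkey in pool_pubkeys

pool_pk = PrivateKey.from_seed(b"illustrative pool seed").get_public_key()
entry = {"pool_pk": bytes(pool_pk).hex()}
assert plot_belongs_to_pool(entry, [pool_pk])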
def deserialize(_obj, _type):
    try:
        # Objects that are already deserialized keys/signatures pass through unchanged.
        if isinstance(_obj, (PrivateKey, PublicKey, Signature)):
            return _obj
        # Strings arriving over the wire are ISO-8859-1 encoded serializations.
        _obj = bytes(_obj, "ISO-8859-1")
        if _type == PrivateKey:
            return PrivateKey.from_bytes(_obj)
        elif _type == PublicKey:
            return PublicKey.from_bytes(_obj)
        elif _type == Signature:
            return Signature.from_bytes(_obj)
        return _obj
    except Exception as e:
        print("incompatible parameters passed to deserialize")
        print(e)
    return None
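# Round-trip sketch for deserialize: a signature serialized to an ISO-8859-1
# string (the wire format used by the handlers in this file) is decoded back
# into a Signature object. Assumes the legacy blspy bindings; seed and payload
# are illustrative.
from blspy import PrivateKey, Signature

sk = PrivateKey.from_seed(b"deserialize example seed")
sig = sk.sign(b"illustrative payload")
sig_str = sig.serialize().decode("ISO-8859-1")   # JSON-friendly string form
restored = deserialize(sig_str, Signature)
assert restored.serialize() == sig.serialize()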
async def spend_rl_coin(wallet, ledger_api):
    if wallet.rl_available_balance() == 0:
        print("Available rate limited coin balance is 0!")
        return
    receiver_pubkey = input("Enter receiver's pubkey: 0x")
    receiver_pubkey = PublicKey.from_bytes(bytes.fromhex(receiver_pubkey)).serialize()
    amount = -1
    while amount > wallet.current_rl_balance or amount < 0:
        amount = input("Enter amount to give recipient: ")
        if amount == "q":
            return
        if not amount.isdigit():
            amount = -1
        amount = int(amount)

    puzzlehash = wallet.get_puzzlehash_for_pk(receiver_pubkey)
    spend_bundle = wallet.rl_generate_signed_transaction(amount, puzzlehash)
    _ = await ledger_api.push_tx(tx=spend_bundle)
def _get_plots(self) -> Tuple[List[Dict], List[str], List[str]]:
    response_plots: List[Dict] = []
    for path, prover in self.provers.items():
        plot_pk = PrivateKey.from_bytes(
            bytes.fromhex(
                self.plot_config["plots"][path]["sk"])).get_public_key()
        pool_pk = PublicKey.from_bytes(
            bytes.fromhex(self.plot_config["plots"][path]["pool_pk"]))
        response_plots.append({
            "filename": str(path),
            "size": prover.get_size(),
            "plot-seed": prover.get_id(),
            "memo": prover.get_memo(),
            "plot_pk": bytes(plot_pk),
            "pool_pk": bytes(pool_pk),
        })
    return (response_plots, self.failed_to_open_filenames,
            self.not_found_filenames)
async def harvester_handshake(
        self, harvester_handshake: harvester_protocol.HarvesterHandshake):
    """
    Handshake between the harvester and farmer. The harvester receives the pool public keys,
    which must be put into the plots, before the plotting process begins. We cannot
    use any plots which don't have one of the pool keys.
    """
    for partial_filename_str, plot_config in self.plot_config[
            "plots"].items():
        plot_root = path_from_root(DEFAULT_ROOT_PATH,
                                   self.config.get("plot_root", "."))
        partial_filename = plot_root / partial_filename_str
        potential_filenames = [
            partial_filename,
            path_from_root(plot_root, partial_filename_str),
        ]
        pool_pubkey = PublicKey.from_bytes(
            bytes.fromhex(plot_config["pool_pk"]))

        # Only use plots that have the correct pools associated with them
        if pool_pubkey not in harvester_handshake.pool_pubkeys:
            log.warning(
                f"Plot {partial_filename} has a pool key that is not in the farmer's pool_pk list."
            )
            continue

        found = False
        failed_to_open = False
        for filename in potential_filenames:
            if filename.exists():
                try:
                    self.provers[partial_filename_str] = DiskProver(
                        str(filename))
                except ValueError:
                    log.error(f"Failed to open file {filename}.")
                    failed_to_open = True
                    break
                log.info(
                    f"Farming plot {filename} of size {self.provers[partial_filename_str].get_size()}"
                )
                found = True
                break
        if not found and not failed_to_open:
            log.warning(f"Plot at {potential_filenames} does not exist.")
def test2():
    seed = bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    seed2 = bytes([1, 20, 102, 229, 1, 157])

    sk = PrivateKey.from_seed(seed)
    sk_cp = PrivateKey.from_seed(seed)
    sk2 = PrivateKey.from_seed(seed2)
    pk = sk.get_public_key()
    pk2 = sk2.get_public_key()

    assert (sk == sk_cp)
    assert (sk != sk2)
    assert (pk.get_fingerprint() == 0xddad59bb)

    sk2_ser = sk2.serialize()
    pk2_ser = pk2.serialize()
    pk2_copy = PublicKey.from_bytes(pk2_ser)
    assert (pk2 == pk2_copy)
    assert (pk != pk2)
    assert (len(pk2_ser) == 48)
    assert (len(sk2_ser) == 32)

    message = bytes("this is the message", "utf-8")
    sig = sk.sign(message)
    sig_ser = sig.serialize()
    sig_cp = Signature.from_bytes(sig_ser)
    a1 = AggregationInfo.from_msg(pk, message)
    sig_cp.set_aggregation_info(a1)
    a2 = sig_cp.get_aggregation_info()
    assert (a1 == a2)
    sig2 = sk2.sign(message)

    assert (len(sig_ser) == 96)
    assert (sig != sig2)
    assert (sig == sig_cp)

    sig_agg = Signature.aggregate([sig, sig2])

    result = sig_cp.verify()
    result2 = sig2.verify()
    result3 = sig_agg.verify()
    assert (result)
    assert (result2)
    assert (result3)
    sk2 = sk
def verify_block_signature(self, blockchain):
    if type(self.txn) == list:
        txn_hash = self.txn
        txn_hash.sort()
    elif type(self.txn) == dict:
        txn_hash = list(self.txn.keys())
        txn_hash.sort()
    else:
        return False
    msg = dumps({
        "index": self.index,
        "harvester": self.harvester,
        "previous_hash": self.previous_hash,
        "txn": txn_hash,
        "signature": "",
        "signers": "",
        "timestamp": self.timestamp,
    })
    temp_array = []
    for c in msg:
        temp_array.append(ord(c))
    msg = bytes(temp_array)
    if not isinstance(self.signature, Signature):
        self.signature = BLS.deserialize(self.signature, Signature)
    agg_info_list = []
    for node in self.signers:
        if node in blockchain.public_key_list:
            agg_info = AggregationInfo.from_msg(
                PublicKey.from_bytes(
                    bytes(blockchain.public_key_list[node], "ISO-8859-1")),
                msg)
            agg_info_list.append(agg_info)
        else:
            return False
    agg_public_key = AggregationInfo.merge_infos(agg_info_list)
    self.signature.set_aggregation_info(agg_public_key)
    return self.signature.verify()
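# Hedged sketch of how the aggregated signers' signature checked above could be
# produced, using only calls that appear in the tests in this file (sign,
# Signature.aggregate, AggregationInfo.from_msg / merge_infos). In the real
# flow each signer node contributes one signature over the same block message;
# the seeds and message here are illustrative.
from blspy import AggregationInfo, PrivateKey, Signature

sk1 = PrivateKey.from_seed(b"illustrative signer one")
sk2 = PrivateKey.from_seed(b"illustrative signer two")
msg = bytes(ord(c) for c in "block hash hex digest")

co_sig = Signature.aggregate([sk1.sign(msg), sk2.sign(msg)])
infos = [AggregationInfo.from_msg(s.get_public_key(), msg) for s in (sk1, sk2)]
co_sig.set_aggregation_info(AggregationInfo.merge_infos(infos))
assert co_sig.verify()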
async def get_derivation_record_for_puzzle_hash(
        self, puzzle_hash: str) -> Optional[DerivationRecord]:
    """
    Returns the derivation record for the given puzzle hash.
    """
    cursor = await self.db_connection.execute(
        "SELECT * FROM derivation_paths WHERE puzzle_hash=?;",
        (puzzle_hash, ),
    )
    row = await cursor.fetchone()
    await cursor.close()

    if row is not None and row[0] is not None:
        return DerivationRecord(
            row[0],
            bytes.fromhex(row[2]),
            PublicKey.from_bytes(bytes.fromhex(row[1])),
            row[3],
            row[4],
        )
    return None
def load_plots(config_file: Dict, plot_config_file: Dict,
               pool_pubkeys: List[PublicKey]) -> Dict[str, DiskProver]:
    provers: Dict[str, DiskProver] = {}
    for partial_filename_str, plot_config in plot_config_file["plots"].items():
        plot_root = path_from_root(DEFAULT_ROOT_PATH,
                                   config_file.get("plot_root", "."))
        partial_filename = plot_root / partial_filename_str
        potential_filenames = [
            partial_filename,
            path_from_root(plot_root, partial_filename_str),
        ]
        pool_pubkey = PublicKey.from_bytes(
            bytes.fromhex(plot_config["pool_pk"]))

        # Only use plots that have the correct pools associated with them
        if pool_pubkey not in pool_pubkeys:
            log.warning(
                f"Plot {partial_filename} has a pool key that is not in the farmer's pool_pk list."
            )
            continue

        found = False
        failed_to_open = False
        for filename in potential_filenames:
            if filename.exists():
                try:
                    provers[partial_filename_str] = DiskProver(str(filename))
                except ValueError as e:
                    log.error(f"Failed to open file {filename}. {e}")
                    failed_to_open = True
                    break
                log.info(
                    f"Loaded plot {filename} of size {provers[partial_filename_str].get_size()}"
                )
                found = True
                break
        if not found and not failed_to_open:
            log.warning(f"Plot at {potential_filenames} does not exist.")
    return provers
def verify_offline_block_signature(self, blockchain):
    if type(self.txn) == list:
        txn_hash = self.txn
        txn_hash.sort()
    elif type(self.txn) == dict:
        txn_hash = list(self.txn.keys())
        txn_hash.sort()
    else:
        return False
    block = copy(self)
    block.signature = ""
    block.signers = ""
    block.txn = txn_hash
    msg = block.get_hash()
    temp_array = []
    for c in msg:
        temp_array.append(ord(c))
    msg = bytes(temp_array)
    if not isinstance(self.signature, Signature):
        self.signature = BLS.deserialize(self.signature, Signature)
    agg_info_list = []
    for node in self.signers:
        if node in blockchain.public_key_list:
            agg_info = AggregationInfo.from_msg(
                PublicKey.from_bytes(
                    bytes(blockchain.public_key_list[node], "ISO-8859-1")),
                msg)
            agg_info_list.append(agg_info)
        else:
            return False
    agg_public_key = AggregationInfo.merge_infos(agg_info_list)
    self.signature.set_aggregation_info(agg_public_key)
    return self.signature.verify()
async def harvester_handshake(
    self, harvester_handshake: harvester_protocol.HarvesterHandshake
):
    """
    Handshake between the harvester and farmer. The harvester receives the pool public keys,
    which must be put into the plots, before the plotting process begins. We cannot
    use any plots which don't have one of the pool keys.
    """
    for partial_filename, plot_config in self.plot_config["plots"].items():
        potential_filenames = [partial_filename]
        if "plot_root" in self.config:
            potential_filenames.append(
                os.path.join(self.config["plot_root"], partial_filename)
            )
        else:
            potential_filenames.append(
                os.path.join(ROOT_DIR, "plots", partial_filename)
            )
        pool_pubkey = PublicKey.from_bytes(bytes.fromhex(plot_config["pool_pk"]))

        # Only use plots that have the correct pools associated with them
        if pool_pubkey not in harvester_handshake.pool_pubkeys:
            log.warning(
                f"Plot {partial_filename} has a pool key that is not in the farmer's pool_pk list."
            )
            continue

        found = False
        for filename in potential_filenames:
            if os.path.isfile(filename):
                self.provers[partial_filename] = DiskProver(filename)
                log.info(
                    f"Farming plot {filename} of size {self.provers[partial_filename].get_size()}"
                )
                found = True
                break
        if not found:
            log.warning(f"Plot at {potential_filenames} does not exist.")
async def create_rl_coin(wallet, ledger_api):
    utxo_list = list(wallet.my_utxos)
    if len(utxo_list) == 0:
        print("No UTXOs available.")
        return
    print("Select UTXO for origin: ")
    num = 0
    for utxo in utxo_list:
        print(f"{num}) coin_name:{utxo.name()} amount:{utxo.amount}")
        num += 1
    selected = get_int("Select UTXO for origin: ")
    origin = utxo_list[selected]
    print("Rate limit is defined as an amount of Chia per time interval (blocks).\n")
    rate = get_int("Specify the Chia amount limit: ")
    interval = get_int("Specify the interval length (blocks): ")
    print("Specify the pubkey of the receiver")
    pubkey = input(prompt)
    my_pubkey = hexbytes(wallet.get_next_public_key().serialize())
    send_amount = get_int("Enter amount to give recipient: ")
    print(f"\n\nInitialization string: {origin.parent_coin_info}:{origin.puzzle_hash}:"
          f"{origin.amount}:{origin.name()}:{rate}:{interval}:{my_pubkey}")
    print("\nPaste the initialization string to the receiver")
    print("Press Enter to continue:")
    input(prompt)
    pubkey = PublicKey.from_bytes(bytes.fromhex(pubkey)).serialize()
    rl_puzzle = wallet.rl_puzzle_for_pk(pubkey, rate, interval, origin.name(),
                                        my_pubkey)
    rl_puzzlehash = ProgramHash(rl_puzzle)
    wallet.clawback_puzzlehash = rl_puzzlehash
    wallet.clawback_origin = origin.name()
    wallet.clawback_limit = rate
    wallet.clawback_interval = interval
    wallet.clawback_pk = my_pubkey
    wallet.rl_receiver_pk = pubkey
    spend_bundle = wallet.generate_signed_transaction_with_origin(
        send_amount, rl_puzzlehash, origin.name())
    _ = await ledger_api.push_tx(tx=spend_bundle)
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """
    root_path = DEFAULT_ROOT_PATH
    plot_config_filename = config_path_for_filename(root_path, "plots.yaml")
    key_config_filename = config_path_for_filename(root_path, "keys.yaml")

    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=20)
    parser.add_argument("-n", "--num_plots", help="Number of plots", type=int,
                        default=10)
    parser.add_argument("-i", "--index", help="First plot index", type=int,
                        default=0)
    parser.add_argument("-p", "--pool_pub_key", help="Hex public key of pool",
                        type=str, default="")
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help="Temporary directory for plotting files (relative to final directory)",
        type=Path,
        default=Path("./plots.tmp"),
    )
    new_plots_root = path_from_root(
        root_path,
        load_config(root_path, "config.yaml").get("harvester", {}).get(
            "new_plot_root", "plots"),
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=Path,
        default=new_plots_root,
    )

    # We need the keys file to access the pool keys (if they exist) and the sk_seed.
    args = parser.parse_args()
    if not key_config_filename.exists():
        raise RuntimeError("Keys not generated. Run chia-generate-keys")

    # The seed is what will be used to generate a private key for each plot
    key_config = load_config(root_path, key_config_filename)
    sk_seed: bytes = bytes.fromhex(key_config["sk_seed"])

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        pool_sk = PrivateKey.from_bytes(
            bytes.fromhex(key_config["pool_sks"][0]))
        pool_pk = pool_sk.get_public_key()

    print(
        f"Creating {args.num_plots} plots, from index {args.index} to "
        f"{args.index + args.num_plots - 1}, of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    tmp_dir = args.final_dir / args.tmp_dir
    mkdir(tmp_dir)
    mkdir(args.final_dir)
    for i in range(args.index, args.index + args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(
            sk_seed + args.size.to_bytes(1, "big") + i.to_bytes(4, "big"))

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key())
        filename: str = f"plot-{i}-{args.size}-{plot_seed}.dat"
        full_path: Path = args.final_dir / filename
        if not full_path.exists():
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(tmp_dir),
                str(args.final_dir),
                filename,
                args.size,
                bytes([]),
                plot_seed,
            )
        else:
            print(f"Plot {filename} already exists")

        # Updates the config if necessary.
        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        relative_path = make_path_relative(full_path, root_path)
        if (relative_path not in plot_config_plots_new
                and full_path not in plot_config_plots_new):
            plot_config_plots_new[str(full_path)] = {
                "sk": bytes(sk).hex(),
                "pool_pk": bytes(pool_pk).hex(),
            }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        save_config(root_path, plot_config_filename, plot_config)
    try:
        tmp_dir.rmdir()
    except Exception:
        print(f"warning: couldn't delete {tmp_dir}")
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """
    root_path = DEFAULT_ROOT_PATH
    plot_config_filename = config_path_for_filename(root_path, "plots.yaml")

    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=26)
    parser.add_argument(
        "-n", "--num_plots", help="Number of plots", type=int, default=1
    )
    parser.add_argument(
        "-i", "--index", help="First plot index", type=int, default=None
    )
    parser.add_argument(
        "-p", "--pool_pub_key", help="Hex public key of pool", type=str, default=""
    )
    parser.add_argument(
        "-s", "--sk_seed", help="Secret key seed in hex", type=str, default=None
    )
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help="Temporary directory for plotting files",
        type=Path,
        default=Path("."),
    )
    parser.add_argument(
        "-2",
        "--tmp2_dir",
        help="Second temporary directory for plotting files",
        type=Path,
        default=Path("."),
    )
    new_plots_root = path_from_root(
        root_path,
        load_config(root_path, "config.yaml")
        .get("harvester", {})
        .get("new_plot_root", "plots"),
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=Path,
        default=new_plots_root,
    )

    args = parser.parse_args()
    if args.sk_seed is None and args.index is not None:
        log(
            f"You have specified the -i (index) argument without the -s (sk_seed) argument."
            f" The program has changed so that the sk_seed is now generated randomly, so -i is no longer necessary."
            f" Please run the program without -i."
        )
        quit()
    if args.index is None:
        args.index = 0

    # The seed is what will be used to generate a private key for each plot
    if args.sk_seed is not None:
        sk_seed: bytes = bytes.fromhex(args.sk_seed)
        log(f"Using the provided sk_seed {sk_seed.hex()}.")
    else:
        sk_seed = token_bytes(32)
        log(
            f"Using sk_seed {sk_seed.hex()}. Note that the sk_seed is now generated randomly, as opposed "
            f"to from keys.yaml. If you want to use a specific seed, use the -s argument."
        )

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        keychain = Keychain()
        all_public_keys = keychain.get_all_public_keys()
        if len(all_public_keys) == 0:
            raise RuntimeError(
                "There are no private keys in the keychain, so we cannot create a plot. "
                "Please generate keys using 'chia keys generate' or pass in a pool pk with -p"
            )
        pool_pk = all_public_keys[0].get_public_key()

    log(
        f"Creating {args.num_plots} plots, from index {args.index} to "
        f"{args.index + args.num_plots - 1}, of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    mkdir(args.tmp_dir)
    mkdir(args.tmp2_dir)
    mkdir(args.final_dir)
    finished_filenames = []
    for i in range(args.index, args.index + args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(
            sk_seed + args.size.to_bytes(1, "big") + i.to_bytes(4, "big")
        )

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key()
        )
        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        filename: str = f"plot-k{args.size}-{dt_string}-{plot_seed}.dat"
        full_path: Path = args.final_dir / filename

        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        filenames = [Path(k).name for k in plot_config_plots_new.keys()]
        already_in_config = any(plot_seed.hex() in fname for fname in filenames)
        if already_in_config:
            log(f"Plot {filename} already exists (in config)")
            continue

        if not full_path.exists():
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                bytes([]),
                plot_seed,
            )
            finished_filenames.append(filename)
        else:
            log(f"Plot {filename} already exists")

        # Updates the config if necessary.
        plot_config = load_config(root_path, plot_config_filename)
        plot_config_plots_new = deepcopy(plot_config.get("plots", []))
        plot_config_plots_new[str(full_path)] = {
            "sk": bytes(sk).hex(),
            "pool_pk": bytes(pool_pk).hex(),
        }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        save_config(root_path, plot_config_filename, plot_config)

    log("")
    log("Summary:")
    try:
        args.tmp_dir.rmdir()
    except Exception:
        log(
            f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
        )
    try:
        args.tmp2_dir.rmdir()
    except Exception:
        log(
            f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
        )
    log(f"Created a total of {len(finished_filenames)} new plots")
    for filename in finished_filenames:
        log(filename)
def test1():
    seed = bytes([
        0, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192, 19, 18, 12, 89, 6, 220,
        18, 102, 58, 209, 82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22
    ])
    sk = PrivateKey.from_seed(seed)
    pk = sk.get_public_key()

    msg = bytes([100, 2, 254, 88, 90, 45, 23])

    sig = sk.sign(msg)

    sk_bytes = sk.serialize()
    pk_bytes = pk.serialize()
    sig_bytes = sig.serialize()

    sk = PrivateKey.from_bytes(sk_bytes)
    pk = PublicKey.from_bytes(pk_bytes)
    sig = Signature.from_bytes(sig_bytes)

    sig.set_aggregation_info(AggregationInfo.from_msg(pk, msg))
    ok = sig.verify()
    assert (ok)

    seed = bytes([1]) + seed[1:]
    sk1 = PrivateKey.from_seed(seed)
    seed = bytes([2]) + seed[1:]
    sk2 = PrivateKey.from_seed(seed)

    pk1 = sk1.get_public_key()
    sig1 = sk1.sign(msg)

    pk2 = sk2.get_public_key()
    sig2 = sk2.sign(msg)

    agg_sig = Signature.aggregate([sig1, sig2])
    agg_pubkey = PublicKey.aggregate([pk1, pk2])
    agg_sig.set_aggregation_info(AggregationInfo.from_msg(agg_pubkey, msg))
    assert (agg_sig.verify())

    seed = bytes([3]) + seed[1:]
    sk3 = PrivateKey.from_seed(seed)
    pk3 = sk3.get_public_key()
    msg2 = bytes([100, 2, 254, 88, 90, 45, 23])

    sig1 = sk1.sign(msg)
    sig2 = sk2.sign(msg)
    sig3 = sk3.sign(msg2)
    agg_sig_l = Signature.aggregate([sig1, sig2])
    agg_sig_final = Signature.aggregate([agg_sig_l, sig3])
    sig_bytes = agg_sig_final.serialize()
    agg_sig_final = Signature.from_bytes(sig_bytes)
    a1 = AggregationInfo.from_msg(pk1, msg)
    a2 = AggregationInfo.from_msg(pk2, msg)
    a3 = AggregationInfo.from_msg(pk3, msg2)
    a1a2 = AggregationInfo.merge_infos([a1, a2])
    a_final = AggregationInfo.merge_infos([a1a2, a3])
    print(a_final)
    agg_sig_final.set_aggregation_info(a_final)
    ok = agg_sig_final.verify()

    ok = agg_sig_l.verify()
    agg_sig_final = agg_sig_final.divide_by([agg_sig_l])
    ok = agg_sig_final.verify()

    agg_sk = PrivateKey.aggregate([sk1, sk2], [pk1, pk2])
    agg_sk.sign(msg)

    seed = bytes([
        1, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192, 19, 18, 12, 89, 6, 220,
        18, 102, 58, 209, 82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22
    ])
    esk = ExtendedPrivateKey.from_seed(seed)
    epk = esk.get_extended_public_key()

    sk_child = esk.private_child(0).private_child(5)
    pk_child = epk.public_child(0).public_child(5)

    buffer1 = pk_child.serialize()
    buffer2 = sk_child.serialize()

    print(len(buffer1), buffer1)
    print(len(buffer2), buffer2)
    assert (sk_child.get_extended_public_key() == pk_child)
def pub_keys_deseriazlize(_pubs):
    try:
        pubs = [PublicKey.from_bytes(pub) for pub in _pubs]
    except RuntimeError:
        raise ValueError("Bad public key")
    return pubs
def main():
    """
    Script for creating plots and adding them to the plot config file.
    """
    parser = argparse.ArgumentParser(description="Chia plotting script.")
    parser.add_argument("-k", "--size", help="Plot size", type=int, default=20)
    parser.add_argument("-n", "--num_plots", help="Number of plots", type=int,
                        default=10)
    parser.add_argument("-p", "--pool_pub_key", help="Hex public key of pool",
                        type=str, default="")
    parser.add_argument(
        "-t",
        "--tmp_dir",
        help="Temporary directory for plotting files (relative or absolute)",
        type=str,
        default="./plots",
    )
    parser.add_argument(
        "-d",
        "--final_dir",
        help="Final directory for plots (relative or absolute)",
        type=str,
        default="./plots",
    )

    # We need the keys file to access the pool keys (if they exist) and the sk_seed.
    args = parser.parse_args()
    if not os.path.isfile(key_config_filename):
        raise RuntimeError(
            "Keys not generated. Run python3 ./scripts/regenerate_keys.py.")

    # The seed is what will be used to generate a private key for each plot
    key_config = safe_load(open(key_config_filename, "r"))
    sk_seed: bytes = bytes.fromhex(key_config["sk_seed"])

    pool_pk: PublicKey
    if len(args.pool_pub_key) > 0:
        # Use the provided pool public key, useful for using an external pool
        pool_pk = PublicKey.from_bytes(bytes.fromhex(args.pool_pub_key))
    else:
        # Use the pool public key from the config, useful for solo farming
        pool_sk = PrivateKey.from_bytes(
            bytes.fromhex(key_config["pool_sks"][0]))
        pool_pk = pool_sk.get_public_key()

    print(
        f"Creating {args.num_plots} plots of size {args.size}, sk_seed {sk_seed.hex()} ppk {pool_pk}"
    )

    for i in range(args.num_plots):
        # Generate a sk based on the seed, plot size (k), and index
        sk: PrivateKey = PrivateKey.from_seed(
            sk_seed + args.size.to_bytes(1, "big") + i.to_bytes(4, "big"))

        # The plot seed is based on the pool and plot pks
        plot_seed: bytes32 = ProofOfSpace.calculate_plot_seed(
            pool_pk, sk.get_public_key())
        filename: str = f"plot-{i}-{args.size}-{plot_seed}.dat"
        full_path: str = os.path.join(args.final_dir, filename)
        if os.path.isfile(full_path):
            print(f"Plot {filename} already exists")
        else:
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(args.tmp_dir, args.final_dir, filename,
                                     args.size, bytes([]), plot_seed)

        # Updates the config if necessary.
        if os.path.isfile(plot_config_filename):
            plot_config = safe_load(open(plot_config_filename, "r"))
        else:
            plot_config = {"plots": {}}
        plot_config_plots_new = deepcopy(plot_config["plots"])
        if full_path not in plot_config_plots_new:
            plot_config_plots_new[full_path] = {
                "sk": bytes(sk).hex(),
                "pool_pk": bytes(pool_pk).hex(),
            }
        plot_config["plots"].update(plot_config_plots_new)

        # Dumps the new config to disk.
        with open(plot_config_filename, "w") as f:
            safe_dump(plot_config, f)
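# Sketch of the deterministic per-plot key derivation used by the plotting
# scripts above: the same sk_seed, plot size k, and index always reproduce the
# same plot private key. Assumes the legacy blspy bindings; the values below
# are illustrative stand-ins for key_config["sk_seed"] and the CLI arguments.
from blspy import PrivateKey

sk_seed = bytes(32)
k, index = 20, 0
plot_sk = PrivateKey.from_seed(sk_seed + k.to_bytes(1, "big") + index.to_bytes(4, "big"))
plot_sk_again = PrivateKey.from_seed(sk_seed + k.to_bytes(1, "big") + index.to_bytes(4, "big"))
assert plot_sk == plot_sk_again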