def test_get_name_puzzle_conditions(self):
    # this tests that extra block or coin data doesn't confuse `get_name_puzzle_conditions`
    generator = block_generator()
    cost, run_result = run_generator(generator, max_cost=MAX_COST)
    print(run_result)
    npc_result = get_name_puzzle_conditions(generator, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, safe_mode=False)
    assert npc_result.error is None
    assert npc_result.clvm_cost == EXPECTED_COST
    # the generator is expected to produce exactly one CREATE_COIN condition
    create_coin_cond = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bytes([0] * 31 + [1]), int_to_bytes(500)])
    expected_conditions = [(ConditionOpcode.CREATE_COIN, [create_coin_cond])]
    expected_npc = NPC(
        coin_name=bytes32.fromhex("e8538c2d14f2a7defae65c5c97f5d4fae7ee64acef7fec9d28ad847a0880fd03"),
        puzzle_hash=bytes32.fromhex("9dcf97a184f32623d11a73124ceb99a5709b083721e878a16d78f596718ba7b2"),
        conditions=expected_conditions,
    )
    assert npc_result.npc_list == [expected_npc]
async def get_derivation_record(self, index: uint32, wallet_id: uint32) -> Optional[DerivationRecord]:
    """
    Returns the derivation record by index and wallet id.
    """
    cursor = await self.db_connection.execute(
        "SELECT * FROM derivation_paths WHERE derivation_index=? and wallet_id=?;",
        (index, wallet_id),
    )
    row = await cursor.fetchone()
    await cursor.close()
    # guard clause: no matching row (or NULL index) means no record
    if row is None or row[0] is None:
        return None
    return DerivationRecord(
        uint32(row[0]),
        bytes32.fromhex(row[2]),
        G1Element.from_bytes(bytes.fromhex(row[1])),
        WalletType(row[3]),
        uint32(row[4]),
    )
async def get_block_records_close_to_peak(
    self, blocks_n: int
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns all block records with height within blocks_n of the peak, keyed
    by header hash, together with the peak's header hash. Returns ({}, None)
    when no peak is recorded.
    """
    res = await self.db.execute("SELECT header_hash, height from block_records WHERE is_peak = 1")
    row = await res.fetchone()
    await res.close()
    if row is None:
        return {}, None
    header_hash_bytes, peak_height = row
    peak: bytes32 = bytes32(bytes.fromhex(header_hash_bytes))
    # parameterized query instead of f-string interpolation: the height bound
    # is passed as a bound value rather than spliced into the SQL text
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ?",
        (peak_height - blocks_n,),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        header_hash_bytes, block_record_bytes = row
        header_hash = bytes32.fromhex(header_hash_bytes)
        ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)
    return ret, peak
def row_to_record(self, row) -> DerivationRecord:
    """Convert a raw derivation_paths DB row into a DerivationRecord."""
    derivation_index = uint32(row[0])
    pubkey = G1Element.from_bytes(bytes.fromhex(row[1]))
    puzzle_hash = bytes32.fromhex(row[2])
    wallet_type = WalletType(row[3])
    wallet_id = uint32(row[4])
    used = bool(row[6])
    return DerivationRecord(derivation_index, puzzle_hash, pubkey, wallet_type, wallet_id, used)
async def _load_blocks_from(self, height: uint32, prev_hash: bytes32):
    """Walk the chain backwards from (height, prev_hash), filling in the
    height-to-hash cache and sub-epoch summaries, 5000 blocks per DB query.

    Stops early when it reaches an entry that is already cached and
    consistent (hash and sub-epoch summary both match).
    """
    while height > 0:
        # load 5000 blocks at a time
        window_end = max(0, height - 5000)
        # v2 schema stores raw hashes in full_blocks; v1 stores hex strings
        # in block_records — same columns, different table and encoding
        if self.db.db_version == 2:
            query = (
                "SELECT header_hash,prev_hash,height,sub_epoch_summary from full_blocks "
                "INDEXED BY height WHERE height>=? AND height <?"
            )
        else:
            query = (
                "SELECT header_hash,prev_hash,height,sub_epoch_summary from block_records "
                "INDEXED BY height WHERE height>=? AND height <?"
            )
        async with self.db.db.execute(query, (window_end, height)) as cursor:
            # maps block-hash -> (height, prev-hash, sub-epoch-summary)
            ordered: Dict[bytes32, Tuple[uint32, bytes32, Optional[bytes]]] = {}
            if self.db.db_version == 2:
                for r in await cursor.fetchall():
                    ordered[r[0]] = (r[2], r[1], r[3])
            else:
                for r in await cursor.fetchall():
                    ordered[bytes32.fromhex(r[0])] = (r[2], bytes32.fromhex(r[1]), r[3])
        # follow prev_hash links backwards through the fetched window
        while height > window_end:
            entry = ordered[prev_hash]
            assert height == entry[0] + 1
            height = entry[0]
            if entry[2] is not None:
                # cached entry already matches -> everything below is cached too
                if (
                    self.get_hash(height) == prev_hash
                    and height in self.__sub_epoch_summaries
                    and self.__sub_epoch_summaries[height] == entry[2]
                ):
                    return
                self.__sub_epoch_summaries[height] = entry[2]
            self.__set_hash(height, prev_hash)
            prev_hash = entry[1]
async def get_peak_heights_dicts(self) -> Tuple[Dict[uint32, bytes32], Dict[uint32, SubEpochSummary]]:
    """
    Walks the chain backwards from the recorded peak and returns two dicts:
    height -> header hash for every main-chain block, and
    height -> SubEpochSummary for the heights that have one.
    Returns two empty dicts when no peak is recorded.
    """
    res = await self.db.execute("SELECT header_hash from block_records WHERE is_peak = 1")
    row = await res.fetchone()
    await res.close()
    if row is None:
        return {}, {}
    peak: bytes32 = bytes32.fromhex(row[0])
    cursor = await self.db.execute("SELECT header_hash,prev_hash,height,sub_epoch_summary from block_records")
    rows = await cursor.fetchall()
    await cursor.close()
    # index every stored block (including orphans) by header hash
    hash_to_prev_hash: Dict[bytes32, bytes32] = {}
    hash_to_height: Dict[bytes32, uint32] = {}
    hash_to_summary: Dict[bytes32, SubEpochSummary] = {}
    for row in rows:
        hash_to_prev_hash[bytes32.fromhex(row[0])] = bytes32.fromhex(row[1])
        hash_to_height[bytes32.fromhex(row[0])] = row[2]
        if row[3] is not None:
            hash_to_summary[bytes32.fromhex(row[0])] = SubEpochSummary.from_bytes(row[3])
    height_to_hash: Dict[uint32, bytes32] = {}
    sub_epoch_summaries: Dict[uint32, SubEpochSummary] = {}
    # follow prev_hash links from the peak down to genesis, so only the
    # main chain ends up in the result
    curr_header_hash = peak
    curr_height = hash_to_height[curr_header_hash]
    while True:
        height_to_hash[curr_height] = curr_header_hash
        if curr_header_hash in hash_to_summary:
            sub_epoch_summaries[curr_height] = hash_to_summary[curr_header_hash]
        if curr_height == 0:
            break
        curr_header_hash = hash_to_prev_hash[curr_header_hash]
        curr_height = hash_to_height[curr_header_hash]
    return height_to_hash, sub_epoch_summaries
async def get_header_blocks_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, HeaderBlock]:
    """
    Returns all header blocks with height in [start, stop] (inclusive),
    keyed by header hash.
    """
    # parameterized query instead of f-string interpolation: the bounds are
    # passed as bound values rather than spliced into the SQL text
    cursor = await self.db.execute(
        "SELECT header_hash, block from header_blocks WHERE height >= ? and height <= ?",
        (start, stop),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, HeaderBlock] = {}
    for row in rows:
        header_hash_bytes, block_record_bytes = row
        header_hash = bytes32.fromhex(header_hash_bytes)
        ret[header_hash] = HeaderBlock.from_bytes(block_record_bytes)
    return ret
def farm_coin(
    self,
    puzzle_hash: bytes32,
    birthday: CoinTimestamp,
    amount: int = 1024,
    prefix=bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000"),  # noqa
) -> Coin:
    """Create a coin at puzzle_hash, record it as created at birthday, and return it."""
    # derive a synthetic parent id by OR-ing the prefix with the birth height
    height_bytes = birthday.height.to_bytes(32, "big")
    parent = bytes32([p | h for p, h in zip(prefix, height_bytes)])
    coin = Coin(parent, puzzle_hash, uint64(amount))
    self._add_coin_entry(coin, birthday)
    return coin
async def get_block_records(
    self,
) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]:
    """
    Returns a dictionary with all blocks, as well as the header hash of the peak,
    if present.
    """
    cursor = await self.db.execute("SELECT header_hash, block, is_peak from block_records")
    rows = await cursor.fetchall()
    await cursor.close()
    records: Dict[bytes32, BlockRecord] = {}
    peak: Optional[bytes32] = None
    for hash_hex, record_blob, is_peak in rows:
        hh = bytes32.fromhex(hash_hex)
        records[hh] = BlockRecord.from_bytes(record_blob)
        if is_peak:
            assert peak is None  # Sanity check, only one peak
            peak = hh
    return records, peak
async def get_block_records_in_range(
    self,
    start: int,
    stop: int,
) -> Dict[bytes32, BlockRecord]:
    """
    Returns all block records with height in [start, stop] (inclusive),
    keyed by header hash.
    """
    # parameterized query instead of f-string interpolation: the bounds are
    # passed as bound values rather than spliced into the SQL text
    cursor = await self.db.execute(
        "SELECT header_hash, block from block_records WHERE height >= ? and height <= ?",
        (start, stop),
    )
    rows = await cursor.fetchall()
    await cursor.close()
    ret: Dict[bytes32, BlockRecord] = {}
    for row in rows:
        header_hash_bytes, block_record_bytes = row
        header_hash = bytes32.fromhex(header_hash_bytes)
        ret[header_hash] = BlockRecord.from_bytes(block_record_bytes)
    return ret
async def get_derivation_record_for_puzzle_hash(self, puzzle_hash: bytes32) -> Optional[DerivationRecord]:
    """
    Returns the derivation record for the given puzzle hash, or None if no
    matching row exists.
    """
    cursor = await self.db_connection.execute(
        "SELECT * FROM derivation_paths WHERE puzzle_hash=?;",
        (puzzle_hash.hex(),),
    )
    row = await cursor.fetchone()
    await cursor.close()
    # guard clause: no matching row (or NULL index) means no record
    if row is None or row[0] is None:
        return None
    return DerivationRecord(
        uint32(row[0]),
        bytes32.fromhex(row[2]),
        G1Element.from_bytes(bytes.fromhex(row[1])),
        WalletType(row[3]),
        uint32(row[4]),
        bool(row[6]),
    )
async def pprint_pool_wallet_state(
    wallet_client: WalletRpcClient,
    wallet_id: int,
    pool_wallet_info: PoolWalletInfo,
    address_prefix: str,
    pool_state_dict: Dict,
    unconfirmed_transactions: List[TransactionRecord],  # NOTE(review): unused in this body
):
    """Print a human-readable summary of a pool wallet's state to stdout."""
    if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL and pool_wallet_info.target is None:
        # leaving with no target set: the wallet is stuck until the relative
        # lock height has passed
        expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
        print(f"Current state: INVALID_STATE. Please leave/join again after block height {expected_leave_height}")
    else:
        print(f"Current state: {PoolSingletonState(pool_wallet_info.current.state).name}")
    print(f"Current state from block height: {pool_wallet_info.singleton_block_height}")
    print(f"Launcher ID: {pool_wallet_info.launcher_id}")
    print(
        "Target address (not for plotting): "
        f"{encode_puzzle_hash(pool_wallet_info.current.target_puzzle_hash, address_prefix)}"
    )
    print(f"Owner public key: {pool_wallet_info.current.owner_pubkey}")
    print(
        f"P2 singleton address (pool contract address for plotting): "
        f"{encode_puzzle_hash(pool_wallet_info.p2_singleton_puzzle_hash, address_prefix)}"
    )
    if pool_wallet_info.target is not None:
        print(f"Target state: {PoolSingletonState(pool_wallet_info.target.state).name}")
        print(f"Target pool URL: {pool_wallet_info.target.pool_url}")
    # NOTE(review): this branch compares against .value while the others
    # compare against the enum member directly — confirm both work as intended
    if pool_wallet_info.current.state == PoolSingletonState.SELF_POOLING.value:
        balances: Dict = await wallet_client.get_wallet_balance(str(wallet_id))
        balance = balances["confirmed_wallet_balance"]
        typ = WalletType(int(WalletType.POOLING_WALLET))
        address_prefix, scale = wallet_coin_unit(typ, address_prefix)
        print(f"Claimable balance: {print_balance(balance, scale, address_prefix)}")
    if pool_wallet_info.current.state == PoolSingletonState.FARMING_TO_POOL:
        print(f"Current pool URL: {pool_wallet_info.current.pool_url}")
        if pool_wallet_info.launcher_id in pool_state_dict:
            print(f"Current difficulty: {pool_state_dict[pool_wallet_info.launcher_id]['current_difficulty']}")
            print(f"Points balance: {pool_state_dict[pool_wallet_info.launcher_id]['current_points']}")
            print(f"Relative lock height: {pool_wallet_info.current.relative_lock_height} blocks")
            payout_instructions: str = pool_state_dict[pool_wallet_info.launcher_id]["pool_config"]["payout_instructions"]
            try:
                # payout instructions are normally a puzzle hash; fall back to
                # printing the raw string if they don't parse as one
                payout_address = encode_puzzle_hash(bytes32.fromhex(payout_instructions), address_prefix)
                print(f"Payout instructions (pool will pay to this address): {payout_address}")
            except Exception:
                print(f"Payout instructions (pool will pay you with this): {payout_instructions}")
    if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL:
        expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
        if pool_wallet_info.target is not None:
            print(f"Expected to leave after block height: {expected_leave_height}")
async def load_backup(self, filename: str):
    """Restore this DID wallet from a colon-separated backup file.

    The backup line holds the origin coin (parent id, puzzle hash, amount),
    the comma-separated backup ids, the serialized inner puzzle, and the
    number of backup ids needed for recovery. After restoring the DIDInfo,
    walks the origin coin's descendants via a full node peer to locate the
    current DID coin and record its lineage proofs.
    """
    try:
        # NOTE(review): prefer a `with open(...)` context manager so the file
        # is closed even if readline/split raises
        f = open(filename, "r")
        details = f.readline().split(":")
        f.close()
        origin = Coin(
            bytes32.fromhex(details[0]),
            bytes32.fromhex(details[1]),
            uint64(int(details[2])),
        )
        backup_ids = []
        for d in details[3].split(","):
            backup_ids.append(bytes.fromhex(d))
        num_of_backup_ids_needed = uint64(int(details[5]))
        if num_of_backup_ids_needed > len(backup_ids):
            # backup file is inconsistent: requires more ids than it lists
            raise Exception
        innerpuz: Program = Program.from_bytes(bytes.fromhex(details[4]))
        # store the restored info with no current coin yet
        did_info: DIDInfo = DIDInfo(
            origin,
            backup_ids,
            num_of_backup_ids_needed,
            self.did_info.parent_info,
            innerpuz,
            None,
            None,
            None,
            False,
        )
        await self.save_info(did_info, False)
        await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id)
        # full_puz = did_wallet_puzzles.create_fullpuz(innerpuz, origin.name())
        # All additions in this block here:
        new_puzhash = await self.get_new_inner_hash()
        new_pubkey = bytes(
            (await self.wallet_state_manager.get_unused_derivation_record(self.wallet_info.id)).pubkey
        )
        parent_info = None
        node = self.wallet_state_manager.wallet_node.get_full_node_peer()
        children = await self.wallet_state_manager.wallet_node.fetch_children(node, origin.name())
        # follow the chain of child coins starting from the origin
        while True:
            if len(children) == 0:
                break
            children_state: CoinState = children[0]
            coin = children_state.coin
            name = coin.name()
            children = await self.wallet_state_manager.wallet_node.fetch_children(node, name)
            future_parent = LineageProof(
                coin.parent_coin_info,
                innerpuz.get_tree_hash(),
                coin.amount,
            )
            await self.add_parent(coin.name(), future_parent, False)
            if children_state.spent_height != children_state.created_height:
                # record this coin as the current DID coin and fetch its
                # parent's puzzle/solution to build the lineage proof
                did_info = DIDInfo(
                    origin,
                    backup_ids,
                    num_of_backup_ids_needed,
                    self.did_info.parent_info,
                    innerpuz,
                    coin,
                    new_puzhash,
                    new_pubkey,
                    False,
                )
                await self.save_info(did_info, False)
                assert children_state.created_height
                puzzle_solution_request = wallet_protocol.RequestPuzzleSolution(
                    coin.parent_coin_info, children_state.created_height
                )
                parent_state: CoinState = (
                    await self.wallet_state_manager.wallet_node.get_coin_state([coin.parent_coin_info])
                )[0]
                response = await node.request_puzzle_solution(puzzle_solution_request)
                req_puz_sol = response.response
                assert req_puz_sol.puzzle is not None
                parent_innerpuz = did_wallet_puzzles.get_innerpuzzle_from_puzzle(req_puz_sol.puzzle)
                assert parent_innerpuz is not None
                parent_info = LineageProof(
                    parent_state.coin.parent_coin_info,
                    parent_innerpuz.get_tree_hash(),
                    parent_state.coin.amount,
                )
                await self.add_parent(coin.parent_coin_info, parent_info, False)
        assert parent_info is not None
        return None
    except Exception as e:
        # NOTE(review): `raise e` resets the traceback origin; a bare `raise`
        # (or dropping this handler entirely) would preserve it
        raise e
async def load_backup(self, filename: str):
    """Restore this DID wallet from a colon-separated backup file.

    Parses the origin coin, backup ids, inner puzzle, and recovery threshold
    from the file, then searches block records for the full puzzle hash and
    queries a connected full node for additions/removals to locate the
    current DID coin and its lineage proof.
    """
    try:
        # NOTE(review): prefer a `with open(...)` context manager so the file
        # is closed even if readline/split raises
        f = open(filename, "r")
        details = f.readline().split(":")
        f.close()
        origin = Coin(
            bytes32.fromhex(details[0]),
            bytes32.fromhex(details[1]),
            uint64(int(details[2])),
        )
        backup_ids = []
        for d in details[3].split(","):
            backup_ids.append(bytes.fromhex(d))
        num_of_backup_ids_needed = uint64(int(details[5]))
        if num_of_backup_ids_needed > len(backup_ids):
            # backup file is inconsistent: requires more ids than it lists
            raise Exception
        innerpuz: Program = Program.from_bytes(bytes.fromhex(details[4]))
        did_info: DIDInfo = DIDInfo(
            origin,
            backup_ids,
            num_of_backup_ids_needed,
            self.did_info.parent_info,
            innerpuz,
            None,
            None,
            None,
            False,
        )
        await self.save_info(did_info, False)
        await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id)
        full_puz = did_wallet_puzzles.create_fullpuz(innerpuz, origin.name())
        full_puzzle_hash = full_puz.get_tree_hash()
        (
            sub_height,
            header_hash,
        ) = await self.wallet_state_manager.search_blockrecords_for_puzzlehash(full_puzzle_hash)
        assert sub_height is not None
        assert header_hash is not None
        full_nodes = self.wallet_state_manager.server.connection_by_type[NodeType.FULL_NODE]
        additions: Union[RespondAdditions, RejectAdditionsRequest, None] = None
        # ask connected full nodes until one responds
        for id, node in full_nodes.items():
            request = wallet_protocol.RequestAdditions(sub_height, header_hash, None)
            additions = await node.request_additions(request)
            if additions is not None:
                break
            # NOTE(review): a RejectAdditionsRequest is not None, so the break
            # above fires first and this continue appears unreachable — confirm
            # whether the isinstance check was meant to come before the break
            if isinstance(additions, RejectAdditionsRequest):
                continue
        assert additions is not None
        assert isinstance(additions, RespondAdditions)
        # All additions in this block here:
        new_puzhash = await self.get_new_inner_hash()
        new_pubkey = bytes(
            (await self.wallet_state_manager.get_unused_derivation_record(self.wallet_info.id)).pubkey
        )
        # collect every parent id seen in the block's additions; a coin whose
        # name is NOT among them has no child in this block, i.e. it is the
        # most recent (unspent) DID coin
        all_parents: Set[bytes32] = set()
        for puzzle_list_coin in additions.coins:
            puzzle_hash, coins = puzzle_list_coin
            for coin in coins:
                all_parents.add(coin.parent_coin_info)
        parent_info = None
        for puzzle_list_coin in additions.coins:
            puzzle_hash, coins = puzzle_list_coin
            if puzzle_hash == full_puzzle_hash:
                # our coin
                for coin in coins:
                    future_parent = LineageProof(
                        coin.parent_coin_info,
                        innerpuz.get_tree_hash(),
                        coin.amount,
                    )
                    await self.add_parent(coin.name(), future_parent, False)
                    if coin.name() not in all_parents:
                        did_info = DIDInfo(
                            origin,
                            backup_ids,
                            num_of_backup_ids_needed,
                            self.did_info.parent_info,
                            innerpuz,
                            coin,
                            new_puzhash,
                            new_pubkey,
                            False,
                        )
                        await self.save_info(did_info, False)
                        # fetch the parent's puzzle/solution to build the
                        # lineage proof for the current coin
                        removal_request = wallet_protocol.RequestRemovals(sub_height, header_hash, None)
                        removals_response = await node.request_removals(removal_request)
                        for coin_tuple in removals_response.coins:
                            if coin_tuple[0] == coin.parent_coin_info:
                                puzzle_solution_request = wallet_protocol.RequestPuzzleSolution(
                                    coin.parent_coin_info, sub_height
                                )
                                response = await node.request_puzzle_solution(puzzle_solution_request)
                                req_puz_sol = response.response
                                assert req_puz_sol.puzzle is not None
                                parent_innerpuz = did_wallet_puzzles.get_innerpuzzle_from_puzzle(
                                    req_puz_sol.puzzle
                                )
                                assert parent_innerpuz is not None
                                parent_info = LineageProof(
                                    coin_tuple[1].parent_coin_info,
                                    parent_innerpuz.get_tree_hash(),
                                    coin_tuple[1].amount,
                                )
                                await self.add_parent(coin.parent_coin_info, parent_info, False)
                                break
        assert parent_info is not None
        return None
    except Exception as e:
        # NOTE(review): `raise e` resets the traceback origin; a bare `raise`
        # would preserve it
        raise e
async def convert_v1_to_v2(in_path: Path, out_path: Path) -> None:
    """Convert a v1 blockchain database at in_path into a new v2 database at out_path.

    Copies main-chain blocks (zstd-compressed), sub-epoch segments, hints and
    a consistent snapshot of the coin set, then rebuilds the store indices.

    Raises:
        RuntimeError: if out_path already exists, the input is already v2,
            or a main-chain block cannot be found in full_blocks.
    """
    import aiosqlite
    from chia.util.db_wrapper import DBWrapper

    if out_path.exists():
        print(f"output file already exists. {out_path}")
        raise RuntimeError("already exists")
    print(f"opening file for reading: {in_path}")
    async with aiosqlite.connect(in_path) as in_db:
        try:
            async with in_db.execute("SELECT * from database_version") as cursor:
                row = await cursor.fetchone()
                if row is not None and row[0] != 1:
                    print(f"blockchain database already version {row[0]}\nDone")
                    raise RuntimeError("already v2")
        except aiosqlite.OperationalError:
            # v1 databases have no database_version table; that's expected
            pass
        store_v1 = await BlockStore.create(DBWrapper(in_db, db_version=1))
        print(f"opening file for writing: {out_path}")
        async with aiosqlite.connect(out_path) as out_db:
            # durability pragmas are disabled: the output DB is simply
            # discarded and rebuilt if the conversion fails part-way
            await out_db.execute("pragma journal_mode=OFF")
            await out_db.execute("pragma synchronous=OFF")
            await out_db.execute("pragma cache_size=131072")
            await out_db.execute("pragma locking_mode=exclusive")
            print("initializing v2 version")
            await out_db.execute("CREATE TABLE database_version(version int)")
            await out_db.execute("INSERT INTO database_version VALUES(?)", (2,))
            print("initializing v2 block store")
            await out_db.execute(
                "CREATE TABLE full_blocks("
                "header_hash blob PRIMARY KEY,"
                "prev_hash blob,"
                "height bigint,"
                "sub_epoch_summary blob,"
                "is_fully_compactified tinyint,"
                "in_main_chain tinyint,"
                "block blob,"
                "block_record blob)"
            )
            await out_db.execute(
                "CREATE TABLE sub_epoch_segments_v3("
                "ses_block_hash blob PRIMARY KEY,"
                "challenge_segments blob)"
            )
            await out_db.execute("CREATE TABLE current_peak(key int PRIMARY KEY, hash blob)")
            peak_hash, peak_height = await store_v1.get_peak()
            print(f"peak: {peak_hash.hex()} height: {peak_height}")
            await out_db.execute("INSERT INTO current_peak VALUES(?, ?)", (0, peak_hash))
            await out_db.commit()
            print("[1/5] converting full_blocks")
            height = peak_height + 1
            hh = peak_hash
            commit_in = BLOCK_COMMIT_RATE
            rate = 1.0
            start_time = time()
            block_start_time = start_time
            block_values = []
            async with in_db.execute(
                "SELECT header_hash, prev_hash, block, sub_epoch_summary FROM block_records ORDER BY height DESC"
            ) as cursor:
                async with in_db.execute(
                    "SELECT header_hash, height, is_fully_compactified, block FROM full_blocks ORDER BY height DESC"
                ) as cursor_2:
                    await out_db.execute("begin transaction")
                    # walk backwards from the peak following prev_hash, so only
                    # main-chain blocks are copied; both cursors scan height DESC
                    # in lock-step
                    async for row in cursor:
                        header_hash = bytes.fromhex(row[0])
                        if header_hash != hh:
                            # orphan / non-main-chain record
                            continue
                        # progress cursor_2 until we find the header hash
                        while True:
                            row_2 = await cursor_2.fetchone()
                            if row_2 is None:
                                print(f"ERROR: could not find block {hh.hex()}")
                                raise RuntimeError(f"block {hh.hex()} not found")
                            if bytes.fromhex(row_2[0]) == hh:
                                break
                        assert row_2[1] == height - 1
                        height = row_2[1]
                        is_fully_compactified = row_2[2]
                        block_bytes = row_2[3]
                        prev_hash = bytes.fromhex(row[1])
                        block_record = row[2]
                        ses = row[3]
                        block_values.append((
                            hh,
                            prev_hash,
                            height,
                            ses,
                            is_fully_compactified,
                            1,  # in_main_chain
                            zstd.compress(block_bytes),
                            block_record,
                        ))
                        hh = prev_hash
                        if (height % 1000) == 0:
                            print(
                                f"\r{height: 10d} {(peak_height-height)*100/peak_height:.2f}% "
                                f"{rate:0.1f} blocks/s ETA: {height//rate} s ",
                                end="",
                            )
                            sys.stdout.flush()
                        # flush a batch every BLOCK_COMMIT_RATE rows
                        commit_in -= 1
                        if commit_in == 0:
                            commit_in = BLOCK_COMMIT_RATE
                            await out_db.executemany(
                                "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)", block_values
                            )
                            await out_db.commit()
                            await out_db.execute("begin transaction")
                            block_values = []
                            end_time = time()
                            rate = BLOCK_COMMIT_RATE / (end_time - start_time)
                            start_time = end_time
            # flush the final partial batch
            await out_db.executemany(
                "INSERT OR REPLACE INTO full_blocks VALUES(?, ?, ?, ?, ?, ?, ?, ?)", block_values
            )
            await out_db.commit()
            end_time = time()
            print(f"\r {end_time - block_start_time:.2f} seconds ")
            print("[2/5] converting sub_epoch_segments_v3")
            commit_in = SES_COMMIT_RATE
            ses_values = []
            ses_start_time = time()
            async with in_db.execute(
                "SELECT ses_block_hash, challenge_segments FROM sub_epoch_segments_v3"
            ) as cursor:
                count = 0
                await out_db.execute("begin transaction")
                async for row in cursor:
                    block_hash = bytes32.fromhex(row[0])
                    ses = row[1]
                    ses_values.append((block_hash, ses))
                    count += 1
                    if (count % 100) == 0:
                        print(f"\r{count:10d} ", end="")
                        sys.stdout.flush()
                    commit_in -= 1
                    if commit_in == 0:
                        commit_in = SES_COMMIT_RATE
                        await out_db.executemany(
                            "INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values
                        )
                        await out_db.commit()
                        await out_db.execute("begin transaction")
                        ses_values = []
            await out_db.executemany("INSERT INTO sub_epoch_segments_v3 VALUES (?, ?)", ses_values)
            await out_db.commit()
            end_time = time()
            print(f"\r {end_time - ses_start_time:.2f} seconds ")
            print("[3/5] converting hint_store")
            commit_in = HINT_COMMIT_RATE
            hint_start_time = time()
            hint_values = []
            await out_db.execute("CREATE TABLE hints(coin_id blob, hint blob, UNIQUE (coin_id, hint))")
            await out_db.commit()
            async with in_db.execute("SELECT coin_id, hint FROM hints") as cursor:
                count = 0
                await out_db.execute("begin transaction")
                async for row in cursor:
                    hint_values.append((row[0], row[1]))
                    commit_in -= 1
                    if commit_in == 0:
                        commit_in = HINT_COMMIT_RATE
                        # NOTE(review): this statement differs from the final
                        # flush below (extra "ON CONFLICT DO NOTHING" and
                        # different spacing) — confirm both are intended
                        await out_db.executemany(
                            "INSERT OR IGNORE INTO hints VALUES(?, ?) ON CONFLICT DO NOTHING", hint_values
                        )
                        await out_db.commit()
                        await out_db.execute("begin transaction")
                        hint_values = []
            await out_db.executemany("INSERT OR IGNORE INTO hints VALUES (?, ?)", hint_values)
            await out_db.commit()
            end_time = time()
            print(f"\r {end_time - hint_start_time:.2f} seconds ")
            print("[4/5] converting coin_store")
            await out_db.execute(
                "CREATE TABLE coin_record("
                "coin_name blob PRIMARY KEY,"
                " confirmed_index bigint,"
                " spent_index bigint,"  # if this is zero, it means the coin has not been spent
                " coinbase int,"
                " puzzle_hash blob,"
                " coin_parent blob,"
                " amount blob,"  # we use a blob of 8 bytes to store uint64
                " timestamp bigint)"
            )
            await out_db.commit()
            commit_in = COIN_COMMIT_RATE
            rate = 1.0
            start_time = time()
            coin_values = []
            coin_start_time = start_time
            async with in_db.execute(
                "SELECT coin_name, confirmed_index, spent_index, coinbase, puzzle_hash, coin_parent, amount, timestamp "
                "FROM coin_record WHERE confirmed_index <= ?",
                (peak_height,),
            ) as cursor:
                count = 0
                await out_db.execute("begin transaction")
                async for row in cursor:
                    spent_index = row[2]
                    # in order to convert a consistent snapshot of the
                    # blockchain state, any coin that was spent *after* our
                    # cutoff must be converted into an unspent coin
                    if spent_index > peak_height:
                        spent_index = 0
                    coin_values.append((
                        bytes.fromhex(row[0]),
                        row[1],
                        spent_index,
                        row[3],
                        bytes.fromhex(row[4]),
                        bytes.fromhex(row[5]),
                        row[6],
                        row[7],
                    ))
                    count += 1
                    if (count % 2000) == 0:
                        print(f"\r{count//1000:10d}k coins {rate:0.1f} coins/s ", end="")
                        sys.stdout.flush()
                    commit_in -= 1
                    if commit_in == 0:
                        commit_in = COIN_COMMIT_RATE
                        await out_db.executemany(
                            "INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values
                        )
                        await out_db.commit()
                        await out_db.execute("begin transaction")
                        coin_values = []
                        end_time = time()
                        rate = COIN_COMMIT_RATE / (end_time - start_time)
                        start_time = end_time
            await out_db.executemany("INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?)", coin_values)
            await out_db.commit()
            end_time = time()
            print(f"\r {end_time - coin_start_time:.2f} seconds ")
            print("[5/5] build indices")
            index_start_time = time()
            # re-opening each store with db_version=2 creates its indices
            print(" block store")
            await BlockStore.create(DBWrapper(out_db, db_version=2))
            print(" coin store")
            await CoinStore.create(DBWrapper(out_db, db_version=2))
            print(" hint store")
            await HintStore.create(DBWrapper(out_db, db_version=2))
            end_time = time()
            print(f"\r {end_time - index_start_time:.2f} seconds ")
def test_pool_lifecycle(self):
    """End-to-end lifecycle test for the pool singleton puzzles.

    Walks through: launch, travel after launch (on a forked coin store),
    honest absorb, two negative absorb/spend cases, entering the waiting
    room, a too-fast escape (negative), absorb while waiting, leaving the
    waiting room, and one final absorb.
    """
    # START TESTS
    # Generate starting info
    key_lookup = KeyTool()
    sk: PrivateKey = PrivateKey.from_bytes(
        secret_exponent_for_index(1).to_bytes(32, "big"),
    )
    pk: G1Element = G1Element.from_bytes(public_key_for_index(1, key_lookup))
    starting_puzzle: Program = puzzle_for_pk(pk)
    starting_ph: bytes32 = starting_puzzle.get_tree_hash()
    # Get our starting standard coin created
    START_AMOUNT: uint64 = 1023
    coin_db = CoinStore()
    # NOTE(review): `time` shadows any imported `time` module/function in
    # this scope — confirm that is intentional
    time = CoinTimestamp(10000000, 1)
    coin_db.farm_coin(starting_ph, time, START_AMOUNT)
    starting_coin: Coin = next(coin_db.all_unspent_coins())
    # LAUNCHING
    # Create the escaping inner puzzle
    GENESIS_CHALLENGE = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a3ff07eb358e8255a65c30a2dce0e5fbb")
    launcher_coin = singleton_top_layer.generate_launcher_coin(
        starting_coin,
        START_AMOUNT,
    )
    DELAY_TIME = uint64(60800)
    DELAY_PH = starting_ph
    launcher_id = launcher_coin.name()
    relative_lock_height: uint32 = uint32(5000)
    # use a dummy pool state
    pool_state = PoolState(
        owner_pubkey=pk,
        pool_url="",
        relative_lock_height=relative_lock_height,
        state=3,  # farming to pool
        target_puzzle_hash=starting_ph,
        version=1,
    )
    # create a new dummy pool state for travelling
    target_pool_state = PoolState(
        owner_pubkey=pk,
        pool_url="",
        relative_lock_height=relative_lock_height,
        state=2,  # Leaving pool
        target_puzzle_hash=starting_ph,
        version=1,
    )
    # Standard format comment
    comment = Program.to([("p", bytes(pool_state)), ("t", DELAY_TIME), ("h", DELAY_PH)])
    # annotation corrected: create_waiting_room_inner_puzzle returns a puzzle
    # (Program) — .get_tree_hash() is called on it below
    pool_wr_innerpuz: Program = create_waiting_room_inner_puzzle(
        starting_ph,
        relative_lock_height,
        pk,
        launcher_id,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    pool_wr_inner_hash = pool_wr_innerpuz.get_tree_hash()
    pooling_innerpuz: Program = create_pooling_inner_puzzle(
        starting_ph,
        pool_wr_inner_hash,
        pk,
        launcher_id,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Driver tests
    assert is_pool_singleton_inner_puzzle(pooling_innerpuz)
    assert is_pool_singleton_inner_puzzle(pool_wr_innerpuz)
    assert get_pubkey_from_member_inner_puzzle(pooling_innerpuz) == pk
    # Generating launcher information
    conditions, launcher_coinsol = singleton_top_layer.launch_conditions_and_coinsol(
        starting_coin, pooling_innerpuz, comment, START_AMOUNT
    )
    # Creating solution for standard transaction
    delegated_puzzle: Program = puzzle_for_conditions(conditions)
    full_solution: Program = solution_for_conditions(conditions)
    starting_coinsol = CoinSolution(
        starting_coin,
        starting_puzzle,
        full_solution,
    )
    # Create the spend bundle
    sig: G2Element = sign_delegated_puz(delegated_puzzle, starting_coin)
    spend_bundle = SpendBundle(
        [starting_coinsol, launcher_coinsol],
        sig,
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        spend_bundle,
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # Test that we can retrieve the extra data
    assert get_delayed_puz_info_from_launcher_spend(launcher_coinsol) == (DELAY_TIME, DELAY_PH)
    assert solution_to_extra_data(launcher_coinsol) == pool_state
    # TEST TRAVEL AFTER LAUNCH
    # fork the state
    fork_coin_db: CoinStore = copy.deepcopy(coin_db)
    post_launch_coinsol, _ = create_travel_spend(
        launcher_coinsol,
        launcher_coin,
        pool_state,
        target_pool_state,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Spend it!
    fork_coin_db.update_coin_store_for_spend_bundle(
        SpendBundle([post_launch_coinsol], G2Element()),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # HONEST ABSORB
    time = CoinTimestamp(10000030, 2)
    # create the farming reward
    p2_singleton_puz: Program = create_p2_singleton_puzzle(
        SINGLETON_MOD_HASH,
        launcher_id,
        DELAY_TIME,
        DELAY_PH,
    )
    p2_singleton_ph: bytes32 = p2_singleton_puz.get_tree_hash()
    assert uncurry_pool_waitingroom_inner_puzzle(pool_wr_innerpuz) == (
        starting_ph,
        relative_lock_height,
        pk,
        p2_singleton_ph,
    )
    assert launcher_id_to_p2_puzzle_hash(launcher_id, DELAY_TIME, DELAY_PH) == p2_singleton_ph
    assert get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle(p2_singleton_puz) == (DELAY_TIME, DELAY_PH)
    coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
    coin_sols: List[CoinSolution] = create_absorb_spend(
        launcher_coinsol,
        pool_state,
        launcher_coin,
        2,  # height
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        SpendBundle(coin_sols, G2Element()),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # ABSORB A NON EXISTENT REWARD (Negative test)
    last_coinsol: CoinSolution = list(
        filter(
            lambda e: e.coin.amount == START_AMOUNT,
            coin_sols,
        )
    )[0]
    coin_sols: List[CoinSolution] = create_absorb_spend(
        last_coinsol,
        pool_state,
        launcher_coin,
        2,  # height
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # filter for only the singleton solution
    singleton_coinsol: CoinSolution = list(
        filter(
            lambda e: e.coin.amount == START_AMOUNT,
            coin_sols,
        )
    )[0]
    # Spend it and hope it fails!
    # NOTE(review): if no exception is raised this passes silently —
    # pytest.raises would make the negative test stricter
    try:
        coin_db.update_coin_store_for_spend_bundle(
            SpendBundle([singleton_coinsol], G2Element()),
            time,
            DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
        )
    except BadSpendBundleError as e:
        assert str(e) == "condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
    # SPEND A NON-REWARD P2_SINGLETON (Negative test)
    # create the dummy coin
    non_reward_p2_singleton = Coin(
        bytes32(32 * b"3"),
        p2_singleton_ph,
        uint64(1337),
    )
    coin_db._add_coin_entry(non_reward_p2_singleton, time)
    # construct coin solution for the p2_singleton coin
    bad_coinsol = CoinSolution(
        non_reward_p2_singleton,
        p2_singleton_puz,
        Program.to([
            pooling_innerpuz.get_tree_hash(),
            non_reward_p2_singleton.name(),
        ]),
    )
    # Spend it and hope it fails!
    try:
        coin_db.update_coin_store_for_spend_bundle(
            SpendBundle([singleton_coinsol, bad_coinsol], G2Element()),
            time,
            DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
        )
    except BadSpendBundleError as e:
        assert str(e) == "condition validation failure Err.ASSERT_ANNOUNCE_CONSUMED_FAILED"
    # ENTER WAITING ROOM
    # find the singleton
    singleton = get_most_recent_singleton_coin_from_coin_solution(last_coinsol)
    # get the relevant coin solution
    travel_coinsol, _ = create_travel_spend(
        last_coinsol,
        launcher_coin,
        pool_state,
        target_pool_state,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Test that we can retrieve the extra data
    assert solution_to_extra_data(travel_coinsol) == target_pool_state
    # sign the serialized state
    data = Program.to(bytes(target_pool_state)).get_tree_hash()
    sig: G2Element = AugSchemeMPL.sign(
        sk,
        (data + singleton.name() + DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA),
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        SpendBundle([travel_coinsol], sig),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # ESCAPE TOO FAST (Negative test)
    # find the singleton
    singleton = get_most_recent_singleton_coin_from_coin_solution(travel_coinsol)
    # get the relevant coin solution
    return_coinsol, _ = create_travel_spend(
        travel_coinsol,
        launcher_coin,
        target_pool_state,
        pool_state,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # sign the serialized target state
    sig = AugSchemeMPL.sign(
        sk,
        (data + singleton.name() + DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA),
    )
    # Spend it and hope it fails!
    try:
        coin_db.update_coin_store_for_spend_bundle(
            SpendBundle([return_coinsol], sig),
            time,
            DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
        )
    except BadSpendBundleError as e:
        assert str(e) == "condition validation failure Err.ASSERT_HEIGHT_RELATIVE_FAILED"
    # ABSORB WHILE IN WAITING ROOM
    time = CoinTimestamp(10000060, 3)
    # create the farming reward
    coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
    # generate relevant coin solutions
    coin_sols: List[CoinSolution] = create_absorb_spend(
        travel_coinsol,
        target_pool_state,
        launcher_coin,
        3,  # height
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        SpendBundle(coin_sols, G2Element()),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # LEAVE THE WAITING ROOM
    time = CoinTimestamp(20000000, 10000)
    # find the singleton
    singleton_coinsol: CoinSolution = list(
        filter(
            lambda e: e.coin.amount == START_AMOUNT,
            coin_sols,
        )
    )[0]
    singleton: Coin = get_most_recent_singleton_coin_from_coin_solution(singleton_coinsol)
    # get the relevant coin solution
    return_coinsol, _ = create_travel_spend(
        singleton_coinsol,
        launcher_coin,
        target_pool_state,
        pool_state,
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Test that we can retrieve the extra data
    assert solution_to_extra_data(return_coinsol) == pool_state
    # sign the serialized target state
    data = Program.to([pooling_innerpuz.get_tree_hash(), START_AMOUNT, bytes(pool_state)]).get_tree_hash()
    sig: G2Element = AugSchemeMPL.sign(
        sk,
        (data + singleton.name() + DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA),
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        SpendBundle([return_coinsol], sig),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
    # ABSORB ONCE MORE FOR GOOD MEASURE
    time = CoinTimestamp(20000000, 10005)
    # create the farming reward
    coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
    coin_sols: List[CoinSolution] = create_absorb_spend(
        return_coinsol,
        pool_state,
        launcher_coin,
        10005,  # height
        GENESIS_CHALLENGE,
        DELAY_TIME,
        DELAY_PH,
    )
    # Spend it!
    coin_db.update_coin_store_for_spend_bundle(
        SpendBundle(coin_sols, G2Element()),
        time,
        DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM,
    )
async def pprint_pool_wallet_state(
    wallet_client: WalletRpcClient,
    wallet_id: int,
    pool_wallet_info: PoolWalletInfo,
    address_prefix: str,
    pool_state_dict: Dict,
    plot_counts: Counter,
):
    """Pretty-print the state of one pool wallet (plot NFT) to stdout.

    Args:
        wallet_client: RPC client used only to fetch the claimable balance
            when the wallet is self-pooling.
        wallet_id: id of the wallet, passed (as a string) to the balance RPC.
        pool_wallet_info: current/target singleton state for this plot NFT.
        address_prefix: bech32 prefix used when encoding puzzle hashes.
        pool_state_dict: launcher_id -> pool-protocol state (difficulty,
            points, pool_config, ...); entries may be absent.
        plot_counts: count of local plots keyed by p2_singleton puzzle hash.
    """
    # A wallet stuck in LEAVING_POOL with no target cannot be displayed as a
    # normal state: tell the user when the relative lock expires instead.
    if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL and pool_wallet_info.target is None:
        expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
        print(
            f"Current state: INVALID_STATE. Please leave/join again after block height {expected_leave_height}"
        )
    else:
        print(
            f"Current state: {PoolSingletonState(pool_wallet_info.current.state).name}"
        )
    print(
        f"Current state from block height: {pool_wallet_info.singleton_block_height}"
    )
    print(f"Launcher ID: {pool_wallet_info.launcher_id}")
    print(
        "Target address (not for plotting): "
        f"{encode_puzzle_hash(pool_wallet_info.current.target_puzzle_hash, address_prefix)}"
    )
    print(
        f"Number of plots: {plot_counts[pool_wallet_info.p2_singleton_puzzle_hash]}"
    )
    print(f"Owner public key: {pool_wallet_info.current.owner_pubkey}")
    print(
        f"Pool contract address (use ONLY for plotting - do not send money to this address): "
        f"{encode_puzzle_hash(pool_wallet_info.p2_singleton_puzzle_hash, address_prefix)}"
    )
    if pool_wallet_info.target is not None:
        print(
            f"Target state: {PoolSingletonState(pool_wallet_info.target.state).name}"
        )
        print(f"Target pool URL: {pool_wallet_info.target.pool_url}")
    # NOTE(review): this comparison uses `.value` while the other state
    # comparisons in this function use the bare enum member — presumably both
    # work because the state is an int; verify PoolSingletonState is an IntEnum.
    if pool_wallet_info.current.state == PoolSingletonState.SELF_POOLING.value:
        balances: Dict = await wallet_client.get_wallet_balance(str(wallet_id))
        balance = balances["confirmed_wallet_balance"]
        typ = WalletType(int(WalletType.POOLING_WALLET))
        # NOTE: rebinds the `address_prefix` parameter with the unit label
        # returned by wallet_coin_unit; only used for the balance line below.
        address_prefix, scale = wallet_coin_unit(typ, address_prefix)
        print(
            f"Claimable balance: {print_balance(balance, scale, address_prefix)}"
        )
    if pool_wallet_info.current.state == PoolSingletonState.FARMING_TO_POOL:
        print(f"Current pool URL: {pool_wallet_info.current.pool_url}")
        # Pool-protocol stats are only available if the farmer reported state
        # for this launcher id.
        if pool_wallet_info.launcher_id in pool_state_dict:
            pool_state = pool_state_dict[pool_wallet_info.launcher_id]
            print(
                f"Current difficulty: {pool_state_dict[pool_wallet_info.launcher_id]['current_difficulty']}"
            )
            print(
                f"Points balance: {pool_state_dict[pool_wallet_info.launcher_id]['current_points']}"
            )
            # Drop the timestamps; we only aggregate the point values.
            points_found_24h = [
                points for timestamp, points in pool_state["points_found_24h"]
            ]
            points_acknowledged_24h = [
                points for timestamp, points in pool_state["points_acknowledged_24h"]
            ]
            summed_points_found_24h = sum(points_found_24h)
            summed_points_acknowledged_24h = sum(points_acknowledged_24h)
            # Guard against division by zero when no points were found.
            if summed_points_found_24h == 0:
                success_pct = 0.0
            else:
                success_pct = summed_points_acknowledged_24h / summed_points_found_24h
            print(f"Points found (24h): {summed_points_found_24h}")
            print(f"Percent Successful Points (24h): {success_pct:.2%}")
        print(
            f"Relative lock height: {pool_wallet_info.current.relative_lock_height} blocks"
        )
        # Payout instructions are normally a puzzle hash; fall back to printing
        # the raw string if it does not decode to a bech32 address.
        payout_instructions: str = pool_state_dict[
            pool_wallet_info.launcher_id]["pool_config"]["payout_instructions"]
        try:
            payout_address = encode_puzzle_hash(
                bytes32.fromhex(payout_instructions), address_prefix)
            print(
                f"Payout instructions (pool will pay to this address): {payout_address}"
            )
        except Exception:
            print(
                f"Payout instructions (pool will pay you with this): {payout_instructions}"
            )
    if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL:
        expected_leave_height = pool_wallet_info.singleton_block_height + pool_wallet_info.current.relative_lock_height
        if pool_wallet_info.target is not None:
            print(
                f"Expected to leave after block height: {expected_leave_height}"
            )
async def create(cls, blockchain_dir: Path, db: DBWrapper) -> "BlockHeightMap":
    """Build a BlockHeightMap from the block database plus on-disk caches.

    Loads the cached height-to-hash and sub-epoch-summary files from
    ``blockchain_dir`` (missing/corrupt files are ignored and rebuilt),
    finds the current peak in the DB (schema differs between db_version 2
    and 1), and back-fills any heights the cache does not already cover.

    Returns:
        The populated map; an empty map if the DB has no peak yet.
    """
    self = BlockHeightMap()
    self.db = db

    # these fields are private (name-mangled) state of BlockHeightMap
    self.__dirty = 0
    self.__height_to_hash = bytearray()
    self.__sub_epoch_summaries = {}
    self.__height_to_hash_filename = blockchain_dir / "height-to-hash"
    self.__ses_filename = blockchain_dir / "sub-epoch-summaries"

    if db.db_version == 2:
        # v2 schema: peak hash lives in its own single-row table
        async with self.db.db.execute(
            "SELECT hash FROM current_peak WHERE key = 0") as cursor:
            peak_row = await cursor.fetchone()
        if peak_row is None:
            # empty DB: nothing to load
            return self
        async with db.db.execute(
            "SELECT header_hash,prev_hash,height,sub_epoch_summary FROM full_blocks WHERE header_hash=?",
            (peak_row[0], ),
        ) as cursor:
            row = await cursor.fetchone()
        if row is None:
            return self
    else:
        # v1 schema: the peak is flagged directly on its block record
        async with await db.db.execute(
            "SELECT header_hash,prev_hash,height,sub_epoch_summary from block_records WHERE is_peak=1"
        ) as cursor:
            row = await cursor.fetchone()
        if row is None:
            return self

    try:
        async with aiofiles.open(self.__height_to_hash_filename, "rb") as f:
            self.__height_to_hash = bytearray(await f.read())
    except Exception:
        # it's OK if this file doesn't exist, we can rebuild it
        pass
    try:
        async with aiofiles.open(self.__ses_filename, "rb") as f:
            self.__sub_epoch_summaries = {
                k: v
                for (k, v) in SesCache.from_bytes(await f.read()).content
            }
    except Exception:
        # it's OK if this file doesn't exist, we can rebuild it
        pass

    peak: bytes32
    prev_hash: bytes32
    if db.db_version == 2:
        # v2 stores raw bytes; v1 stores hex strings
        peak = row[0]
        prev_hash = row[1]
    else:
        peak = bytes32.fromhex(row[0])
        prev_hash = bytes32.fromhex(row[1])
    height = row[2]

    # allocate memory for height to hash map
    # this may also truncate it, if the file on disk had an invalid size
    # (32 bytes per height, heights 0..height inclusive)
    new_size = (height + 1) * 32
    size = len(self.__height_to_hash)
    if size > new_size:
        del self.__height_to_hash[new_size:]
    else:
        self.__height_to_hash += bytearray([0] * (new_size - size))

    # if the peak hash is already in the height-to-hash map, we don't need
    # to load anything more from the DB
    if self.get_hash(height) != peak:
        self.__set_hash(height, peak)
        if row[3] is not None:
            self.__sub_epoch_summaries[height] = row[3]
        # prepopulate the height -> hash mapping
        await self._load_blocks_from(height, prev_hash)

    await self.maybe_flush()
    return self
from chia.types.announcement import Announcement from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.condition_tools import parse_sexp_to_conditions from chia.wallet.puzzles.load_clvm import load_clvm SINGLETON_MOD = load_clvm("singleton_top_layer.clvm") LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm") P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm") POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm") POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm") LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash() SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash() LAUNCHER_ID = Program.to(b"launcher-id").get_tree_hash() POOL_REWARD_PREFIX_MAINNET = bytes32.fromhex( "ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000") def singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> Program: return SINGLETON_MOD.curry( (SINGLETON_MOD_HASH, (launcher_id, launcher_puzzle_hash)), inner_puzzle) def p2_singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32) -> Program: return P2_SINGLETON_MOD.curry(SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash)
async def create_plots(
    args, keys: PlotKeys, root_path, use_datetime=True, test_private_keys: Optional[List] = None
) -> Tuple[Dict[bytes32, Path], Dict[bytes32, Path]]:
    """Create ``args.num`` plot files and register the final dir for farming.

    Args:
        args: parsed CLI namespace (size, num, tmp/tmp2/final dirs, buffer,
            buckets, stripe_size, num_threads, nobitfield, plotid, memo,
            exclude_final_dir).
        keys: plotting keys; exactly one of ``pool_public_key`` and
            ``pool_contract_puzzle_hash`` must be set.
        root_path: chia root, used to load and update config.yaml.
        use_datetime: include a timestamp in the generated plot filename.
        test_private_keys: optional fixed master keys (one per plot) for tests;
            when None a fresh random key is generated per plot.

    Returns:
        Tuple of (newly created plots, plots that already existed), each a
        dict mapping plot id to the plot's final path.
    """
    config_filename = config_path_for_filename(root_path, "config.yaml")
    config = load_config(root_path, config_filename)

    if args.tmp2_dir is None:
        args.tmp2_dir = args.tmp_dir

    # Exactly one of the two pooling modes must be selected.
    assert (keys.pool_public_key is None) != (keys.pool_contract_puzzle_hash is None)

    num = args.num

    if args.size < config["min_mainnet_k_size"] and test_private_keys is None:
        log.warning(
            f"Creating plots with size k={args.size}, which is less than the minimum required for mainnet"
        )
    if args.size < 22:
        log.warning("k under 22 is not supported. Increasing k to 22")
        args.size = 22

    if keys.pool_public_key is not None:
        log.info(
            f"Creating {num} plots of size {args.size}, pool public key: "
            f"{bytes(keys.pool_public_key).hex()} farmer public key: {bytes(keys.farmer_public_key).hex()}"
        )
    else:
        assert keys.pool_contract_puzzle_hash is not None
        log.info(
            f"Creating {num} plots of size {args.size}, pool contract address: "
            f"{keys.pool_contract_address} farmer public key: {bytes(keys.farmer_public_key).hex()}"
        )

    # Remember which temp dirs we created so we can clean them up afterwards.
    tmp_dir_created = False
    if not args.tmp_dir.exists():
        mkdir(args.tmp_dir)
        tmp_dir_created = True

    tmp2_dir_created = False
    if not args.tmp2_dir.exists():
        mkdir(args.tmp2_dir)
        tmp2_dir_created = True

    mkdir(args.final_dir)

    created_plots: Dict[bytes32, Path] = {}
    existing_plots: Dict[bytes32, Path] = {}
    for i in range(num):
        # Generate a random master secret key (or use the supplied test key)
        if test_private_keys is not None:
            assert len(test_private_keys) == num
            sk: PrivateKey = test_private_keys[i]
        else:
            sk = AugSchemeMPL.key_gen(token_bytes(32))

        # The plot public key is the combination of the harvester and farmer keys
        # New plots will also include a taproot of the keys, for extensibility
        include_taproot: bool = keys.pool_contract_puzzle_hash is not None
        plot_public_key = ProofOfSpace.generate_plot_public_key(
            master_sk_to_local_sk(sk).get_g1(), keys.farmer_public_key, include_taproot)

        # The plot id is based on the harvester, farmer, and pool keys
        if keys.pool_public_key is not None:
            plot_id: bytes32 = ProofOfSpace.calculate_plot_id_pk(
                keys.pool_public_key, plot_public_key)
            plot_memo: bytes32 = stream_plot_info_pk(keys.pool_public_key,
                                                     keys.farmer_public_key, sk)
        else:
            assert keys.pool_contract_puzzle_hash is not None
            plot_id = ProofOfSpace.calculate_plot_id_ph(
                keys.pool_contract_puzzle_hash, plot_public_key)
            plot_memo = stream_plot_info_ph(keys.pool_contract_puzzle_hash,
                                            keys.farmer_public_key, sk)

        # Debug overrides: force a specific plot id / memo from the CLI.
        if args.plotid is not None:
            log.info(f"Debug plot ID: {args.plotid}")
            plot_id = bytes32(bytes.fromhex(args.plotid))

        if args.memo is not None:
            log.info(f"Debug memo: {args.memo}")
            plot_memo = bytes32.fromhex(args.memo)

        # Log the memo for dev debugging.
        plot_memo_str: str = plot_memo.hex()
        log.info(f"Memo: {plot_memo_str}")

        dt_string = datetime.now().strftime("%Y-%m-%d-%H-%M")

        if use_datetime:
            filename: str = f"plot-k{args.size}-{dt_string}-{plot_id}.plot"
        else:
            filename = f"plot-k{args.size}-{plot_id}.plot"
        full_path: Path = args.final_dir / filename

        resolved_final_dir: str = str(Path(args.final_dir).resolve())
        plot_directories_list: str = config["harvester"]["plot_directories"]

        if args.exclude_final_dir:
            log.info(
                f"NOT adding directory {resolved_final_dir} to harvester for farming"
            )
            if resolved_final_dir in plot_directories_list:
                log.warning(
                    f"Directory {resolved_final_dir} already exists for harvester, please remove it manually"
                )
        else:
            if resolved_final_dir not in plot_directories_list:
                # Adds the directory to the plot directories if it is not present
                log.info(
                    f"Adding directory {resolved_final_dir} to harvester for farming"
                )
                config = add_plot_directory(root_path, resolved_final_dir)

        if not full_path.exists():
            log.info(f"Starting plot {i + 1}/{num}")
            # Creates the plot. This will take a long time for larger plots.
            plotter: DiskPlotter = DiskPlotter()
            plotter.create_plot_disk(
                str(args.tmp_dir),
                str(args.tmp2_dir),
                str(args.final_dir),
                filename,
                args.size,
                plot_memo,
                plot_id,
                args.buffer,
                args.buckets,
                args.stripe_size,
                args.num_threads,
                args.nobitfield,
            )
            created_plots[plot_id] = full_path
        else:
            # Fix: the previous message ("Plot (unknown) already exists") was an
            # f-string with no placeholder and never said which plot was skipped.
            log.info(f"Plot {filename} already exists")
            existing_plots[plot_id] = full_path

    log.info("Summary:")

    # Best-effort cleanup: only remove temp dirs we created, and only if empty.
    if tmp_dir_created:
        try:
            args.tmp_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove primary temporary folder {args.tmp_dir}, it may not be empty."
            )

    if tmp2_dir_created:
        try:
            args.tmp2_dir.rmdir()
        except Exception:
            log.info(
                f"warning: did not remove secondary temporary folder {args.tmp2_dir}, it may not be empty."
            )

    log.info(f"Created a total of {len(created_plots)} new plots")
    for created_path in created_plots.values():
        log.info(created_path.name)

    return created_plots, existing_plots