def check_pacback_version(current_version, rp_path, meta_exists, meta):
    '''Checks Which Pacback Version Created a Restore Point and Offers an Upgrade Path if Needed.'''
    if meta_exists is False:
        PS.Write_To_Log('VersionControl', 'Restore Point is Missing MetaData', log_file)

        # Check for Full RP Created Before V1.5
        if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz'):
            PS.prError('Full Restore Points Generated Before Version 1.5.0 Are No Longer Compatible With Newer Versions of Pacback!')
            PS.Abort_With_Log('VersionControl', 'RP Version is < V1.5 and MetaData is Missing',
                              'Without MetaData Pacback Can\'t Upgrade This Restore Point!', log_file)

    elif meta_exists is True:
        # Find the Version Recorded in the MetaData File
        for m in meta:
            if m.split(':')[0] == 'Pacback Version':
                target_version = m.split(':')[1].strip()
                break

        # Parse Versions Into Major/Minor/Patch Vars
        cv_M = int(current_version.split('.')[0])
        cv_m = int(current_version.split('.')[1])
        cv_p = int(current_version.split('.')[2])
        tv_M = int(target_version.split('.')[0])
        tv_m = int(target_version.split('.')[1])
        tv_p = int(target_version.split('.')[2])

        if current_version != target_version:
            PS.Write_To_Log('VersionControl', 'Current Version ' + current_version + ' Mismatched With ' + target_version, log_file)
        else:
            PS.Write_To_Log('VersionControl', 'Both Versions Match ' + current_version, log_file)

        # Check for Full RP's Created Before V1.5
        if tv_M == 1 and tv_m < 5:
            if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path + '.tar.gz'):
                PS.prError('Full Restore Points Generated Before V1.5.0 Are No Longer Compatible With Newer Versions of Pacback!')
                PS.Write_To_Log('VersionControl', 'Detected Restore Point Generated Before V1.5', log_file)
                upgrade = PS.YN_Frame('Do You Want to Upgrade This Restore Point?')

                if upgrade is True:
                    upgrade_to_hardlinks(rp_path)
                else:
                    PS.Abort_With_Log('VersionControl', 'User Exited Upgrade', 'Aborting!', log_file)
def print_rp_info(num):
    '''Prints the MetaData Summary For Restore Point #num.'''
    rp_meta = rp_paths + '/rp' + num + '.meta'

    if os.path.exists(rp_meta):
        meta = PS.Read_List(rp_meta)
        meta = PS.Read_Between('Pacback RP', 'Pacman List', meta, re_flag=True)
        print('============================')
        for s in meta[:-1]:
            print(s)
        print('============================')

    # The Restore Point Exists but Its MetaData File Is Missing
    elif os.path.exists(rp_paths + '/rp' + num):
        PS.prError('Meta Is Missing For This Restore Point!')

    else:
        PS.prError('Restore Point #' + num + ' Was NOT Found!')
def pre_fligh_check():
    '''Detects a Pre-1.6 Install and Migrates Its Restore Point Folder to /var/lib/pacback.'''
    base_dir = os.path.dirname(os.path.realpath(__file__))[:-5]
    old_rp_path = base_dir + '/restore-points'

    if os.path.exists(old_rp_path):
        PS.Start_Log('PreFlight', log_file)
        PS.prError('Looks Like You Are Upgrading From A Version Before 1.6!')
        PS.prWorking('Migrating Your Restore Point Folder Now...')
        check_if_root()
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        os.system('mv ' + old_rp_path + ' /var/lib/pacback')
        os.system('chown root:root /var/lib/pacback && chmod 700 /var/lib/pacback')
        PS.Write_To_Log('PreFlight', 'Pacback Successfully Migrated To /var/lib/pacback', log_file)
def rollback_packages(pkg_list):
    '''Allows User to Rollback Any Number of Packages By Name'''
    PS.Start_Log('RbPkgs', log_file)
    PS.prWorking('Searching File System for Packages...')
    cache = fetch_paccache()
    pkg_paths = list()
    PS.Write_To_Log('UserSearch', 'Started Search for ' + ' '.join(pkg_list), log_file)

    for pkg in pkg_list:
        found_pkgs = user_pkg_search(pkg, cache)
        sort_pkgs = sorted(found_pkgs, reverse=True)

        if len(found_pkgs) > 0:
            PS.Write_To_Log('UserSearch', 'Found ' + str(len(found_pkgs)) + ' pkgs for ' + pkg, log_file)
            PS.prSuccess('Pacback Found the Following Package Versions for ' + pkg + ':')
            answer = PS.Multi_Choice_Frame(sort_pkgs)

            if answer is False:
                PS.Write_To_Log('UserSearch', 'User Force Exited Selection For ' + pkg, log_file)
            else:
                # Map the Selected Version Back to Its Full Path in the Cache
                for x in cache:
                    if re.findall(re.escape(answer), x):
                        pkg_paths.append(x)
                        break
        else:
            PS.prError('No Packages Found Under the Name: ' + pkg)
            PS.Write_To_Log('UserSearch', 'Search ' + pkg.upper() + ' Returned Zero Results', log_file)

    # Only Send Packages to Pacman If the User Actually Selected Any
    if pkg_paths:
        PS.pacman(' '.join(pkg_paths), '-U')
        PS.Write_To_Log('UserSearch', 'Sent ' + ' '.join(pkg_paths) + ' to Pacman -U', log_file)
    PS.End_Log('RbPkgs', log_file)
def user_pkg_search(search_pkg, cache):
    '''Provides more accurate searches for single pkg names without a version.'''
    pkgs = trim_pkg_list(cache)
    found = set()

    for p in pkgs:
        # Strip the Version/Release Segment So Only the Package Name Remains
        r = re.split(r"\d+-\d+|\d+(?:\.\d+)+|\d:\d+(?:\.\d+)+", p)[0]
        if r.strip().endswith('-'):
            x = r.strip()[:-1]
        else:
            x = r

        if re.fullmatch(re.escape(search_pkg.lower().strip()), x):
            found.add(p)

    if not found:
        PS.prError('No Packages Found!')
        if PS.YN_Frame('Do You Want to Extend the Regex Search?') is True:
            for p in pkgs:
                if re.findall(re.escape(search_pkg.lower().strip()), p):
                    found.add(p)

    return found
def unlock_rollback():
    '''Restores Mirrorlist in /etc/pacman.d/mirrorlist Which Releases Archive Date Rollback'''
    PS.Start_Log('UnlockRollback', log_file)

    # Check if Mirrorlist is Locked to an Archive Date
    if len(PS.Read_List('/etc/pacman.d/mirrorlist')) == 1:
        PS.Write_To_Log('UnlockRollback', 'Lock Detected on Mirrorlist', log_file)

        # Fetch a Fresh Mirrorlist If the Backup Copy Is Missing
        if os.path.exists('/etc/pacman.d/mirrorlist.pacback') is False:
            PS.Write_To_Log('UnlockRollback', 'Backup Mirrorlist Is Missing', log_file)
            fetch = PS.YN_Frame('Pacback Can\'t Find Your Backup Mirrorlist! Do You Want to Fetch a New US HTTPS Mirrorlist?')
            if fetch is True:
                os.system("curl -s 'https://www.archlinux.org/mirrorlist/?country=US&protocol=https&use_mirror_status=on'"
                          " | sed -e 's/^#Server/Server/' -e '/^#/d'"
                          " | sudo tee /etc/pacman.d/mirrorlist.pacback >/dev/null")
            else:
                PS.Abort_With_Log('UnlockRollback', 'Backup Mirrorlist Is Missing and User Declined Download',
                                  'Please Manually Replace Your Mirrorlist!', log_file)

        os.system('sudo cp /etc/pacman.d/mirrorlist.pacback /etc/pacman.d/mirrorlist')
        PS.Write_To_Log('UnlockRollback', 'Mirrorlist Was Restored Successfully', log_file)

    else:
        PS.Write_To_Log('UnlockRollback', 'No Mirrorlist Lock Was Found', log_file)
        PS.End_Log('UnlockRollback', log_file)
        return PS.prError('Pacback Does NOT Have an Active Date Lock!')

    # Offer to Update the System Now That the Lock Is Released
    update = PS.YN_Frame('Do You Want to Update Your System Now?')
    if update is True:
        os.system('sudo pacman -Syu')
        PS.Write_To_Log('UnlockRollback', 'User Ran -Syu Upgrade', log_file)
    if update is False:
        print('Skipping Update!')
    PS.End_Log('UnlockRollback', log_file)
def rollback_to_rp(version, rp_num):
    '''Rolls the System Back to the State Recorded in Restore Point #rp_num.'''
    PS.Start_Log('RollbackRP', log_file)

    #####################
    # Stage Rollback Vars
    #####################
    rp_num = str(rp_num).zfill(2)
    rp_path = '/var/lib/pacback/restore-points/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    current_pkgs = pu.pacman_Q()

    # Set Full RP Status
    if os.path.exists(rp_path):
        full_rp = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Full RP', log_file)
    else:
        full_rp = False
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Light RP', log_file)

    # Set Meta Status, Read Meta, Diff Packages, Set Vars
    if os.path.exists(rp_meta):
        meta_exists = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Has MetaData', log_file)
        meta = PS.Read_List(rp_meta)
        meta_dirs = PS.Read_Between('= Dir List =', '= Pacman List =', meta, re_flag=True)[:-1]
        meta_old_pkgs = PS.Read_Between('= Pacman List =', '<Endless>', meta, re_flag=True)

        # Checking for Changed and Added Packages
        changed_pkgs = set(set(meta_old_pkgs) - current_pkgs)
        meta_old_pkg_strp = {pkg.split(' ')[0] for pkg in meta_old_pkgs}
        current_pkg_strp = {pkg.split(' ')[0] for pkg in current_pkgs}
        added_pkgs = set(current_pkg_strp - meta_old_pkg_strp)
        m_search = PS.Replace_Spaces(changed_pkgs)
        PS.Write_To_Log('RollbackRP', 'Finished Reading RP MetaData', log_file)
    else:
        meta_exists = False
        meta = None
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Missing MetaData', log_file)

    # Abort If No Files Are Found
    if meta_exists is False and full_rp is False:
        PS.Abort_With_Log('RollbackRP', 'Restore Point #' + rp_num + ' Was NOT FOUND!',
                          'Restore Point #' + rp_num + ' Was NOT FOUND!', log_file)

    # Compare Versions
    vc.check_pacback_version(version, rp_path, meta_exists, meta)

    ####################
    # Full Restore Point
    ####################
    if full_rp is True:
        if meta_exists is True:
            # Pass If No Packages Have Changed
            if len(changed_pkgs) > 0:
                PS.Write_To_Log('RollbackRP', str(len(changed_pkgs)) + ' Packages Have Been Changed', log_file)
                found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP', 'Sent Found Packages to pacman -U', log_file)
            else:
                PS.prSuccess('No Packages Have Been Changed!')
                PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed', log_file)

        elif meta_exists is False:
            rp_cache = rp_path + '/pac_cache'
            PS.pacman(rp_cache + '/*', '--needed -U')
            PS.Write_To_Log('RollbackRP', 'Sent pacman -U /* --needed', log_file)
            PS.prError('Restore Point #' + rp_num + ' MetaData Was NOT FOUND!')
            PS.Abort_With_Log('RollbackRP', 'Meta Is Missing So Skipping Advanced Features',
                              'Skipping Advanced Features!', log_file)

    #####################
    # Light Restore Point
    #####################
    elif meta_exists is True and full_rp is False:
        # Pass If No Packages Have Changed
        if len(changed_pkgs) > 0:
            PS.prWorking('Bulk Scanning for ' + str(len(meta_old_pkgs)) + ' Packages...')
            found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
        else:
            PS.prSuccess('No Packages Have Been Changed!')
            PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed', log_file)
            found_pkgs = set()

        if len(changed_pkgs) == 0:
            pass

        # Pass Comparison If All Packages Are Found
        elif len(found_pkgs) == len(changed_pkgs):
            PS.prSuccess('All Packages Found In Your Local File System!')
            PS.Write_To_Log('RollbackRP', 'All Packages Found', log_file)
            PS.pacman(' '.join(found_pkgs), '--needed -U')
            PS.Write_To_Log('RollbackRP', 'Sent Found Packages To pacman -U', log_file)

        # Branch If Packages Are Missing
        elif len(found_pkgs) < len(changed_pkgs):
            PS.Write_To_Log('RollbackRP', str(len(changed_pkgs) - len(found_pkgs)) + ' Packages Were Not Found', log_file)
            missing_pkg = set(m_search - pu.trim_pkg_list(found_pkgs))

            # Show Missing Pkgs
            PS.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                PS.prError(pkg)

            if PS.YN_Frame('Do You Want To Continue Anyway?') is True:
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP', 'Sent Found Packages To pacman -U', log_file)
            else:
                PS.Abort_With_Log('RollbackRP', 'User Aborted Rollback Because of Missing Packages',
                                  'Aborting Rollback!', log_file)

    # Ask User If They Want to Remove New Packages
    if len(added_pkgs) > 0:
        PS.prWarning('The Following Packages Are Installed But Are NOT Present in Restore Point #' + rp_num + ':')
        PS.Write_To_Log('RollbackRP', str(len(added_pkgs)) + ' Packages Have Been Added Since RP Creation', log_file)
        for pkg in added_pkgs:
            PS.prAdded(pkg)

        if PS.YN_Frame('Do You Want to Remove These Packages From Your System?') is True:
            PS.pacman(' '.join(added_pkgs), '-R')
            PS.Write_To_Log('RollbackRP', 'Sent Added Packages To pacman -R', log_file)
    else:
        PS.prSuccess('No Packages Have Been Added!')
        PS.Write_To_Log('RollbackRP', 'No Packages Have Been Added Since RP Creation', log_file)

    ########################
    # Stage Custom File Diff
    ########################
    if len(meta_dirs) > 0:
        PS.Write_To_Log('RollbackRP', 'Custom Dirs Specified in RP Meta File', log_file)
        custom_dirs = rp_tar[:-4]

        if os.path.exists(rp_tar + '.gz'):
            PS.prWorking('Decompressing Restore Point....')
            # Use pigz If It Is Installed, Otherwise Fall Back to Python's gzip
            if any(re.findall('pigz', line.lower()) for line in current_pkgs):
                os.system('pigz -d ' + rp_tar + '.gz -f')
                PS.Write_To_Log('RPDiff', 'Decompressed Custom Files With Pigz', log_file)
            else:
                PS.GZ_D(rp_tar + '.gz')
                PS.Write_To_Log('RPDiff', 'Decompressed Custom Files With Python', log_file)

        if os.path.exists(custom_dirs):
            PS.RM_Dir(custom_dirs, sudo=True)

        PS.prWorking('Unpacking Files from Restore Point Tar....')
        PS.Untar_Dir(rp_tar)
        PS.Write_To_Log('RPDiff', 'Unpacked Custom Files From RP Tar', log_file)

        ################################
        # Restore Files Without Checksum
        ################################
        diff_yn = PS.YN_Frame('Do You Want to Checksum Diff Restore Point Files Against Your Current File System?')
        if diff_yn is False:
            PS.Write_To_Log('RPDiff', 'User Skipped Checksumming Files', log_file)
            PS.prWarning('OVERWRITING FILES WITHOUT CHECKSUMMING CAN BE EXTREMELY DANGEROUS!')
            ow = PS.YN_Frame('Do You Still Want to Continue and Restore ALL Files?')

            if ow is False:
                PS.Write_To_Log('RPDiff', 'User Declined Overwrite After Skipping Diff', log_file)
                print('Skipping! Restore Point Files Are Unpacked in ' + custom_dirs)
                PS.Write_To_Log('RPDiff', 'Left Files Unpacked in ' + custom_dirs, log_file)

            elif ow is True:
                print('Starting Full File Restore! Please Be Patient As All Files are Overwritten...')
                rp_fs = PS.Search_FS(custom_dirs)
                PS.prWorking('Please Be Patient. This May Take a While...')
                for f in rp_fs:
                    os.system('sudo mkdir -p ' + PS.Escape_Bash('/'.join(f.split('/')[:-1]))
                              + ' && sudo cp -af ' + PS.Escape_Bash(f) + ' ' + PS.Escape_Bash(f[len(custom_dirs):]))

        ############################
        # Checksum and Compare Files
        ############################
        elif diff_yn is True:
            PS.Write_To_Log('RPDiff', 'Started Checksumming Custom Files', log_file)
            rp_fs = PS.Search_FS(custom_dirs)
            rp_fs_trim = set(path[len(custom_dirs):] for path in PS.Search_FS(custom_dirs))

            # Checksum Restore Point Files With a MultiProcessing Pool
            with mp.Pool(os.cpu_count()) as pool:
                rp_checksum = set(tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs),
                                            total=len(rp_fs), desc='Checksumming Restore Point Files'))
                sf_checksum = set(tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs_trim),
                                            total=len(rp_fs_trim), desc='Checksumming Source Files'))
            PS.Write_To_Log('RPDiff', 'Finished Checksumming Custom Files', log_file)

            # Compare Checksums For Files That Exist
            PS.Write_To_Log('RPDiff', 'Starting Sorting and Comparing Files', log_file)
            rp_csum_trim = set(path[len(custom_dirs):] for path in rp_checksum)
            rp_diff = sf_checksum.difference(rp_csum_trim)

            # Filter Removed and Changed Files
            diff_removed = set()
            diff_changed = set()
            for csum in rp_diff:
                if re.findall('FILE MISSING', csum):
                    diff_removed.add(csum)
                else:
                    diff_changed.add(csum.split(' : ')[0] + ' : FILE CHANGED!')

            # Find Added Files
            src_fs = set()
            for x in meta_dirs:
                for l in PS.Search_FS(x):
                    src_fs.add(l)
            diff_new = src_fs.difference(rp_fs_trim)
            PS.Write_To_Log('RPDiff', 'Finished Comparing and Sorting Files', log_file)

            # Print Changed Files For User
            if len(diff_changed) + len(diff_new) + len(diff_removed) == 0:
                PS.Write_To_Log('RPDiff', 'Checksum Returned Zero Changed Files', log_file)
                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff', 'Cleaned Up Files and Completed Successfully', log_file)
                PS.prSuccess('No Files Have Been Changed!')

            #################
            # Overwrite Files
            #################
            else:
                if len(diff_changed) > 0:
                    PS.Write_To_Log('RPDiff', 'Found ' + str(len(diff_changed)) + ' Changed Files', log_file)
                    PS.prWarning('The Following Files Have Changed:')
                    for f in diff_changed:
                        PS.prChanged(f)

                    if PS.YN_Frame('Do You Want to Overwrite Files That Have Been CHANGED?') is True:
                        PS.prWorking('Please Be Patient. This May Take a While...')
                        for f in diff_changed:
                            fs = f.split(' : ')[0]
                            os.system('sudo cp -af ' + PS.Escape_Bash(custom_dirs + fs) + ' ' + PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Changed Files', log_file)
                    else:
                        PS.Write_To_Log('RPDiff', 'User Declined Restoring Changed Files', log_file)

                if len(diff_removed) > 0:
                    PS.Write_To_Log('RPDiff', 'Found ' + str(len(diff_removed)) + ' Removed Files', log_file)
                    PS.prWarning('The Following Files Have Been Removed:')
                    for f in diff_removed:
                        PS.prRemoved(f)

                    if PS.YN_Frame('Do You Want to Add Files That Have Been REMOVED?') is True:
                        PS.prWorking('Please Be Patient. This May Take a While...')
                        for f in diff_removed:
                            fs = f.split(' : ')[0]
                            os.system('sudo mkdir -p ' + PS.Escape_Bash('/'.join(fs.split('/')[:-1]))
                                      + ' && sudo cp -af ' + PS.Escape_Bash(custom_dirs + fs) + ' ' + PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Removed Files', log_file)
                    else:
                        PS.Write_To_Log('RPDiff', 'User Declined Restoring Removed Files', log_file)

                if len(diff_new) > 0:
                    PS.Write_To_Log('RPDiff', 'Found ' + str(len(diff_new)) + ' New Files', log_file)
                    PS.prWarning('The Following Files Have Been Added:')
                    for f in diff_new:
                        PS.prAdded(f + ' : NEW FILE!')

                    if PS.YN_Frame('Do You Want to Remove Files That Have Been ADDED?') is True:
                        for f in diff_new:
                            fs = f.split(' : ')[0]
                            os.system('sudo rm ' + PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Removed New Files', log_file)
                    else:
                        PS.Write_To_Log('RPDiff', 'User Declined Removing New Files', log_file)

                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff', 'Done Comparing and Restoring Files', log_file)
                PS.prSuccess('File Diff and Restore Complete!')

    else:
        PS.prSuccess('Rollback to Restore Point #' + rp_num + ' Complete!')
        PS.Write_To_Log('RollbackRP', 'Rollback to RP #' + rp_num + ' Complete', log_file)

    PS.End_Log('RollbackRP', log_file)
def create_restore_point(version, rp_num, rp_full, dir_list, no_confirm, notes):
    '''Builds a Full or Light Restore Point and Exports Its MetaData File.'''
    PS.Start_Log('CreateRP', log_file)

    # Fail Safe for New Users
    if os.path.exists(rp_paths) is False:
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        PS.MK_Dir(rp_paths, sudo=False)
        PS.Write_To_Log('CreateRP', 'Created Base RP Folder in /var/lib', log_file)

    # Set Base Vars
    rp_num = str(rp_num).zfill(2)
    rp_path = rp_paths + '/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    found_pkgs = set()
    pac_size = 0

    # Check for Existing Restore Points
    if os.path.exists(rp_path) or os.path.exists(rp_meta):
        if no_confirm is False:
            if int(rp_num) != 0:
                PS.prWarning('Restore Point #' + rp_num + ' Already Exists!')
                if PS.YN_Frame('Do You Want to Overwrite It?') is False:
                    PS.Abort_With_Log('CreateRP', 'User Aborted Overwrite of RP #' + rp_num, 'Aborting!', log_file)
        PS.RM_File(rp_meta, sudo=False)
        PS.RM_Dir(rp_path, sudo=False)
        PS.Write_To_Log('CreateRP', 'Removed RP #' + rp_num + ' During Overwrite', log_file)

    ###########################
    # Full Restore Point Branch
    ###########################
    if rp_full is True:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As Full RP', log_file)
        print('Building Full Restore Point...')

        # Set Vars For Full RP
        dir_size = 0
        rp_files = set()
        pac_cache = rp_path + '/pac_cache'
        PS.prWorking('Retrieving Current Packages...')
        pkg_search = pu.pacman_Q(replace_spaces=True)

        # Search File System for Pkgs
        PS.prWorking('Bulk Scanning for ' + str(len(pkg_search)) + ' Packages...')
        found_pkgs = pu.search_paccache(pkg_search, pu.fetch_paccache())
        pac_size = PS.Size_Of_Files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            PS.Write_To_Log('CreateRP', 'Not All Packages Were Found', log_file)
            if int(rp_num) != 0:
                if no_confirm is False:
                    pkg_split = pu.trim_pkg_list(found_pkgs)
                    PS.prError('The Following Packages Were NOT Found!')
                    for pkg in set(pkg_search - pkg_split):
                        PS.prWarning(pkg + ' Was NOT Found!')
                    if PS.YN_Frame('Do You Still Want to Continue?') is False:
                        PS.Abort_With_Log('CreateRP', 'User Aborted Due to Missing Pkgs', 'Aborting!', log_file)

        # HardLink Packages to RP
        PS.MK_Dir(rp_path, sudo=False)
        PS.MK_Dir(pac_cache, sudo=False)
        for pkg in tqdm.tqdm(found_pkgs, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + pac_cache + '/' + pkg.split('/')[-1])
        PS.Write_To_Log('CreateRP', 'HardLinked ' + str(len(found_pkgs)) + ' Packages', log_file)

        # Find Custom Files for RP
        if dir_list:
            PS.Write_To_Log('CreateRP', 'User Defined Custom RP Files', log_file)

            # Find and Get Size of Custom Files
            for d in dir_list:
                for f in PS.Search_FS(d, 'set'):
                    try:
                        dir_size += os.path.getsize(f)
                    except OSError:
                        pass
                    rp_files.add(f)

            # Pack Custom Folders Into a Tar
            with tarfile.open(rp_tar, 'w') as tar:
                for f in tqdm.tqdm(rp_files, desc='Adding Dirs to Tar'):
                    tar.add(f)
            PS.Write_To_Log('CreateRP', 'Tar Created For Custom RP Files', log_file)

            # Compress Custom Files If They Total More Than 1GB
            if dir_size > 1073741824:
                PS.prWorking('Compressing Restore Point Files...')
                # Use pigz If It Is Installed, Otherwise Fall Back to Python's gzip
                if any(re.findall('pigz', l.lower()) for l in pkg_search):
                    os.system('pigz ' + rp_tar + ' -f')
                else:
                    PS.GZ_C(rp_tar, rm=True)
                PS.Write_To_Log('CreateRP', 'Compressed Custom Files RP Tar', log_file)

    elif rp_full is False:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As A Light RP', log_file)
        if len(dir_list) > 0:
            PS.Abort_With_Log('CreateRP', 'Custom Dirs Are Not Supported By LightRP',
                              'Light Restore Points DO NOT Support Custom Dirs! Please Use The `-f` Flag', log_file)
        print('Building Light Restore Point...')

    #########################
    # Generate Meta Data File
    #########################
    current_pkgs = pu.pacman_Q()
    meta_list = ['====== Pacback RP #' + rp_num + ' ======',
                 'Pacback Version: ' + version,
                 'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
                 'Packages Installed: ' + str(len(current_pkgs)),
                 'Packages in RP: ' + str(len(found_pkgs)),
                 'Size of Packages in RP: ' + PS.Convert_Size(pac_size)]

    if notes:
        meta_list.append('Notes: ' + notes)

    if len(dir_list) != 0:
        meta_list.append('Dirs File Count: ' + str(len(rp_files)))
        meta_list.append('Dirs Total Size: ' + PS.Convert_Size(dir_size))
        meta_list.append('')
        meta_list.append('========= Dir List =========')
        for d in dir_list:
            meta_list.append(d)

    meta_list.append('')
    meta_list.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta_list.append(pkg)

    # Export Final Meta Data File
    PS.Export_List(rp_meta, meta_list)
    PS.Write_To_Log('CreateRP', 'RP #' + rp_num + ' Was Successfully Created', log_file)
    PS.End_Log('CreateRP', log_file)
    PS.prSuccess('Restore Point #' + rp_num + ' Successfully Created!')