# Assumed module-level imports for the functions in this section. PS
# (pacback's python_scripts helpers) and pu (its pacman utilities), along
# with the log_file and rp_paths globals, are defined elsewhere in the repo
# and are not reproduced here.
import os
import re
import tarfile
import datetime as dt

import tqdm


def pacback_hook(install):
    '''Installs or removes a standard alpm hook in /etc/pacman.d/hooks.
    The hook runs pacback as a PreTransaction hook during every upgrade.'''
    PS.Start_Log('PacbackHook', log_file)
    if install is True:
        # Writing under /etc needs root, so request sudo
        # (this matches the `sudo ln` calls elsewhere in pacback)
        PS.MK_Dir('/etc/pacman.d/hooks', sudo=True)
        PS.Uncomment_Line_Sed('HookDir', '/etc/pacman.conf', sudo=True)
        hook = ['[Trigger]',
                'Operation = Upgrade',
                'Type = Package',
                'Target = *',
                '',
                '[Action]',
                'Description = Pre-Upgrade Pacback Hook',
                'Depends = pacman',
                'When = PreTransaction',
                'Exec = /usr/bin/pacback --hook']
        PS.Export_List('/etc/pacman.d/hooks/pacback.hook', hook)
        PS.prSuccess('Pacback Hook is Now Installed!')
        PS.Write_To_Log('InstallHook', 'Installed Pacback Hook Successfully', log_file)

    elif install is False:
        PS.RM_File('/etc/pacman.d/hooks/pacback.hook', sudo=True)
        PS.Write_To_Log('RemoveHook', 'Removed Pacback Hook Successfully', log_file)
        PS.prSuccess('Pacback Hook Removed!')

    PS.End_Log('PacbackHook', log_file)
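
# A minimal sketch of what PS.Export_List is assumed to do with the `hook`
# list above (hypothetical stand-in, not pacback's actual helper): write
# each list entry as one line of the target file.
def _example_export_list(path, lines):
    '''Hypothetical illustration of PS.Export_List's assumed behavior.'''
    with open(path, 'w') as f:
        f.write('\n'.join(lines) + '\n')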
def clean_cache(count):
    '''Automated cache cleaning using pacman, paccache, and pacback.'''
    PS.Start_Log('CleanCache', log_file)
    PS.prWorking('Starting Advanced Cache Cleaning...')

    if PS.YN_Frame('Do You Want To Uninstall Orphaned Packages?') is True:
        # -Rns matches the log entry below and also removes configs and deps
        os.system('sudo pacman -Rns $(pacman -Qtdq)')
        PS.Write_To_Log('CleanCache', 'Ran pacman -Rns $(pacman -Qtdq)', log_file)

    if PS.YN_Frame('Do You Want To Remove Old Versions of Installed Packages?') is True:
        os.system('sudo paccache -rk ' + count)
        PS.Write_To_Log('CleanCache', 'Ran paccache -rk ' + count, log_file)

    if PS.YN_Frame('Do You Want To Remove Cached Orphans?') is True:
        os.system('sudo paccache -ruk0')
        PS.Write_To_Log('CleanCache', 'Ran paccache -ruk0', log_file)

    if PS.YN_Frame('Do You Want To Check For Old Pacback Restore Points?') is True:
        PS.Write_To_Log('CleanCache', 'Started Search For Old RPs', log_file)
        metas = PS.Search_FS(rp_paths, 'set')
        rps = {f for f in metas if f.endswith('.meta')}

        for m in rps:
            rp_num = m.split('/')[-1]
            # Find the RP's creation date in its meta file
            meta = PS.Read_List(m)
            for l in meta:
                if l.split(':')[0] == 'Date Created':
                    target_date = l.split(':')[1].strip()
                    break

            # Parse the stored date and compare its age in days
            o_split = target_date.split('/')
            old_date = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))
            days = (dt.date.today() - old_date).days

            if days > 180:
                PS.prWarning(m.split('/')[-1] + ' Is Over 180 Days Old!')
                if PS.YN_Frame('Do You Want to Remove This Restore Point?') is True:
                    PS.RM_File(m, sudo=True)
                    PS.RM_Dir(m[:-5], sudo=True)
                    PS.prSuccess('Restore Point Removed!')
                    PS.Write_To_Log('CleanCache', 'Removed RP ' + rp_num, log_file)
            else:
                PS.prSuccess(rp_num + ' Is Only ' + str(days) + ' Days Old!')
                PS.Write_To_Log('CleanCache', 'RP ' + rp_num + ' Was Less Than 180 Days Old', log_file)

    PS.End_Log('CleanCache', log_file)
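
# A minimal, self-contained sketch of the age check above (assumption: the
# 'Date Created' field always uses the %Y/%m/%d layout that
# create_restore_point writes below). Shown separately so the parsing logic
# can be exercised without a real restore point on disk.
def _example_rp_age_days(date_created):
    '''Hypothetical helper: days elapsed since a "YYYY/MM/DD" date string.'''
    old_date = dt.datetime.strptime(date_created, '%Y/%m/%d').date()
    return (dt.date.today() - old_date).days

# e.g. _example_rp_age_days('2020/01/01') -> age of an RP created that day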
def pacman_Q(replace_spaces=False):
    '''Dumps `pacman -Q` output into /tmp, reads the file, then removes it.'''
    os.system('pacman -Q > /tmp/pacman_q.meta')
    ql = PS.Read_List('/tmp/pacman_q.meta', typ='set')
    PS.RM_File('/tmp/pacman_q.meta', sudo=True)

    if replace_spaces is True:
        return {s.strip().replace(' ', '-') for s in ql}
    else:
        return ql
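
# Design note: the temp-file round trip above could be avoided entirely with
# subprocess. A minimal sketch of an equivalent (assumption: `pacman` is on
# PATH; this is an illustration, not the function pacback ships):
def _example_pacman_q():
    '''Hypothetical variant of pacman_Q using subprocess instead of /tmp.'''
    import subprocess
    out = subprocess.run(['pacman', '-Q'], capture_output=True, text=True)
    return set(out.stdout.splitlines())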
def remove_rp(rp_num, nc):
    PS.Start_Log('RemoveRP', log_file)
    rp = rp_paths + '/rp' + rp_num + '.meta'

    if nc is False:
        if PS.YN_Frame('Do You Want to Remove This Restore Point?') is True:
            # RPs live under /var/lib/pacback, so removal needs sudo
            # (matching the removal calls in clean_cache)
            PS.RM_File(rp, sudo=True)
            PS.RM_Dir(rp[:-5], sudo=True)
            PS.prSuccess('Restore Point Removed!')
            PS.Write_To_Log('RemoveRP', 'Removed Restore Point ' + rp_num, log_file)
        else:
            PS.Write_To_Log('RemoveRP', 'User Declined Removing Restore Point ' + rp_num, log_file)
    elif nc is True:
        PS.RM_File(rp, sudo=True)
        PS.RM_Dir(rp[:-5], sudo=True)
        PS.prSuccess('Restore Point Removed!')
        PS.Write_To_Log('RemoveRP', 'Removed Restore Point ' + rp_num, log_file)

    PS.End_Log('RemoveRP', log_file)
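
# Example usage (illustrative): remove restore point #02 without prompting.
#
#     remove_rp('02', nc=True)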
def upgrade_to_hardlinks(rp_path):
    # This is a Total Hack Job. Don't Judge Me :(
    PS.prWorking('Unpacking...')
    PS.Write_To_Log('HardlinkUpgrade', 'Unpacking Old Restore Point For Conversion', log_file)
    if os.path.exists(rp_path + '.tar.gz'):
        PS.GZ_D(rp_path + '.tar.gz')
    PS.Untar_Dir(rp_path + '.tar')
    PS.Write_To_Log('HardlinkUpgrade', 'Unpacked Restore Point', log_file)

    # Read and parse the meta data file
    meta = PS.Read_List(rp_path + '.meta')
    meta_old_pkgs = PS.Read_Between('======= Pacman List ========', '<Endless>', meta)
    meta_dirs = PS.Read_Between('========= Dir List =========', '======= Pacman List ========', meta)[:-1]
    PS.Write_To_Log('HardlinkUpgrade', 'Read RP MetaData', log_file)

    # Find existing copies of the packages elsewhere on the file system
    pc = PS.Search_FS(rp_path + '/pac_cache')
    found = pu.search_paccache(PS.Replace_Spaces(meta_old_pkgs), pu.fetch_paccache())

    if len(found) == len(pc):
        PS.prSuccess('All Packages Found!')
        PS.Write_To_Log('HardlinkUpgrade', 'All Packages Were Found Elsewhere', log_file)
        PS.RM_Dir(rp_path + '/pac_cache', sudo=False)
        PS.MK_Dir(rp_path + '/pac_cache', sudo=False)
        for pkg in tqdm.tqdm(found, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + rp_path + '/pac_cache/' + pkg.split('/')[-1])
        PS.Write_To_Log('HardlinkUpgrade', 'Hardlinked Packages From Other Locations', log_file)

    elif len(found) < len(pc):
        PS.Write_To_Log('HardlinkUpgrade', 'Not All Packages Were Found. Merging With Hardlinks', log_file)
        duplicate = PS.Trim_Dir(pc).intersection(PS.Trim_Dir(found))
        for d in tqdm.tqdm(duplicate, desc='Merging and Hardlinking'):
            PS.RM_File(rp_path + '/pac_cache/' + d, sudo=False)
            for p in found:
                if p.split('/')[-1] == d.split('/')[-1]:
                    os.system('sudo ln ' + p + ' ' + rp_path + '/pac_cache/' + d)
                    break
        PS.Write_To_Log('HardlinkUpgrade', 'Successfully Merged Restore Point Packages', log_file)

    if len(meta_dirs) > 0:
        PS.Write_To_Log('HardlinkUpgrade', 'Detected Custom Files Saved In RP', log_file)
        f_list = set()
        rp_fs = PS.Search_FS(rp_path)
        for f in rp_fs:
            if f[len(rp_path):].split('/')[1] != 'pac_cache':
                f_list.add(f)

        with tarfile.open(rp_path + '/' + rp_path[-2:] + '_dirs.tar', 'w') as tar:
            for f in tqdm.tqdm(f_list, desc="Adding Dir's to Tar"):
                tar.add(f, f[len(rp_path):])
        PS.Write_To_Log('HardlinkUpgrade', 'Added Custom Files To New RP Tar', log_file)

        for d in meta_dirs:
            PS.RM_Dir(rp_path + '/' + d.split('/')[1], sudo=False)
        PS.Write_To_Log('HardlinkUpgrade', 'Cleaned Unpacked Custom Files', log_file)

    PS.RM_File(rp_path + '.tar', sudo=False)
    PS.Write_To_Log('HardlinkUpgrade', 'Removed Old Restore Point Tar', log_file)
    PS.prSuccess('RP Version Upgrade Complete!')
    PS.Write_To_Log('HardlinkUpgrade', 'Restore Point Upgrade Complete', log_file)
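
# Design note: the function shells out to `sudo ln` because an unprivileged
# os.link() would fail on root-owned cache files. For reference, a minimal
# sketch of the equivalent call when privileges are not an issue
# (hypothetical helper, not a drop-in replacement for the sudo path):
def _example_hardlink(src, dest_dir):
    '''Hypothetical illustration of the `sudo ln` calls above via os.link.'''
    os.link(src, os.path.join(dest_dir, os.path.basename(src)))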
def create_restore_point(version, rp_num, rp_full, dir_list, no_confirm, notes):
    PS.Start_Log('CreateRP', log_file)

    # Fail-safe for new users: build the base folders on first run
    if os.path.exists(rp_paths) is False:
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        PS.MK_Dir(rp_paths, sudo=False)
        PS.Write_To_Log('CreateRP', 'Created Base RP Folder in /var/lib', log_file)

    # Set base vars
    rp_num = str(rp_num).zfill(2)
    rp_path = rp_paths + '/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    found_pkgs = set()
    pac_size = 0

    # Check for an existing restore point with this number
    if os.path.exists(rp_path) or os.path.exists(rp_meta):
        if no_confirm is False:
            if int(rp_num) != 0:
                PS.prWarning('Restore Point #' + rp_num + ' Already Exists!')
                if PS.YN_Frame('Do You Want to Overwrite It?') is False:
                    PS.Abort_With_Log('CreateRP', 'User Aborted Overwrite of RP #' + rp_num,
                                      'Aborting!', log_file)
        PS.RM_File(rp_meta, sudo=False)
        PS.RM_Dir(rp_path, sudo=False)
        PS.Write_To_Log('CreateRP', 'Removed RP #' + rp_num + ' During Overwrite', log_file)

    ###########################
    # Full Restore Point Branch
    ###########################
    if rp_full is True:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As Full RP', log_file)
        print('Building Full Restore Point...')

        # Set vars for a full RP
        dir_size = 0
        rp_files = set()
        pac_cache = rp_path + '/pac_cache'

        PS.prWorking('Retrieving Current Packages...')
        pkg_search = pu.pacman_Q(replace_spaces=True)

        # Search the file system for packages
        PS.prWorking('Bulk Scanning for ' + str(len(pkg_search)) + ' Packages...')
        found_pkgs = pu.search_paccache(pkg_search, pu.fetch_paccache())
        pac_size = PS.Size_Of_Files(found_pkgs)

        # Ask about missing packages
        if len(found_pkgs) != len(pkg_search):
            PS.Write_To_Log('CreateRP', 'Not All Packages Were Found', log_file)
            if int(rp_num) != 0:
                if no_confirm is False:
                    pkg_split = pu.trim_pkg_list(found_pkgs)
                    PS.prError('The Following Packages Were NOT Found!')
                    for pkg in set(pkg_search - pkg_split):
                        PS.prWarning(pkg + ' Was NOT Found!')
                    if PS.YN_Frame('Do You Still Want to Continue?') is False:
                        PS.Abort_With_Log('CreateRP', 'User Aborted Due to Missing Pkgs',
                                          'Aborting!', log_file)

        # Hardlink packages into the RP
        PS.MK_Dir(rp_path, sudo=False)
        PS.MK_Dir(pac_cache, sudo=False)
        for pkg in tqdm.tqdm(found_pkgs, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + pac_cache + '/' + pkg.split('/')[-1])
        PS.Write_To_Log('CreateRP', 'HardLinked ' + str(len(found_pkgs)) + ' Packages', log_file)

        # Find custom files for the RP
        if dir_list:
            PS.Write_To_Log('CreateRP', 'User Defined Custom RP Files', log_file)
            # Find and total the size of the custom files
            for d in dir_list:
                for f in PS.Search_FS(d, 'set'):
                    try:
                        dir_size += os.path.getsize(f)
                    except OSError:
                        pass
                    rp_files.add(f)

            # Pack the custom folders into a tar
            with tarfile.open(rp_tar, 'w') as tar:
                for f in tqdm.tqdm(rp_files, desc="Adding Dir's to Tar"):
                    tar.add(f)
            PS.Write_To_Log('CreateRP', 'Tar Created For Custom RP Files', log_file)

            # Compress the custom files if they total more than 1GB
            if dir_size > 1073741824:
                PS.prWorking('Compressing Restore Point Files...')
                if any(re.findall('pigz', l.lower()) for l in pkg_search):
                    os.system('pigz ' + rp_tar + ' -f')
                else:
                    PS.GZ_C(rp_tar, rm=True)
                PS.Write_To_Log('CreateRP', 'Compressed Custom Files RP Tar', log_file)

    ############################
    # Light Restore Point Branch
    ############################
    elif rp_full is False:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As A Light RP', log_file)
        if len(dir_list) > 0:
            PS.Abort_With_Log('CreateRP', 'Custom Dirs Are Not Supported By LightRP',
                              'Light Restore Points DO NOT Support Custom Dirs!\nPlease Use The `-f` Flag',
                              log_file)
        print('Building Light Restore Point...')

    #########################
    # Generate Meta Data File
    #########################
    current_pkgs = pu.pacman_Q()
    meta_list = ['====== Pacback RP #' + rp_num + ' ======',
                 'Pacback Version: ' + version,
                 'Date Created: ' + dt.datetime.now().strftime('%Y/%m/%d'),
                 'Packages Installed: ' + str(len(current_pkgs)),
                 'Packages in RP: ' + str(len(found_pkgs)),
                 'Size of Packages in RP: ' + PS.Convert_Size(pac_size)]

    if notes:
        meta_list.append('Notes: ' + notes)

    if len(dir_list) != 0:
        meta_list.append('Dirs File Count: ' + str(len(rp_files)))
        meta_list.append('Dirs Total Size: ' + PS.Convert_Size(dir_size))
        meta_list.append('')
        meta_list.append('========= Dir List =========')
        for d in dir_list:
            meta_list.append(d)

    meta_list.append('')
    meta_list.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta_list.append(pkg)

    # Export the final meta data file
    PS.Export_List(rp_meta, meta_list)
    PS.Write_To_Log('CreateRP', 'RP #' + rp_num + ' Was Successfully Created', log_file)
    PS.End_Log('CreateRP', log_file)
    PS.prSuccess('Restore Point #' + rp_num + ' Successfully Created!')
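
# Illustrative layout of the .meta file written above (all values are made
# up for this example; the field order follows meta_list exactly, and the
# Notes / Dirs fields only appear when set). The section markers are the
# same ones upgrade_to_hardlinks parses with PS.Read_Between:
#
#     ====== Pacback RP #01 ======
#     Pacback Version: 1.5.0
#     Date Created: 2020/01/01
#     Packages Installed: 842
#     Packages in RP: 840
#     Size of Packages in RP: 2.1 GB
#     Notes: pre-kernel-upgrade
#     Dirs File Count: 37
#     Dirs Total Size: 1.4 MB
#
#     ========= Dir List =========
#     /etc/nginx
#
#     ======= Pacman List ========
#     acl 2.2.53-2
#     ...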