def pacman_hook(install, config):
    '''
    Installs or removes a standard alpm hook in /usr/share/libalpm/hooks/
    which runs as a PreTransaction hook during every pacman transaction.
    `install = True` Installs Pacman Hook
    `install = False` Removes Pacman Hook
    '''
    hook_path = '/usr/share/libalpm/hooks/pacback.hook'

    if install is True:
        fname = 'utils.pacman_hook(install)'
        paf.write_to_log(fname, 'Starting Hook Installation...', config['log'])

        # alpm hook definition written line-by-line into the hooks dir
        hook_lines = [
            '[Trigger]',
            'Operation = Install',
            'Operation = Remove',
            'Operation = Upgrade',
            'Type = Package',
            'Target = *',
            '',
            '[Action]',
            'Description = Pre-Upgrade Pacback Hook',
            'Depends = pacman',
            'When = PreTransaction',
            'Exec = /usr/bin/pacback --hook'
        ]
        paf.export_iterable(hook_path, hook_lines)

        paf.prSuccess('Pacback Hook is Now Installed!')
        paf.write_to_log(fname, 'Installed Pacback PreTransaction Hook', config['log'])

    elif install is False:
        fname = 'utils.pacman_hook(remove)'
        paf.write_to_log(fname, 'Starting Hook Removal...', config['log'])

        paf.rm_file(hook_path, sudo=False)

        paf.write_to_log(fname, 'Removed Pacback PreTransaction Hook', config['log'])
        paf.prSuccess('Pacback Hook Was Removed!')
def force_overwrite(config, unpack_path, p_len):
    '''
    Restore Files Without Checksum
    '''
    fname = 'custom_dirs.force_overwrite()'

    # Allow Exit Since This Is Bad Idea
    paf.prWarning('OVERWRITING FILES WITHOUT CHECKSUMS CAN BE EXTREMELY DANGEROUS!')
    if paf.yn_frame('Do You Still Want to Continue and Restore ALL The Files You Stored?') is False:
        return

    # Overwrite Files
    paf.write_to_log(fname, 'Starting Force Overwrite Process...', config['log'])
    print('Starting Full File Restore! Please Be Patient As All Files are Overwritten...')

    stored = paf.find_files(unpack_path)
    # The permissions pickle is metadata, not a user file — don't restore it.
    perm_pickle = unpack_path + '/folder_permissions.pickle'
    if perm_pickle in stored:
        stored.remove(perm_pickle)

    make_missing_dirs(config, unpack_path, p_len)
    # Stripping the unpack-path prefix maps each stored file back onto /.
    for stored_file in track(stored, description='Overwriting Files'):
        shutil.move(stored_file, stored_file[p_len:])

    paf.prSuccess('Done Overwriting Files!')
    paf.write_to_log(fname, 'Finished Force Overwrite Of Files', config['log'])
def gdelt_live(lang):
    '''
    Syncs the most recent GDELT 2.0 update files for the given stream
    ('english' or 'translation') into a temp directory, downloading and
    extracting only files that changed since the previous fetch.
    '''
    last_eng = 'http://data.gdeltproject.org/gdeltv2/lastupdate.txt'
    last_trans = 'http://data.gdeltproject.org/gdeltv2/lastupdate-translation.txt'
    tmp_root = tempfile.gettempdir()
    old_fetch = tmp_root + '/gdelt-live/prev-' + lang + '.txt'
    dl_path = tmp_root + '/gdelt-live/' + lang

    # Downloading Most Recent File List
    if lang == 'english':
        dl = requests.get(last_eng)
    elif lang == 'translation':
        dl = requests.get(last_trans)

    # Get File and Filter URLs
    status = (lang.upper() + ' Stream Status: ' + str(dl)[1:-1])
    rule = '-' * len(status)
    print(rule)
    paf.prBold(status)
    print(rule)
    # Each inventory row is 'size hash url'; keep only the url column.
    urls = {''.join(row.split(' ')[2:]) for row in dl.text.split('\n')[:-1]}

    # Compare and Diff
    if os.path.exists(old_fetch):
        old = paf.read_file(old_fetch, 'set')
        new = set(urls.difference(old))
        rm = set(old.difference(urls))
        if len(new) == 0:
            paf.prSuccess(lang.upper() + ' Live Files are Already Up-to-Date!')
            return
        # Remove Old Files ([:-4] strips the '.zip' to get the extracted name)
        for stale in rm:
            os.remove(dl_path + '/' + ''.join(stale.split('/')[-1][:-4]))
    else:
        # Setup If First Run
        if not os.path.exists(dl_path):
            os.makedirs(dl_path)
        new = urls

    # Download URLs
    for url in new:
        base = ''.join(url.split('/')[-1])
        try:
            print('Downloading: ' + base)
            resp = requests.get(url)
            print('Decompressing: ' + base)
            with zipfile.ZipFile(io.BytesIO(resp.content), 'r') as csvzip:
                csvzip.extractall(dl_path)
        except Exception:
            print("404: " + url)

    # Export Final Results
    paf.export_iterable(old_fetch, urls)
def packages(config, pkgs):
    '''
    Allows the user to rollback packages by name.
    Packages are not sent to pacman until the user has selected
    all the packages they want to restore/change.
    '''
    # Startup
    fname = 'restore.packages(' + str(len(pkgs)) + ')'
    pkg_paths = list()
    cache = utils.scan_caches(config)

    # Search For Each Package Name And Let User Select Version
    paf.write_to_log(fname, 'Started Search for ' + ', '.join(pkgs), config['log'])
    for pkg in pkgs:
        found_pkgs = utils.user_pkg_search(pkg, cache)
        sort_pkgs = sorted(found_pkgs, reverse=True)

        if found_pkgs:
            paf.write_to_log(fname, 'Found ' + str(len(found_pkgs)) + ' Cached Versions for `' + pkg + '`', config['log'])
            paf.prSuccess('Pacback Found the Following Versions for `' + pkg + '`:')
            answer = paf.multi_choice_frame(sort_pkgs)

            # Lets User Abort Package Selection
            # BUG FIX: the original test `answer is False or None` parsed as
            # `(answer is False) or None`, so an aborted (None) selection fell
            # through to re.escape(None) below and raised a TypeError.
            if answer is False or answer is None:
                paf.write_to_log(fname, 'User Selected NOTHING For ' + pkg, config['log'])
            else:
                # Map the chosen version string back to its cache path.
                for x in cache:
                    if re.findall(re.escape(answer), x):
                        pkg_paths.append(x)
                        break
        else:
            paf.prError('No Packages Found Under the Name: ' + pkg)
            paf.write_to_log(fname, 'Search for ' + pkg.upper() + ' Returned ZERO Results!', config['log'])

    # Only call pacman once, with every selected package.
    if pkg_paths:
        paf.pacman(' '.join(pkg_paths), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages For Installation', config['log'])
    else:
        paf.write_to_log(fname, 'User Selected No Packages or No Packages Were Found', config['log'])
def gdelt_diff(lang, uc, config):
    '''
    Downloads the inventory file for the given GDELT stream, diffs it
    against the previous run, and fetches any missing files. URLs that
    fail to download are folded into the stream's 404 list.
    '''
    prev_path = config['base'] + '/prev-' + lang + '.txt'
    fof_path = config['base'] + '/404-' + lang + '.txt'

    # Download and Filter URLs
    url = config[lang]
    print_stream_status(lang, url)
    paf.prBold('Downloading ' + lang.upper() + ' Stream Inventory File...')
    inventory = requests.get(url)
    remote = {''.join(row.split(' ')[2:]) for row in inventory.text.split('\n')[:-1]}

    # Filter URL Based On Start Date
    if uc['start_date'] != 'all':
        parts = uc['start_date'].split('/')
        days = {day.replace('-', '') for day in paf.date_to_today(int(parts[0]), int(parts[1]), int(parts[2]))}
        # File basenames start with a YYYYMMDD stamp.
        remote = {u for u in remote if paf.basename(u)[:8] in days}

    # Run Install If Fresh Run
    if not os.path.exists(prev_path):
        fresh_install(lang, uc, config)

    # Compare Previous Run
    previous = paf.read_file(prev_path)
    diff = set(remote).difference(previous)

    # Download Files Into Place
    if len(diff) > 10000:
        if paf.yn_frame(str(len(diff)) + ' Files Are Missing! Do You Still Want to Continue?') is True:
            print('This May Take a While! Starting Download...')
        else:
            sys.exit()

    if len(diff) > 0:
        failed = fetch(diff, uc[lang + '_path'])
        paf.export_iterable(prev_path, remote)
        # Merge this run's failures with the ones already on record.
        for entry in paf.read_file(fof_path):
            failed.add(entry)
        paf.export_iterable(fof_path, failed)
    else:
        paf.prSuccess('All Files Are Up To Date!')
def remove_rp(config, num, nc):
    '''
    Removes a single restore point: its .meta file, checksum file,
    and data directory.

    num -- restore point number (zero-padded to two digits for paths)
    nc  -- when True, skip the interactive confirmation prompt
    '''
    fname = 'user.remove_rp(' + str(num) + ')'
    rm_info = {
        'id': str(num).zfill(2),
        'type': 'rp',
        'TYPE': 'Restore Point',
        'meta': config['rp_paths'] + '/rp' + str(num).zfill(2) + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + str(num).zfill(2) + '.md5',
        'path': config['rp_paths'] + '/rp' + str(num).zfill(2)
    }

    if nc is False:
        # BUG FIX: the original test `... is False or None` parsed as
        # `(x is False) or None`, so a None answer slipped through and
        # the restore point was removed anyway. Treat both False and
        # None as a decline.
        answer = paf.yn_frame('Are You Sure You Want to Remove This Restore Point?')
        if answer is False or answer is None:
            return

    utils.remove_id(config, rm_info)
    paf.prSuccess('Restore Point Removed!')
    # BUG FIX: `num` may be an int; the original concatenated it without
    # str() and raised a TypeError (fname above already uses str(num)).
    paf.write_to_log(fname, 'Removed Restore Point ' + str(num), config['log'])
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called directly but
    instead called through a higher level 'API' like call.
    This logic does the actual work of downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if pkg_results['search']:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        # This is Very Bad: more cache hits than search terms means the
        # search matched ambiguously, so hand off to the error resolver.
        if len(found_pkgs) > len(pkg_results['search']):
            paf.prError('Error: Somehow More Packages Were Found Than Were Searched For!')
            paf.write_to_log(fname, 'Error: Somehow More Packages Were Found Than Were Searched For!', config['log'])
            print('Starting Error Resolving Process...')

            error_handler_results = error.too_many_pkgs_found(config, parms, found_pkgs, pkg_results)

            if error_handler_results[0] is True:
                paf.prSuccess('Pacback Was Able To Automaticly Resolve This Error!')
                found_pkgs = error_handler_results[1]
            else:
                paf.prError('Pacback Was NOT Able To Automaticly Resolve This Error!')
                error.create_error_report()

        # Branch if Packages are Missing
        elif len(found_pkgs) < len(pkg_results['search']):
            missing_pkg = set(pkg_results['search'] - utils.trim_pkg_list(found_pkgs))
            paf.write_to_log(fname, str(len(found_pkgs)) + ' Out of ' + str(len(pkg_results['search'])) + ' Packages Found', config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                paf.prError(pkg)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(fname, 'User Aborted Rollback Because of Missing Packages',
                                   'Aborting Rollback!', config)

        # This is the Best Case
        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages', config['log'])

        # BUG FIX: removed a leftover debug `print(str(len(found_pkgs)))`
        # that leaked a bare number to stdout before the pacman call (the
        # parallel implementation of this routine does not have it).
        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])
    else:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed', config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        print('')
        paf.write_to_log(fname, str(len(pkg_results['a_pkgs'])) + ' Have Been Added Since Creation', config['log'])
        paf.prWarning(str(len(pkg_results['a_pkgs'])) + ' Packages Have Been Added Since Creation')
        for pkg in pkg_results['a_pkgs']:
            paf.prAdded(pkg)
        print('')
        if paf.yn_frame('Do You Want to Remove These Packages From Your System?') is True:
            print('')
            paf.pacman(' '.join(pkg_results['a_pkgs']), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`', config['log'])
    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
def restore(config, info, dir_list, checksum):
    '''
    This is the main 'api' entrance point for file restoration.
    This function orchestrates the process handing of work to other funcs.
    '''
    fname = 'custom_dirs.restore()'
    unpack_path = info['tar'][:-4]
    p_len = len(unpack_path)
    # BUG FIX: this log entry was the literal placeholder 'PLACE HOLDER'.
    paf.write_to_log(fname, 'Started Restoring Custom Files...', config['log'])

    # Decompress Tar
    if os.path.exists(info['tar.gz']):
        paf.prWarning('Decompressing Custom Tar....')
        # Prefer pigz (parallel gzip) when the package is installed.
        if any(re.findall('pigz', line.lower()) for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz -d ' + info['tar.gz'] + ' -f')
            paf.write_to_log(fname, 'Decompressed Tar With Pigz', config['log'])
        else:
            paf.gz_d(info['tar.gz'])
            paf.write_to_log(fname, 'Decompressed Tar With Python', config['log'])

    # Check Tar Csum And Unpack
    if os.path.exists(info['tar']):
        # Checksum Tar
        print('Checking Integrity of Tar...')
        tar_csum = paf.checksum_file(info['tar'])[1]
        paf.write_to_log(fname, 'Checksummed Tar', config['log'])

        if tar_csum == checksum:
            paf.write_to_log(fname, 'Tar Passed Checksum Integrity Check', config['log'])
            paf.prSuccess('Tar Passed Integrity Check')
        else:
            paf.write_to_log(fname, 'Custom Tar Failed Integrity Check!', config['log'])
            paf.prError('Custom Tar Failed Integrity Check!')
            paf.prBold('Skipping Custom File Restoration!')
            return

        # Clean Then Unpack Tar
        paf.prWarning('Unpacking Files from Tar....')
        paf.rm_dir(unpack_path, sudo=True)
        paf.untar_dir(info['tar'])
        paf.write_to_log(fname, 'Unpacked Custom Files From Tar', config['log'])
    else:
        # Skip If Tar is Missing
        # BUG FIX: corrected the 'Spesifies' typo in the log message.
        paf.write_to_log(fname, 'Meta Data File Specifies A Tar That is Now Missing!', config['log'])
        paf.prError('This Restore Point is Missing It\'s Custom Tar!')
        return

    if paf.yn_frame('Do You Want to Compare Restore Point Files Against Your Current File System?') is True:
        results = compare_files(config, dir_list, unpack_path, p_len)
        # Exit If No Changes Made to Files
        if len(results['added']) + len(results['removed']) + len(results['changed']) == 0:
            paf.write_to_log(fname, 'Checksum Returned 0 Changed, Removed or Added Files', config['log'])
            paf.prSuccess('No Changes Have Been Made to Your File System!')
        else:
            smart_overwrite(config, results, unpack_path, p_len)
    else:
        force_overwrite(config, unpack_path, p_len)

    # Cleanup After Runtime
    repack(config, info, unpack_path)
def smart_overwrite(config, csum_results, unpack_path, p_len):
    '''
    Main File Restoration Logic

    Walks the checksum comparison results and, with per-category user
    confirmation, restores changed files, re-creates removed files, and
    deletes files added since the restore point was made.
    '''
    fname = 'custom_dirs.smart_overwrite()'

    if csum_results['changed']:
        paf.write_to_log(fname, 'Found ' + str(len(csum_results['changed'])) + ' Changed Files', config['log'])
        print('')
        print('#################################')
        paf.prWarning('The Following Files Have Changed:')
        print('#################################')
        print('')
        # 'changed' entries are tuples; f[0] is the absolute file path.
        for f in list(csum_results['changed']):
            paf.prChanged(f[0])
        print('')
        if paf.yn_frame('Do You Want to Restore ' + str(len(csum_results['changed'])) + ' Files That Have Been CHANGED?') is True:
            for f in track(csum_results['changed'], description='Restoring Changed Files'):
                shutil.move(unpack_path + f[0], f[0])
            paf.write_to_log(fname, 'Restored Changed Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Restoring Changed Files', config['log'])

    if csum_results['removed']:
        paf.write_to_log(fname, 'Found ' + str(len(csum_results['removed'])) + ' Removed Files', config['log'])
        print('')
        print('######################################')
        paf.prWarning('The Following Files Have Been Removed:')
        print('######################################')
        print('')
        # 'removed' entries are unpack-path-prefixed; strip it for display.
        for f in list(csum_results['removed']):
            paf.prRemoved(f[p_len:])
        print('')
        if paf.yn_frame('Do You Want to Restore ' + str(len(csum_results['removed'])) + ' Files That Have Been REMOVED?') is True:
            make_missing_dirs(config, unpack_path, p_len)
            for f in track(csum_results['removed'], description='Restoring Removed Files'):
                # BUG FIX: original called os.shutil(f, f[p_len:]), which
                # raises AttributeError — `os` has no `shutil`. Use
                # shutil.move like the changed-files branch above.
                shutil.move(f, f[p_len:])
            paf.write_to_log(fname, 'Restored Removed Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Restoring Removed Files', config['log'])

    if csum_results['added']:
        paf.write_to_log(fname, 'Found ' + str(len(csum_results['added'])) + ' New Files', config['log'])
        print('')
        print('####################################')
        paf.prWarning('The Following Files Have Been Added:')
        print('####################################')
        print('')
        for f in list(csum_results['added']):
            paf.prAdded(f)
        print('')
        if paf.yn_frame('Do You Want to Remove ' + str(len(csum_results['added'])) + ' Files That Have Been ADDED?') is True:
            for f in track(csum_results['added'], description='Removing New Files'):
                os.remove(f)
            paf.write_to_log(fname, 'Removed New Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Removing New Files', config['log'])

    paf.prSuccess('Done Restoring Files!')
    paf.write_to_log(fname, 'Done Restoring Files', config['log'])
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called
    directly as restore.main(). This logic does the actual work of
    downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if not pkg_results['search']:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed', config['log'])
    else:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        if len(found_pkgs) != len(pkg_results['search']):
            # Branch if Packages are Missing
            missing_pkg = set(pkg_results['search'] - utils.trim_pkg_list(found_pkgs))
            count_msg = str(len(found_pkgs)) + ' Out of ' + str(len(pkg_results['search'])) + ' Packages Found'
            paf.write_to_log(fname, count_msg, config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for missing in missing_pkg:
                paf.prError(missing)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(fname, 'User Aborted Rollback Because of Missing Packages',
                                   'Aborting Rollback!', config)
        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages', config['log'])

        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        added = pkg_results['a_pkgs']
        print('')
        paf.write_to_log(fname, str(len(added)) + ' Have Been Added Since Creation', config['log'])
        paf.prWarning(str(len(added)) + ' Packages Have Been Added Since Creation')
        for new_pkg in added:
            paf.prAdded(new_pkg)
        print('')
        if paf.yn_frame('Do You Want to Remove These Packages From Your System?') is True:
            print('')
            paf.pacman(' '.join(added), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`', config['log'])
    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
def clean_cache(config, nc):
    '''
    This provides automated cache cleaning using pacman, paccache, and pacback.

    Runs up to four cleaning passes, each gated behind a yes/no prompt
    unless `nc` (no-confirm) is True, in which case every pass runs
    unprompted: orphan removal, old cached package versions, cached
    orphans, and pacback restore points older than config['old_rp'] days.
    '''
    fname = 'utils.clean_cache()'
    paf.prBold('Starting Advanced Cache Cleaning...')
    paf.write_to_log(fname, 'Starting Advanced Cache Cleaning...', config['log'])
    print('')

    # Uninstall packages nothing depends on (-Qtdq lists true orphans).
    if nc is True or paf.yn_frame('Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('/usr/bin/pacman -R $(/usr/bin/pacman -Qtdq)')
        paf.write_to_log(fname, 'Removed Orphaned Packages', config['log'])

    # Trim the pacman cache, keeping config['keep_versions'] of each package.
    if nc is True or paf.yn_frame('Do You Want To Remove Old Versions of Installed Packages?') is True:
        os.system('/usr/bin/paccache -rk ' + str(config['keep_versions']))
        paf.write_to_log(fname, 'Removed Old Package Versions', config['log'])

    # Drop every cached version of packages that are no longer installed.
    if nc is True or paf.yn_frame('Do You Want To Remove Cached Orphans?') is True:
        os.system('/usr/bin/paccache -ruk0')
        paf.write_to_log(fname, 'Removed Cached Orphans', config['log'])

    # Scan restore-point metadata and offer to delete expired ones.
    if nc is True or paf.yn_frame('Do You Want To Check For Old Pacback Restore Points?') is True:
        paf.write_to_log(fname, 'Starting Search For Old Restore Points...', config['log'])
        meta_paths = sorted(f for f in paf.find_files(config['rp_paths']) if f.endswith(".meta"))
        today = dt.datetime.now().strftime("%Y/%m/%d")
        t_split = (today.split('/'))
        today_dt = dt.date(int(t_split[0]), int(t_split[1]), int(t_split[2]))

        for m in meta_paths:
            # m[-7] + m[-6] pulls the two-digit id out of '...rpXX.meta'.
            rp_info = {
                'id': m[-7] + m[-6],
                'type': 'rp',
                'TYPE': 'Restore Point',
                'meta': m,
                'meta_md5': config['rp_paths'] + '/.rp' + m[-7] + m[-6] + '.md5',
                'path': config['rp_paths'] + '/rp' + m[-7] + m[-6],
                'pkgcache': config['rp_paths'] + '/rp' + m[-7] + m[-6] + '/pkg-cache'
            }

            # Format Dates for Compare (meta 'date' appears to be a
            # 'YYYY/MM/DD' string — parsed the same way as `today` above).
            m_dict = meta.read(config, m)
            o_split = (m_dict['date'].split('/'))
            old_dt = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))

            # Check How Old Restore Point Is
            days = (today_dt - old_dt).days
            if days > config['old_rp']:
                paf.prWarning('Failed: ' + rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old!')
                paf.write_to_log(fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old!', config['log'])
                # Even with old age, removal still asks the user each time.
                if paf.yn_frame('Do You Want to Remove This ' + rp_info['TYPE'] + '?') is True:
                    utils.remove_id(config, rp_info)
                    paf.prSuccess('Restore Point Removed!')
                else:
                    paf.write_to_log(fname, 'User Declined Removal of ' + rp_info['TYPE'] + ' ' + rp_info['id'], config['log'])
            else:
                paf.prSuccess('Passed: ' + rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old')
                paf.write_to_log(fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old', config['log'])

    paf.write_to_log(fname, 'Finished Advanced Cache Cleaning', config['log'])