def force_overwrite(config, unpack_path, p_len):
    '''
    Restore ALL stored files over the live system WITHOUT comparing
    checksums first. Prompts the user before proceeding since this
    blindly overwrites current files.

    config: dict with at least a 'log' path.
    unpack_path: directory the stored tar was unpacked into.
    p_len: length of the unpack prefix; stripping it yields each file's
           original absolute path.
    '''
    fname = 'custom_dirs.force_overwrite()'

    # Allow Exit Since This Is a Bad Idea
    paf.prWarning('OVERWRITING FILES WITHOUT CHECKSUMS CAN BE EXTREMELY DANGEROUS!')
    if paf.yn_frame('Do You Still Want to Continue and Restore ALL The Files You Stored?') is False:
        return

    # Overwrite Files
    paf.write_to_log(fname, 'Starting Force Overwrite Process...', config['log'])
    print('Starting Full File Restore! Please Be Patient As All Files are Overwritten...')
    fs_stored = paf.find_files(unpack_path)
    try:
        # The permission pickle is pacback metadata, not a user file to restore.
        fs_stored.remove(unpack_path + '/folder_permissions.pickle')
    except KeyError:
        # find_files() returns a set, so a missing entry raises KeyError.
        # Narrowed from 'except Exception' so real errors aren't swallowed;
        # this matches the handling in compare_files().
        pass
    make_missing_dirs(config, unpack_path, p_len)
    for f in track(fs_stored, description='Overwriting Files'):
        # Strip the unpack prefix to recover the original absolute path.
        shutil.move(f, f[p_len:])

    paf.prSuccess('Done Overwriting Files!')
    paf.write_to_log(fname, 'Finished Force Overwrite Of Files', config['log'])
def find_pkgs_in_dir(path):
    '''
    Scans a target directory and returns the set of files ending in the
    `.pkg.tar.zst` or `.pkg.tar.xz` package extensions.
    '''
    # str.endswith() accepts a tuple of suffixes, so one call covers
    # both compression formats.
    return {f for f in paf.find_files(path)
            if f.endswith(('.pkg.tar.xz', '.pkg.tar.zst'))}
def fresh_install(lang, uc, config):
    '''
    Prepares a brand new download directory for the given stream.

    Aborts if the user config still contains the '/path/here'
    placeholder, creates the target directory if missing, then seeds
    the 'prev' URL list from files already on disk and writes an
    empty '404' list.
    '''
    path_key = lang + '_path'

    # Refuse to run until the user has pointed the stream at a real path.
    if uc[path_key] == '/path/here':
        paf.prWarning('Your Config File Has Not Been Setup for the ' + lang.upper() + ' Stream!')
        sys.exit('Edit the File ' + config['user_config'] + ' and Re-Run Your Command!')

    if not os.path.exists(uc[path_key]):
        os.makedirs(uc[path_key])

    paf.prWarning('Scanning File System...')
    existing = paf.basenames(paf.find_files(uc[path_key]))
    # Rebuild the full source URL for every file already downloaded.
    seen_urls = {"http://data.gdeltproject.org/gdeltv2/" + name for name in existing}
    paf.export_iterable(config['base'] + '/prev-' + lang + '.txt', seen_urls)
    paf.export_iterable(config['base'] + '/404-' + lang + '.txt', [])
def store(config, info):
    '''
    Packs up user defined directories into a compressed tar.

    info: dict with 'dir_list' (folders to store) and 'tar' (output path).
    Returns a dict with 'file_count', 'raw_size', 'compressed_size' and
    the tar's 'csum'.
    '''
    # Log label corrected to match the actual function name
    # (was 'custom_dirs.pack()').
    fname = 'custom_dirs.store()'
    paf.write_to_log(fname, str(len(info['dir_list'])) + ' Folders Selected For Storage', config['log'])
    tmpfile = tempfile.gettempdir() + '/folder_permissions.pickle'

    # Fetch Folder Permissions and Pickle
    folder_perms = set()
    for d in info['dir_list']:
        folder_perms.update(paf.get_permissions(d, 'folders'))
    # Context manager guarantees the handle is closed (the original
    # called pickle.dump(..., open(tmpfile, 'wb')) and leaked the handle).
    with open(tmpfile, 'wb') as pfile:
        pickle.dump(folder_perms, pfile)

    # Scan For Files
    files = paf.find_files(info['dir_list'])

    # Pack Custom Files Into Tar
    with tarfile.open(info['tar'], 'w') as tar:
        tar.add(tmpfile, arcname='folder_permissions.pickle')
        for f in track(files, description='Adding Files to Tar'):
            tar.add(f)
    paf.rm_file(tmpfile, sudo=False)
    paf.write_to_log(fname, 'Created ' + info['tar'], config['log'])

    # Create Checksum for Tar
    print('Creating Checksum...')
    pack_csum = paf.checksum_file(info['tar'])[1]
    paf.write_to_log(fname, 'Checksummed Tar ', config['log'])

    # Compress the tar; prefer pigz (parallel gzip) when it is installed.
    print('Compressing Custom Tar...')
    if any(re.findall('pigz', l.lower()) for l in utils.pacman_Q()):
        os.system('/usr/bin/pigz ' + info['tar'] + ' -f')
    else:
        paf.gz_c(info['tar'], rm=True)
    paf.write_to_log(fname, 'Compressed ' + info['tar'], config['log'])

    pack_results = {
        'file_count': len(files),
        'raw_size': paf.convert_size(paf.size_of_files(files)),
        'compressed_size': paf.convert_size(os.path.getsize(info['tar'] + '.gz')),
        'csum': pack_csum
    }
    return pack_results
def compare_files(config, dir_list, unpack_path, p_len):
    '''
    Compares unpacked custom user files against the current system.

    Returns a dict of 'added', 'removed', 'changed', 'large' (too big
    to checksum) and 'noread' (unreadable) files.
    '''
    fname = 'custom_dirs.compare_files()'

    # Core Compare Results
    diff_added = set()
    diff_removed = set()
    diff_large = set()
    diff_noread = set()
    diff_changed = set()

    # Compare Checksums For Files That Exist
    paf.write_to_log(fname, 'Started Sorting and Comparing Files...', config['log'])

    # Search Directories
    unpack_files = paf.find_files(unpack_path)
    current_files = paf.find_files(dir_list)

    # Find Added Files and Remove From Csum Queue
    diff_added.update(current_files - {f[p_len:] for f in unpack_files})
    current_files.difference_update(diff_added)

    # Find Removed Files and Trim From Csum Queue
    diff_removed.update(unpack_files - {unpack_path + f for f in current_files})
    unpack_files.difference_update(diff_removed)
    try:
        # The permission pickle is metadata, never a "removed" user file.
        diff_removed.remove(unpack_path + '/folder_permissions.pickle')
    except KeyError:
        paf.write_to_log(fname, 'Error: Couldn\'t Find Permission Pickle.', config['log'])

    # Only Checksum Files That Exist in Both Current AND Unpack
    paf.write_to_log(fname, 'Started Checksumming Custom Files...', config['log'])
    unpack_csum = paf.checksum_files(unpack_files, output='Checksumming Stored Files')
    current_csum = paf.checksum_files(current_files, output='Checksumming Current Files')
    paf.write_to_log(fname, 'Finished Checksumming Custom Files', config['log'])

    def _split_exceptions(csums):
        # Partition checksum results into a kept set, routing failures to
        # diff_large/diff_noread. The original removed entries from the
        # collection WHILE iterating it, which raises RuntimeError on a
        # set (and silently skips entries on a list); building a fresh
        # set avoids mutating during iteration.
        kept = set()
        for csum in csums:
            if csum[1] == 'TOO LARGE!':
                diff_large.add(csum)
                paf.write_to_log(fname, csum[0] + ' Was Too Large To Checksum!', config['log'])
            elif csum[1] == 'UNREADABLE!':
                diff_noread.add(csum)
                paf.write_to_log(fname, csum[0] + ' Was Unreadable!', config['log'])
            else:
                kept.add(csum)
        return kept

    # Find Exceptions and Trim
    unpack_csum = _split_exceptions(unpack_csum)
    current_csum = _split_exceptions(current_csum)

    # Find Changed Files: anything whose (path, checksum) pair differs
    # from the stored copy once the unpack prefix is stripped.
    diff_changed.update(current_csum - {(tpl[0][p_len:], tpl[1]) for tpl in unpack_csum})
    paf.write_to_log(fname, 'Finished Comparing and Sorting Files', config['log'])

    compare_results = {
        'added': diff_added,
        'removed': diff_removed,
        'changed': diff_changed,
        'large': diff_large,
        'noread': diff_noread
    }
    return compare_results
def clean_cache(config, nc):
    '''
    Provides automated cache cleaning using pacman, paccache, and pacback.

    nc: when True, run every step without prompting; otherwise ask the
        user before each step.
    '''
    fname = 'utils.clean_cache()'
    paf.prBold('Starting Advanced Cache Cleaning...')
    paf.write_to_log(fname, 'Starting Advanced Cache Cleaning...', config['log'])
    print('')

    if nc is True or paf.yn_frame('Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('/usr/bin/pacman -R $(/usr/bin/pacman -Qtdq)')
        paf.write_to_log(fname, 'Removed Orphaned Packages', config['log'])

    if nc is True or paf.yn_frame('Do You Want To Remove Old Versions of Installed Packages?') is True:
        os.system('/usr/bin/paccache -rk ' + str(config['keep_versions']))
        paf.write_to_log(fname, 'Removed Old Package Versions', config['log'])

    if nc is True or paf.yn_frame('Do You Want To Remove Cached Orphans?') is True:
        os.system('/usr/bin/paccache -ruk0')
        paf.write_to_log(fname, 'Removed Cached Orphans', config['log'])

    if nc is True or paf.yn_frame('Do You Want To Check For Old Pacback Restore Points?') is True:
        paf.write_to_log(fname, 'Starting Search For Old Restore Points...', config['log'])
        meta_paths = sorted(f for f in paf.find_files(config['rp_paths']) if f.endswith(".meta"))
        # Build today's date directly instead of the original's
        # strftime -> split -> dt.date round-trip (same result).
        today_dt = dt.date.today()

        for m in meta_paths:
            # Meta files end in 'NN.meta'; the two digits before the
            # extension are the restore point id (was m[-7] + m[-6]).
            num = m[-7:-5]
            rp_info = {
                'id': num,
                'type': 'rp',
                'TYPE': 'Restore Point',
                'meta': m,
                'meta_md5': config['rp_paths'] + '/.rp' + num + '.md5',
                'path': config['rp_paths'] + '/rp' + num,
                'pkgcache': config['rp_paths'] + '/rp' + num + '/pkg-cache'
            }

            # Format Dates for Compare (meta stores dates as YYYY/MM/DD)
            m_dict = meta.read(config, m)
            o_split = m_dict['date'].split('/')
            old_dt = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))

            # Check How Old Restore Point Is
            days = (today_dt - old_dt).days
            if days > config['old_rp']:
                paf.prWarning('Failed: ' + rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old!')
                paf.write_to_log(fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old!', config['log'])
                if paf.yn_frame('Do You Want to Remove This ' + rp_info['TYPE'] + '?') is True:
                    utils.remove_id(config, rp_info)
                    paf.prSuccess('Restore Point Removed!')
                else:
                    paf.write_to_log(fname, 'User Declined Removal of ' + rp_info['TYPE'] + ' ' + rp_info['id'], config['log'])
            else:
                paf.prSuccess('Passed: ' + rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old')
                paf.write_to_log(fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' + str(days) + ' Days Old', config['log'])

    paf.write_to_log(fname, 'Finished Advanced Cache Cleaning', config['log'])