Example #1
def validate(config, info):
    '''
    Checks if a meta file has become corrupted or is missing.
    '''
    fname = 'meta.validate(' + info['type'] + info['id'] + ')'

    if os.path.exists(info['meta']) and os.path.exists(info['meta_md5']):
        paf.write_to_log(fname, 'Meta File and Meta Checksum Are Present',
                         config['log'])
        with open(info['meta_md5']) as md5_file:
            csum = md5_file.read().strip()
        msum = str(paf.checksum_file(info['meta'])[1]).strip()

        if csum == msum:
            paf.write_to_log(fname, 'Meta Passed Checksum', config['log'])
            return

        else:
            paf.write_to_log(fname, 'Meta Checksum FAILED!', config['log'])
            paf.prError(info['TYPE'] + ' ' + info['id'] +
                        ' Has Failed its Checksum Check!')
            paf.prError('This ' + info['TYPE'] + ' Has Likely Become Corrupt!')

            if paf.yn_frame('Do You Want to Remove This ' + info['TYPE'] +
                            ' Now?') is True:
                utils.remove_id(config, info)
                session.abort(fname, 'User Deleted Corrupted ' + info['TYPE'],
                              info['TYPE'] + ' Was Removed. Exiting Now!',
                              config)
            else:
                session.abort(
                    fname,
                    'User Chose NOT to Remove Corrupted ' + info['TYPE'],
                    'Okay, Leaving the ' + info['TYPE'] +
                    ' Alone. Exiting Now!', config)

    elif os.path.exists(info['meta']) and not os.path.exists(info['meta_md5']):
        paf.write_to_log(fname, 'Meta File is Missing its Checksum File!',
                         config['log'])
        paf.prError(info['TYPE'] + ' ' + info['id'] +
                    ' is Missing a Checksum!')

        if paf.yn_frame('Do You Still Want To Continue?') is False:
            session.abort(fname, 'User Exited Due to Missing Checksum File',
                          'Okay, Aborting Due to Missing Checksum', config)
        else:
            paf.write_to_log(
                fname,
                'User Chose To Continue Even Though The Checksum is Missing',
                config['log'])
            return
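A stand-alone sketch of the checksum comparison validate() relies on, assuming paf.checksum_file() returns an MD5 hex digest (the .md5 sidecar files suggest as much); the helper names and paths here are illustrative, not part of pacback:

import hashlib


def md5_of_file(path):
    # Hash in chunks so large files never load into memory all at once.
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest()


def verify(meta_path, md5_path):
    # Compare the stored digest against a freshly computed one.
    with open(md5_path) as f:
        stored = f.read().strip()
    return stored == md5_of_file(meta_path)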
Example #2
def force_overwrite(config, unpack_path, p_len):
    '''
    Restore Files Without Checksum
    '''
    fname = 'custom_dirs.force_overwrite()'

    # Allow Exit Since This Is a Bad Idea
    paf.prWarning(
        'OVERWRITING FILES WITHOUT CHECKSUMS CAN BE EXTREMELY DANGEROUS!')
    if paf.yn_frame(
            'Do You Still Want to Continue and Restore ALL The Files You Stored?'
    ) is False:
        return

    # Overwrite Files
    paf.write_to_log(fname, 'Starting Force Overwrite Process...',
                     config['log'])
    print(
        'Starting Full File Restore! Please Be Patient As All Files are Overwritten...'
    )
    fs_stored = paf.find_files(unpack_path)
    try:
        fs_stored.remove(unpack_path + '/folder_permissions.pickle')
    except ValueError:
        # list.remove() raises ValueError if the pickle file isn't present.
        pass
    make_missing_dirs(config, unpack_path, p_len)
    for f in track(fs_stored, description='Overwriting Files'):
        shutil.move(f, f[p_len:])

    paf.prSuccess('Done Overwriting Files!')
    paf.write_to_log(fname, 'Finished Force Overwrite Of Files', config['log'])
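The f[p_len:] slice above maps a file inside the unpack tree back to its original absolute path. A self-contained sketch of that mapping under an assumed unpack layout:

import os
import shutil


def restore_tree(unpack_path):
    # Files under unpack_path mirror absolute paths on the live filesystem;
    # stripping the unpack prefix recovers each destination.
    p_len = len(unpack_path)
    for root, _dirs, files in os.walk(unpack_path):
        for name in files:
            src = os.path.join(root, name)
            dest = src[p_len:]  # e.g. '/tmp/unpack/etc/fstab' -> '/etc/fstab'
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            shutil.move(src, dest)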
Example #3
def archive_date(config, date):
    '''
    This function simply automates the date rollback instructions found on the Arch Wiki.
    https://wiki.archlinux.org/index.php/Arch_Linux_Archive#How_to_restore_all_packages_to_a_specific_date
    '''
    # Startup
    fname = 'restore.archive_date(' + str(date) + ')'
    mirror = '/etc/pacman.d/mirrorlist'

    # Done as a Fail Safe
    if len(paf.read_file(mirror)) > 2:
        os.system('mv ' + mirror + ' ' + mirror + '.pacback')
        paf.write_to_log(fname, 'Backed Up Existing Mirrorlist', config['log'])
    else:
        paf.write_to_log(
            fname, 'Skipped Mirrorlist Backup. File Seems Misformatted!',
            config['log'])

    paf.export_iterable(mirror, [
        '## Set By Pacback', 'Server=https://archive.archlinux.org/repos/' +
        date + '/$repo/os/$arch'
    ])
    paf.write_to_log(fname, 'Added ' + date + ' Archive URL To Mirrorlist',
                     config['log'])

    # Run Pacman Update to Run Downgrade
    os.system('/usr/bin/pacman -Syyuu')
    paf.write_to_log(fname, 'Sent -Syyuu to Pacman', config['log'])

    # Restore the Non-Archive URL Mirrorlist
    if os.path.exists(mirror + '.pacback') is False:
        paf.write_to_log(fname, 'Backup Mirrorlist Is Missing', config['log'])
        if paf.yn_frame(
                'Missing Mirrorlist! Do You Want to Fetch a New HTTPS Mirrorlist?'
        ) is True:
            if utils.fetch_new_mirrorlist() is True:
                paf.write_to_log(
                    fname, 'A New Mirrorlist Was Successfully Downloaded',
                    config['log'])
            else:
                session.abort_fail(fname, 'User Declined Country Selection!',
                                   'Please Manually Replace Your Mirrorlist!',
                                   config['log'])
        else:
            session.abort_fail(
                fname,
                'Backup Mirrorlist Is Missing and User Declined Download!',
                'Please Manually Replace Your Mirrorlist!', config['log'])
    else:
        os.system('mv ' + mirror + '.pacback ' + mirror)
        paf.write_to_log(fname, 'Backup Mirrorlist Was Restored Successfully',
                         config['log'])
        print('Refreshing Pacman Database...')
        os.system('/usr/bin/pacman -Sy > /dev/null')
        paf.write_to_log(fname,
                         'Updated Pacman Database After Restoring Mirrorlist',
                         config['log'])
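The Server line written above follows the Arch Archive URL scheme from the wiki page cited in the docstring; a minimal sketch of generating it for a given date:

def archive_mirrorlist(date):
    # date uses the 'YYYY/MM/DD' layout of archive.archlinux.org.
    return ['## Set By Pacback',
            'Server=https://archive.archlinux.org/repos/' + date + '/$repo/os/$arch']


print('\n'.join(archive_mirrorlist('2020/01/01')))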
Example #4
def restore_point(config, num, full_rp, dir_list, no_confirm, label):
    '''
    Assembles all the info for main() and stages the file system
    for the creation of a restore point. It is assumed that user input
    has been cleansed by this point.
    '''
    num = str(num).zfill(2)
    fname = 'create.restore_point(' + num + ')'
    paf.write_to_log(fname, 'Started Restore Point Creation...', config['log'])

    info = {
        'id': num,
        'type': 'rp',
        'TYPE': 'Restore Point',
        'stype': 'f' if full_rp is True else 'l',
        'STYPE': 'Full' if full_rp is True else 'Light',
        'nc': no_confirm,
        'label': str(label),
        'meta': config['rp_paths'] + '/rp' + num + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + num + '.md5',
        'dir_list': dir_list,
        'path': config['rp_paths'] + '/rp' + num,
        'pkgcache': config['rp_paths'] + '/rp' + num + '/pkg-cache',
        'tar': config['rp_paths'] + '/rp' + num + '/rp' + num + '_dirs.tar'
    }

    # Check for Pre-Existing Restore Point
    if os.path.exists(info['meta']) or os.path.exists(info['path']):
        paf.prWarning('Restore Point #' + info['id'] + ' Already Exists!')

        if info['nc'] is False:
            # yn_frame() can return None as well as False, so check for both.
            if paf.yn_frame('Do You Want to Overwrite It?') in (False, None):
                session.abort(fname,
                              'User Aborted Overwrite of RP #' + info['id'],
                              'Aborting Creation!', config)
        utils.remove_id(config, info)

    # Create Restore Point After Checks
    paf.write_to_log(fname, 'All Checks Passed! Handing Off to create.main()',
                     config['log'])
    paf.prBold('Building ' + info['STYPE'] + ' ' + info['TYPE'] + ' ' +
               info['id'] + '...')
    main(config, info)

    # Finish After Successful Creation
    paf.write_to_log(fname, 'Restore Point Creation Complete!', config['log'])
    paf.prBold('Restore Point Creation Complete!')
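The zero-padded ID drives every path in the info dict; a quick sketch of the resulting layout, with a hypothetical rp_paths base directory:

num = str(7).zfill(2)                         # '07'
rp_paths = '/var/lib/pacback/restore-points'  # hypothetical base dir
meta = rp_paths + '/rp' + num + '.meta'       # .../rp07.meta
path = rp_paths + '/rp' + num                 # .../rp07
tar = path + '/rp' + num + '_dirs.tar'        # .../rp07/rp07_dirs.tar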
Example #5
def gdelt_diff(lang, uc, config):
    '''
    Downloads the current GDELT inventory file for a language stream,
    diffs it against the previous run, and fetches any files still missing.
    '''
    dlp_path = config['base'] + '/prev-' + lang + '.txt'
    fzf_path = config['base'] + '/404-' + lang + '.txt'

    # Download and Filter URLs
    url = config[lang]
    print_stream_status(lang, url)
    paf.prBold('Downloading ' + lang.upper() + ' Stream Inventory File...')
    dln = requests.get(url)
    dlc = {''.join(x.split(' ')[2:]) for x in dln.text.split('\n')[:-1]}

    # Filter URL Based On Start Date
    if uc['start_date'] != 'all':
        d = uc['start_date'].split('/')
        days = {dt.replace('-', '') for dt in paf.date_to_today(int(d[0]), int(d[1]), int(d[2]))}
        filtered = set()
        for x in dlc:
            if paf.basename(x)[:8] in days:
                filtered.add(x)
        dlc = filtered

    # Run Install If Fresh Run
    if not os.path.exists(dlp_path):
        fresh_install(lang, uc, config)

    # Compare Previous Run
    dlp = paf.read_file(dlp_path)
    diff = set(dlc).difference(dlp)

    # Download Files Into Place
    if len(diff) > 10000:
        if paf.yn_frame(str(len(diff)) + ' Files Are Missing! Do You Still Want to Continue?') is True:
            print('This May Take a While! Starting Download...')
        else:
            sys.exit()
    if len(diff) > 0:
        fzf = fetch(diff, uc[lang + '_path'])
        paf.export_iterable(dlp_path, dlc)
        for x in paf.read_file(fzf_path):
            fzf.add(x)
        paf.export_iterable(fzf_path, fzf)
    else:
        paf.prSuccess('All Files Are Up To Date!')
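The comparison against the previous run is plain set arithmetic; a toy sketch with made-up URLs:

previous = {'http://data.gdeltproject.org/a.zip',
            'http://data.gdeltproject.org/b.zip'}
current = previous | {'http://data.gdeltproject.org/c.zip'}

# Only files that appeared since the last inventory need downloading.
diff = current.difference(previous)
print(diff)  # {'http://data.gdeltproject.org/c.zip'}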
Example #6
def reboot_check(config):
    '''
    Checks running and installed kernel versions to determine if
    a reboot is needed.
    '''
    fname = 'utils.reboot_check()'

    cmd = "file -bL /boot/vmlinuz* | grep -o 'version [^ ]*' | cut -d ' ' -f 2 && uname -r"
    raw = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True)
    # out[0] is the version embedded in the boot image, out[1] the running kernel.
    out = raw.stdout.decode().strip().split('\n')

    if out[0].strip() != out[1].strip():
        paf.write_to_log(
            fname, 'The Installed Kernel Has Changed From ' + out[1].strip() +
            ' To ' + out[0].strip(), config['log'])
        paf.prWarning('Your Installed Kernel Has Changed From ' +
                      out[1].strip() + ' To ' + out[0].strip() +
                      ' and a Reboot Is Needed!')

        if config['reboot'] is True:
            if paf.yn_frame('Do You Want To Schedule A Reboot In ' +
                            str(config['reboot_offset']) +
                            ' Minutes?') is True:
                os.system("shutdown -r $(date --date='" +
                          str(config['reboot_offset']) + " minute' +%H:%M)")
                paf.write_to_log(
                    fname, 'User Scheduled A Reboot In ' +
                    str(config['reboot_offset']) + ' Minutes', config['log'])
            else:
                paf.write_to_log(fname, 'User Declined System Reboot',
                                 config['log'])
        else:
            paf.write_to_log(
                fname,
                'A Reboot Is Needed For The Whole Downgrade To Take Effect!',
                config['log'])

    else:
        paf.write_to_log(
            fname, 'The Kernel Hasn\'t Been Changed, A Reboot is Unnecessary',
            config['log'])
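A sketch of the same comparison with less shell plumbing: platform.release() reports the running kernel directly, and the installed version is read with the same file/grep/cut pipeline used above. Assumes a standard /boot layout with a single kernel image:

import platform
import subprocess


def kernel_changed():
    running = platform.release()
    out = subprocess.run(
        "file -bL /boot/vmlinuz* | grep -o 'version [^ ]*' | cut -d ' ' -f 2",
        shell=True, stdout=subprocess.PIPE)
    installed = out.stdout.decode().strip().split('\n')[0]
    return installed != running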
Example #7
def remove_rp(config, num, nc):
    fname = 'user.remove_rp(' + str(num) + ')'

    rm_info = {
        'id': str(num).zfill(2),
        'type': 'rp',
        'TYPE': 'Restore Point',
        'meta': config['rp_paths'] + '/rp' + str(num).zfill(2) + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + str(num).zfill(2) + '.md5',
        'path': config['rp_paths'] + '/rp' + str(num).zfill(2)
    }

    if nc is False:
        # yn_frame() can return None as well as False, so check for both.
        if paf.yn_frame('Are You Sure You Want to Remove This Restore Point?'
                        ) in (False, None):
            return

    utils.remove_id(config, rm_info)
    paf.prSuccess('Restore Point Removed!')
    paf.write_to_log(fname, 'Removed Restore Point ' + str(num), config['log'])
Example #8
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called directly,
    but instead through a higher-level 'API'-style call.
    This logic does the actual work of downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if pkg_results['search']:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        # This is Very Bad
        if len(found_pkgs) > len(pkg_results['search']):
            paf.prError(
                'Error: Somehow More Packages Were Found Than Were Searched For!'
            )
            paf.write_to_log(
                fname,
                'Error: Somehow More Packages Were Found Than Were Searched For!',
                config['log'])
            print('Starting Error Resolving Process...')
            error_handler_results = error.too_many_pkgs_found(
                config, parms, found_pkgs, pkg_results)

            if error_handler_results[0] is True:
                paf.prSuccess(
                    'Pacback Was Able To Automatically Resolve This Error!')
                found_pkgs = error_handler_results[1]
            else:
                paf.prError(
                    'Pacback Was NOT Able To Automatically Resolve This Error!')
                error.create_error_report()

        # Branch if Packages are Missing
        elif len(found_pkgs) < len(pkg_results['search']):
            missing_pkg = set(pkg_results['search'] -
                              utils.trim_pkg_list(found_pkgs))
            paf.write_to_log(
                fname,
                str(len(found_pkgs)) + ' Out of ' +
                str(len(pkg_results['search'])) + ' Packages Found',
                config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                paf.prError(pkg)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(
                    fname, 'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', config)

        # This is the Best Case
        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages',
                             config['log'])

        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])

    else:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed',
                         config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        print('')
        paf.write_to_log(
            fname,
            str(len(pkg_results['a_pkgs'])) +
            ' Have Been Added Since Creation', config['log'])

        paf.prWarning(
            str(len(pkg_results['a_pkgs'])) +
            ' Packages Have Been Added Since Creation')
        for pkg in pkg_results['a_pkgs']:
            paf.prAdded(pkg)
        print('')
        if paf.yn_frame(
                'Do You Want to Remove These Packages From Your System?'
        ) is True:
            print('')
            paf.pacman(' '.join(pkg_results['a_pkgs']), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`',
                             config['log'])

    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
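The missing-package set above is ordinary set subtraction, assuming pkg_results['search'] holds 'name-version' strings and utils.trim_pkg_list() reduces cached file paths to the same form; a toy sketch:

search = {'linux-5.10.1', 'bash-5.1.0', 'gcc-10.2.0'}
found = {'linux-5.10.1', 'bash-5.1.0'}  # trimmed from cache hits

missing = search - found
print(missing)  # {'gcc-10.2.0'}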
Example #9
def restore(config, info, dir_list, checksum):
    '''
    This is the main 'api' entrance point for file restoration.
    This function orchestrates the process, handing off work to other functions.
    '''
    fname = 'custom_dirs.restore()'
    unpack_path = info['tar'][:-4]
    p_len = len(unpack_path)
    paf.write_to_log(fname, 'Starting Custom File Restoration', config['log'])

    # Decompress Tar
    if os.path.exists(info['tar.gz']):
        paf.prWarning('Decompressing Custom Tar....')
        if any(re.findall('pigz', line.lower()) for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz -d ' + info['tar.gz'] + ' -f')
            paf.write_to_log(fname, 'Decompressed Tar With Pigz',
                             config['log'])
        else:
            paf.gz_d(info['tar.gz'])
            paf.write_to_log(fname, 'Decompressed Tar With Python',
                             config['log'])

    # Check Tar Csum And Unpack
    if os.path.exists(info['tar']):
        # Checksum Tar
        print('Checking Integrity of Tar...')
        tar_csum = paf.checksum_file(info['tar'])[1]
        paf.write_to_log(fname, 'Checksummed Tar', config['log'])

        if tar_csum == checksum:
            paf.write_to_log(fname, 'Tar Passed Checksum Integrity Check',
                             config['log'])
            paf.prSuccess('Tar Passed Integrity Check')
        else:
            paf.write_to_log(fname, 'Custom Tar Failed Integrity Check!',
                             config['log'])
            paf.prError('Custom Tar Failed Integrity Check!')
            paf.prBold('Skipping Custom File Restoration!')
            return

        # Clean Then Unpack Tar
        paf.prWarning('Unpacking Files from Tar....')
        paf.rm_dir(unpack_path, sudo=True)
        paf.untar_dir(info['tar'])
        paf.write_to_log(fname, 'Unpacked Custom Files From Tar',
                         config['log'])

    else:
        # Skip If Tar is Missing
        paf.write_to_log(
            fname, 'Meta Data File Specifies A Tar That is Now Missing!',
            config['log'])
        paf.prError('This Restore Point is Missing Its Custom Tar!')
        return

    if paf.yn_frame(
            'Do You Want to Compare Restore Point Files Against Your Current File System?'
    ) is True:
        results = compare_files(config, dir_list, unpack_path, p_len)
        # Exit If No Changes Made to Files
        if len(results['added']) + len(results['removed']) + len(
                results['changed']) == 0:
            paf.write_to_log(
                fname, 'Checksum Returned 0 Changed, Removed or Added Files',
                config['log'])
            paf.prSuccess('No Changes Have Been Made to Your File System!')
        else:
            smart_overwrite(config, results, unpack_path, p_len)

    else:
        force_overwrite(config, unpack_path, p_len)

    # Cleanup After Runtime
    repack(config, info, unpack_path)
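A stdlib-only sketch of the verify-then-unpack step, assuming the checksum is an MD5 hex digest like the rest of pacback and using tarfile in place of paf.untar_dir():

import hashlib
import tarfile


def verify_and_unpack(tar_path, expected_sum, dest='/'):
    h = hashlib.md5()
    with open(tar_path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    if h.hexdigest() != expected_sum:
        return False  # caller should skip restoration, as restore() does
    with tarfile.open(tar_path) as tar:
        tar.extractall(dest)  # members mirror paths on the live filesystem
    return True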
Example #10
def smart_overwrite(config, csum_results, unpack_path, p_len):
    '''
    Main File Restoration Logic
    '''
    fname = 'custom_dirs.smart_overwrite()'

    if csum_results['changed']:
        paf.write_to_log(
            fname,
            'Found ' + str(len(csum_results['changed'])) + ' Changed Files',
            config['log'])
        print('')
        print('#################################')
        paf.prWarning('The Following Files Have Changed:')
        print('#################################')
        print('')
        for f in list(csum_results['changed']):
            paf.prChanged(f[0])
        print('')

        if paf.yn_frame('Do You Want to Restore ' +
                        str(len(csum_results['changed'])) +
                        ' Files That Have Been CHANGED?') is True:
            for f in track(csum_results['changed'],
                           description='Restoring Changed Files'):
                shutil.move(unpack_path + f[0], f[0])
            paf.write_to_log(fname, 'Restored Changed Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Restoring Changed Files',
                             config['log'])

    if csum_results['removed']:
        paf.write_to_log(
            fname,
            'Found ' + str(len(csum_results['removed'])) + ' Removed Files',
            config['log'])
        print('')
        print('######################################')
        paf.prWarning('The Following Files Have Been Removed:')
        print('######################################')
        print('')
        for f in list(csum_results['removed']):
            paf.prRemoved(f[p_len:])
        print('')

        if paf.yn_frame('Do You Want to Restore ' +
                        str(len(csum_results['removed'])) +
                        ' Files That Have Been REMOVED?') is True:
            make_missing_dirs(config, unpack_path, p_len)
            for f in track(csum_results['removed'],
                           description='Restoring Removed Files'):
                shutil.move(f, f[p_len:])
            paf.write_to_log(fname, 'Restored Removed Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Restoring Removed Files',
                             config['log'])

    if csum_results['added']:
        paf.write_to_log(
            fname, 'Found ' + str(len(csum_results['added'])) + ' New Files',
            config['log'])
        print('')
        print('####################################')
        paf.prWarning('The Following Files Have Been Added:')
        print('####################################')
        print('')
        for f in list(csum_results['added']):
            paf.prAdded(f)
        print('')

        if paf.yn_frame('Do You Want to Remove ' +
                        str(len(csum_results['added'])) +
                        ' Files That Have Been ADDED?') is True:
            for f in track(csum_results['added'],
                           description='Removing New Files'):
                os.remove(f)
            paf.write_to_log(fname, 'Removed New Files', config['log'])
        else:
            paf.write_to_log(fname, 'User Declined Removing New Files',
                             config['log'])

    paf.prSuccess('Done Restoring Files!')
    paf.write_to_log(fname, 'Done Restoring Files', config['log'])
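The three buckets consumed above can be derived by comparing checksums of the stored tree against the live filesystem. compare_files() is not shown in this listing, so this is a simplified guess at its shape:

def classify(stored, live):
    # stored and live map path -> checksum for the unpacked tree and the
    # current filesystem respectively.
    changed = [(p, live[p]) for p in stored if p in live and stored[p] != live[p]]
    removed = [p for p in stored if p not in live]
    added = [p for p in live if p not in stored]
    return {'changed': changed, 'removed': removed, 'added': added}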
Example #11
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called directly as restore.main().
    This logic does the actual work of downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if pkg_results['search']:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        # Branch if Packages are Missing
        if len(found_pkgs) != len(pkg_results['search']):
            missing_pkg = set(pkg_results['search'] -
                              utils.trim_pkg_list(found_pkgs))
            paf.write_to_log(
                fname,
                str(len(found_pkgs)) + ' Out of ' +
                str(len(pkg_results['search'])) + ' Packages Found',
                config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                paf.prError(pkg)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(
                    fname, 'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', config)

        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages',
                             config['log'])

        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])

    else:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed',
                         config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        print('')
        paf.write_to_log(
            fname,
            str(len(pkg_results['a_pkgs'])) +
            ' Have Been Added Since Creation', config['log'])

        paf.prWarning(
            str(len(pkg_results['a_pkgs'])) +
            ' Packages Have Been Added Since Creation')
        for pkg in pkg_results['a_pkgs']:
            paf.prAdded(pkg)
        print('')
        if paf.yn_frame(
                'Do You Want to Remove These Packages From Your System?'
        ) is True:
            print('')
            paf.pacman(' '.join(pkg_results['a_pkgs']), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`',
                             config['log'])

    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
Example #12
def main(config, info):
    '''
    This is pacback's main method for orchestrating the creation of a
    fallback point. It shouldn't be called directly with create.main()
    but rather by a 'higher' level call that stages the system for the
    actual creation process.
    '''
    fname = 'create.main(' + info['type'] + info['id'] + ')'
    paf.write_to_log(
        fname, 'Building ID:' + info['id'] + ' As ' + info['STYPE'] + ' ' +
        info['TYPE'], config['log'])

    # Light Restore Point
    if info['STYPE'] == 'Light':
        if info['dir_list']:
            session.abort_fail(
                fname,
                'Custom Dirs Are Not Allowed With STYPE: ' + info['STYPE'],
                'Light ' + info['TYPE'] +
                ' DO NOT Support Custom Dirs! Please Use The `-f` Flag',
                config)
    # Full Restore Point
    elif info['STYPE'] == 'Full':
        pkg_search = paf.replace_spaces(utils.pacman_Q(), '-')
        found_pkgs = utils.search_cache(pkg_search, utils.scan_caches(config),
                                        config)
        pkg_size = paf.size_of_files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            paf.write_to_log(fname, 'Not All Packages Were Found!',
                             config['log'])
            pkg_split = utils.trim_pkg_list(found_pkgs)
            print('')
            paf.prBold('======================================')
            paf.prWarning('The Following Packages Were NOT Found!')
            paf.prBold('======================================')
            for pkg in set(pkg_search - pkg_split):
                paf.prWarning(pkg)
            print('')

            if info['nc'] is False:
                # yn_frame() can return None as well as False, so check for both.
                if paf.yn_frame('Do You Still Want to Continue?') in (False, None):
                    session.abort(fname, 'User Aborted Due to Missing Pkgs',
                                  'Aborting Creation!', config)

        # Make Folders and Hardlink Packages
        paf.mk_dir(info['path'], sudo=False)
        paf.mk_dir(info['pkgcache'], sudo=False)

        for pkg in found_pkgs:
            os.link(pkg, info['pkgcache'] + '/' + paf.basename(pkg))
        paf.write_to_log(fname,
                         'HardLinked ' + str(len(found_pkgs)) + ' Packages',
                         config['log'])

        # Store Custom Dirs
        if info['dir_list']:
            paf.write_to_log(
                fname, 'User Selected Version Dependent Folders For Storage',
                config['log'])
            pack_results = custom_dirs.store(config, info)

    # Generate Meta Data File
    current_pkgs = utils.pacman_Q()
    meta = [
        '======= Pacback Info =======', 'Version: ' + config['version'],
        'Label: ' + info['label'],
        'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
        'Time Created: ' + dt.datetime.now().strftime("%H:%M:%S"),
        'Type: ' + info['TYPE'], 'SubType: ' + info['STYPE'],
        'Packages Installed: ' + str(len(current_pkgs))
    ]

    if info['STYPE'] == 'Full':
        meta.append('Packages Cached: ' + str(len(found_pkgs)))
        meta.append('Package Cache Size: ' + paf.convert_size(pkg_size))

    if info['dir_list']:
        meta.append('Dir File Count: ' + str(pack_results['file_count']))
        meta.append('Dir Raw Size: ' + pack_results['raw_size'])
        meta.append('Tar Compressed Size: ' + pack_results['compressed_size'])
        meta.append('Tar Checksum: ' + pack_results['csum'])

        meta.append('')
        meta.append('========= Dir List =========')
        for d in info['dir_list']:
            meta.append(d)

    meta.append('')
    meta.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta.append(pkg)

    # Export Final Meta Data File
    paf.export_iterable(info['meta'], meta)
    paf.write_to_log(fname, 'Generated Meta Data File', config['log'])
    # Checksum Meta Data File
    paf.export_iterable(info['meta_md5'], [paf.checksum_file(info['meta'])[1]])
    paf.write_to_log(fname, 'Generated Meta Data Checksum', config['log'])
    # Finish and Return
    paf.write_to_log(
        fname, 'Main Build Complete of ID:' + info['id'] + ' As ' +
        info['STYPE'] + ' ' + info['TYPE'], config['log'])
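Hardlinking is what keeps full restore points cheap: each cached package shares its data blocks with pacman's copy instead of duplicating them. A small sketch of that step with hypothetical paths:

import os


def hardlink_into(pkgs, cache_dir):
    os.makedirs(cache_dir, exist_ok=True)
    for pkg in pkgs:
        # os.link() adds a second directory entry, not a copy, so the
        # restore point costs almost no extra disk space.
        os.link(pkg, os.path.join(cache_dir, os.path.basename(pkg)))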
Example #13
def clean_cache(config, nc):
    '''
    This provides automated cache cleaning using pacman, paccache, and pacback.
    '''
    fname = 'utils.clean_cache()'
    paf.prBold('Starting Advanced Cache Cleaning...')
    paf.write_to_log(fname, 'Starting Advanced Cache Cleaning...',
                     config['log'])
    print('')

    if nc is True or paf.yn_frame(
            'Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('/usr/bin/pacman -R $(/usr/bin/pacman -Qtdq)')
        paf.write_to_log(fname, 'Removed Orphaned Packages', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Old Versions of Installed Packages?'
    ) is True:
        os.system('/usr/bin/paccache -rk ' + str(config['keep_versions']))
        paf.write_to_log(fname, 'Removed Old Package Versions', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Cached Orphans?') is True:
        os.system('/usr/bin/paccache -ruk0')
        paf.write_to_log(fname, 'Removed Cached Orphans', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Check For Old Pacback Restore Points?') is True:
        paf.write_to_log(fname, 'Starting Search For Old Restore Points...',
                         config['log'])
        meta_paths = sorted(f for f in paf.find_files(config['rp_paths'])
                            if f.endswith(".meta"))

        today_dt = dt.date.today()

        for m in meta_paths:
            rp_id = m[-7:-5]
            rp_info = {
                'id': rp_id,
                'type': 'rp',
                'TYPE': 'Restore Point',
                'meta': m,
                'meta_md5': config['rp_paths'] + '/.rp' + rp_id + '.md5',
                'path': config['rp_paths'] + '/rp' + rp_id,
                'pkgcache': config['rp_paths'] + '/rp' + rp_id + '/pkg-cache'
            }

            # Format Dates for Compare
            m_dict = meta.read(config, m)
            o_split = (m_dict['date'].split('/'))
            old_dt = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))

            # Check How Old Restore Point Is
            days = (today_dt - old_dt).days
            if days > config['old_rp']:
                paf.prWarning('Failed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) +
                              ' Days Old!')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old!', config['log'])
                if paf.yn_frame('Do You Want to Remove This ' +
                                rp_info['TYPE'] + '?') is True:
                    utils.remove_id(config, rp_info)
                    paf.prSuccess('Restore Point Removed!')
                else:
                    paf.write_to_log(
                        fname, 'User Declined Removal of ' + rp_info['TYPE'] +
                        ' ' + rp_info['id'], config['log'])

            else:
                paf.prSuccess('Passed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) + ' Days Old')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old', config['log'])

    paf.write_to_log(fname, 'Finished Advanced Cache Cleaning', config['log'])
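The age check boils down to date subtraction; a stand-alone sketch using the same '%Y/%m/%d' stamp format (the 30-day threshold is illustrative, pacback reads it from config['old_rp']):

import datetime as dt


def days_old(date_created, threshold=30):
    # date_created is the 'Date Created' meta field, e.g. '2020/01/01'.
    y, m, d = (int(x) for x in date_created.split('/'))
    age = (dt.date.today() - dt.date(y, m, d)).days
    return age, age > threshold


print(days_old('2020/01/01'))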