Example #1
def clean_cache(count):
    '''Automated Cache Cleaning Using pacman, paccache, and pacback.'''
    PS.Start_Log('CleanCache', log_file)
    PS.prWorking('Starting Advanced Cache Cleaning...')
    if PS.YN_Frame('Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('sudo pacman -R $(pacman -Qtdq)')
        PS.Write_To_Log('CleanCache', 'Ran pacman -R $(pacman -Qtdq)',
                        log_file)

    if PS.YN_Frame('Do You Want To Remove Old Versions of Installed Packages?'
                   ) is True:
        os.system('sudo paccache -rk ' + count)
        PS.Write_To_Log('CleanCache', 'Ran paccache -rk ' + count, log_file)

    if PS.YN_Frame('Do You Want To Remove Cached Orphans?') is True:
        os.system('sudo paccache -ruk0')
        PS.Write_To_Log('CleanCache', 'Ran paccache -ruk0', log_file)

    if PS.YN_Frame(
            'Do You Want To Check For Old Pacback Restore Points?') is True:
        PS.Write_To_Log('CleanCache', 'Started Search For Old RPs', log_file)
        metas = PS.Search_FS(rp_paths, 'set')
        rps = {f for f in metas if f.endswith(".meta")}

        for m in rps:
            rp_num = m.split('/')[-1]
            # Find RP Create Date in Meta File
            meta = PS.Read_List(m)
            for l in meta:
                if l.split(':')[0] == 'Date Created':
                    target_date = l.split(':')[1].strip()
                    break

            # Parse and Format Dates for Compare
            today_date = dt.date.today()
            old_date = dt.datetime.strptime(target_date, "%Y/%m/%d").date()

            # Compare Days
            days = (today_date - old_date).days
            if days > 180:
                PS.prWarning(m.split('/')[-1] + ' Is Over 180 Days Old!')
                if PS.YN_Frame(
                        'Do You Want to Remove This Restore Point?') is True:
                    PS.RM_File(m, sudo=True)
                    PS.RM_Dir(m[:-5], sudo=True)
                    PS.prSuccess('Restore Point Removed!')
                    PS.Write_To_Log('CleanCache', 'Removed RP ' + rp_num,
                                    log_file)
            else:
                PS.prSuccess(rp_num + ' Is Only ' + str(days) + ' Days Old!')
                PS.Write_To_Log('CleanCache',
                                'RP ' + rp_num + ' Was Less Than 180 Days Old',
                                log_file)

    PS.End_Log('CleanCache', log_file)
Example #2
def remove_rp(rp_num, nc):
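    '''Remove a Restore Point's Meta File and Directory, Prompting First Unless nc (No Confirm) Is True.'''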
    PS.Start_Log('RemoveRP', log_file)
    rp = rp_paths + '/rp' + rp_num + '.meta'

    if nc is False:
        if PS.YN_Frame('Do You Want to Remove This Restore Point?') is True:
            PS.RM_File(rp, sudo=False)
            PS.RM_Dir(rp[:-5], sudo=False)
            PS.prSuccess('Restore Point Removed!')
            PS.Write_To_Log('RemoveRP', 'Removed Restore Point ' + rp_num,
                            log_file)
        else:
            PS.Write_To_Log('RemoveRP',
                            'User Declined Removing Restore Point ' + rp_num,
                            log_file)

    elif nc is True:
        PS.RM_File(rp, sudo=False)
        PS.RM_Dir(rp[:-5], sudo=False)
        PS.prSuccess('Restore Point Removed!')
        PS.Write_To_Log('RemoveRP', 'Removed Restore Point ' + rp_num,
                        log_file)

    PS.End_Log('RemoveRP', log_file)
Example #3
def rollback_to_rp(version, rp_num):
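    '''Rollback to a Restore Point: Downgrade Changed Packages, Optionally Remove Added Packages, and Restore Any Saved Custom Files.'''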
    PS.Start_Log('RollbackRP', log_file)
    #####################
    # Stage Rollback Vars
    #####################
    rp_num = str(rp_num).zfill(2)
    rp_path = '/var/lib/pacback/restore-points/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    current_pkgs = pu.pacman_Q()

    # Set Full RP Status
    if os.path.exists(rp_path):
        full_rp = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Full RP',
                        log_file)
    else:
        full_rp = False
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Light RP',
                        log_file)

    # Set Meta Status, Read Meta, Diff Packages, Set Vars
    if os.path.exists(rp_meta):
        meta_exists = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Has MetaData',
                        log_file)
        meta = PS.Read_List(rp_meta)
        meta_dirs = PS.Read_Between('= Dir List =',
                                    '= Pacman List =',
                                    meta,
                                    re_flag=True)[:-1]
        meta_old_pkgs = PS.Read_Between('= Pacman List =',
                                        '<Endless>',
                                        meta,
                                        re_flag=True)

        # Checking for New and Changed Packages
        changed_pkgs = set(set(meta_old_pkgs) - current_pkgs)
        meta_old_pkg_strp = {pkg.split(' ')[0] for pkg in meta_old_pkgs}
        current_pkg_strp = {pkg.split(' ')[0] for pkg in current_pkgs}
        added_pkgs = set(current_pkg_strp - meta_old_pkg_strp)
        m_search = PS.Replace_Spaces(changed_pkgs)
        PS.Write_To_Log('RollbackRP', 'Finished Reading RP MetaData', log_file)

    else:
        meta_exists = False
        meta = None
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Missing MetaData',
                        log_file)

    # Abort If No Files Are Found
    if meta_exists is False and full_rp is False:
        PS.Abort_With_Log('RollbackRP',
                          'Restore Point #' + rp_num + ' Was NOT FOUND!',
                          'Restore Point #' + rp_num + ' Was NOT FOUND!',
                          log_file)

    # Compare Versions
    vc.check_pacback_version(version, rp_path, meta_exists, meta)

    ####################
    # Full Restore Point
    ####################
    if full_rp is True:
        if meta_exists is True:
            # Pass If No Packages Have Changed
            if len(changed_pkgs) > 0:
                PS.Write_To_Log(
                    'RollbackRP',
                    str(len(changed_pkgs)) + ' Packages Have Been Changed',
                    log_file)
                found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP',
                                'Sent Found Packages To pacman -U', log_file)
            else:
                PS.prSuccess('No Packages Have Been Changed!')
                PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed',
                                log_file)

        elif meta_exists is False:
            rp_cache = rp_path + '/pac_cache'
            PS.pacman(rp_cache + '/*', '--needed -U')
        PS.Write_To_Log('RollbackRP', 'Sent RP Cache To pacman -U --needed',
                        log_file)
            PS.prError('Restore Point #' + rp_num + ' MetaData Was NOT FOUND!')
            PS.Abort_With_Log('RollbackRP',
                              'Meta Is Missing So Skipping Advanced Features',
                              'Skipping Advanced Features!', log_file)

    #####################
    # Light Restore Point
    #####################
    elif meta_exists is True and full_rp is False:

        # Pass If No Packages Have Changed
        if len(changed_pkgs) > 0:
            PS.prWorking('Bulk Scanning for ' + str(len(meta_old_pkgs)) +
                         ' Packages...')
            found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
        else:
            PS.prSuccess('No Packages Have Been Changed!')
            PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed',
                            log_file)
            found_pkgs = set()

        if len(changed_pkgs) == 0:
            pass

        # Pass Comparison if All Packages Found
        elif len(found_pkgs) == len(changed_pkgs):
            PS.prSuccess('All Packages Found In Your Local File System!')
            PS.Write_To_Log('RollbackRP', 'All Packages Found', log_file)
            PS.pacman(' '.join(found_pkgs), '--needed -U')
            PS.Write_To_Log('RollbackRP', 'Sent Found Packages To pacman -U',
                            log_file)

        # Branch if Packages are Missing
        elif len(found_pkgs) < len(changed_pkgs):
            PS.Write_To_Log(
                'RollbackRP',
                str(len(changed_pkgs) - len(found_pkgs)) +
                ' Packages Were Not Found', log_file)
            missing_pkg = set(m_search - pu.trim_pkg_list(found_pkgs))

            # Show Missing Pkgs
            PS.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                PS.prError(pkg)

            if PS.YN_Frame('Do You Want To Continue Anyway?') is True:
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP',
                                'Sent Found Packages To pacman -U', log_file)
            else:
                PS.Abort_With_Log(
                    'RollbackRP',
                    'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', log_file)

    # Ask User If They Want to Remove New Packages
    if len(added_pkgs) > 0:
        PS.prWarning(
            'The Following Packages Are Installed But Are NOT Present in Restore Point #'
            + rp_num + ':')
        PS.Write_To_Log(
            'RollbackRP',
            str(len(added_pkgs)) + ' Packages Have Been Added Since RP Creation',
            log_file)
        for pkg in added_pkgs:
            PS.prAdded(pkg)
        if PS.YN_Frame('Do You Want to Remove These Packages From Your System?'
                       ) is True:
            PS.pacman(' '.join(added_pkgs), '-R')
            PS.Write_To_Log('RollbackRP', 'Sent Added Packages To pacman -R',
                            log_file)
    else:
        PS.prSuccess('No Packages Have Been Added!')
        PS.Write_To_Log('RollbackRP',
                        'No Packages Have Been Added Since RP Creation',
                        log_file)

    ########################
    # Stage Custom File Diff
    ########################
    if len(meta_dirs) > 0:
        PS.Write_To_Log('RollbackRP', 'Custom Dirs Specified in RP Meta File',
                        log_file)
        custom_dirs = rp_tar[:-4]
        if os.path.exists(rp_tar + '.gz'):
            PS.prWorking('Decompressing Restore Point....')
            if any(re.findall('pigz', line.lower()) for line in current_pkgs):
                os.system('pigz -d ' + rp_tar + '.gz -f')
                PS.Write_To_Log('RPDiff',
                                'Decompressed Custom Files With Pigz',
                                log_file)
            else:
                PS.GZ_D(rp_tar + '.gz')
                PS.Write_To_Log('RPDiff',
                                'Decompressed Custom Files With Python',
                                log_file)

        if os.path.exists(custom_dirs):
            PS.RM_Dir(custom_dirs, sudo=True)

        PS.prWorking('Unpacking Files from Restore Point Tar....')
        PS.Untar_Dir(rp_tar)
        PS.Write_To_Log('RPDiff', 'Unpacked Custom Files RP Tar', log_file)

        ################################
        # Restore Files Without Checksum
        ################################
        diff_yn = PS.YN_Frame(
            'Do You Want to Checksum Diff Restore Point Files Against Your Current File System?'
        )
        if diff_yn is False:
            PS.Write_To_Log('RPDiff', 'User Skipped Checksumming Files',
                            log_file)
            PS.prWarning(
                'OVERWRITING FILES WITHOUT CHECKSUMMING CAN BE EXTREMELY DANGEROUS!'
            )

            ow = PS.YN_Frame(
                'Do You Still Want to Continue and Restore ALL Files?')
            if ow is False:
                PS.Write_To_Log('RPDiff',
                                'User Declined Overwrite After Skipping Diff',
                                log_file)
                print('Skipping! Restore Point Files Are Unpacked in ' +
                      custom_dirs)
                PS.Write_To_Log('RPDiff',
                                'Left Files Unpacked in ' + custom_dirs,
                                log_file)

            elif ow is True:
                print(
                    'Starting Full File Restore! Please Be Patient As All Files are Overwritten...'
                )
                rp_fs = PS.Search_FS(custom_dirs)
                for f in rp_fs:
                    PS.prWorking('Please Be Patient. This May Take a While...')
                    os.system('sudo mkdir -p ' +
                              PS.Escape_Bash('/'.join(f.split('/')[:-1])) +
                              ' && sudo cp -af ' + PS.Escape_Bash(f) + ' ' +
                              PS.Escape_Bash(f[len(custom_dirs):]))

        ############################
        # Checksum and Compare Files
        ############################
        elif diff_yn is True:
            PS.Write_To_Log('RPDiff', 'Started Checksumming Custom Files',
                            log_file)
            rp_fs = PS.Search_FS(custom_dirs)
            rp_fs_trim = set(path[len(custom_dirs):] for path in rp_fs)

            # Checksum Restore Point Files with a MultiProcessing Pool
            with mp.Pool(os.cpu_count()) as pool:
                rp_checksum = set(
                    tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs),
                              total=len(rp_fs),
                              desc='Checksumming Restore Point Files'))
                sf_checksum = set(
                    tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs_trim),
                              total=len(rp_fs_trim),
                              desc='Checksumming Source Files'))
            PS.Write_To_Log('RPDiff', 'Finished Checksumming Custom Files',
                            log_file)

            # Compare Checksums For Files That Exist
            PS.Write_To_Log('RPDiff', 'Starting Sorting and Comparing Files',
                            log_file)
            rp_csum_trim = set(path[len(custom_dirs):] for path in rp_checksum)
            rp_diff = sf_checksum.difference(rp_csum_trim)

            # Filter Removed and Changed Files
            diff_removed = set()
            diff_changed = set()
            for csum in rp_diff:
                if re.findall('FILE MISSING', csum):
                    diff_removed.add(csum)
                else:
                    diff_changed.add(csum.split(' : ')[0] + ' : FILE CHANGED!')

            # Find Added Files
            src_fs = set()
            for x in meta_dirs:
                for l in PS.Search_FS(x):
                    src_fs.add(l)
            diff_new = src_fs.difference(rp_fs_trim)

            PS.Write_To_Log('RPDiff', 'Finished Comparing and Sorting Files',
                            log_file)

            # Print Changed Files For User
            if len(diff_changed) + len(diff_new) + len(diff_removed) == 0:
                PS.Write_To_Log('RPDiff',
                                'Checksum Returned Zero Changed Files',
                                log_file)
                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff',
                                'Cleaned Up Files and Completed Successfully',
                                log_file)
                PS.prSuccess('No Files Have Been Changed!')

            #################
            # Overwrite Files
            #################
            else:
                if len(diff_changed) > 0:
                    PS.Write_To_Log(
                        'RPDiff',
                        'Found ' + str(len(diff_changed)) + ' Changed Files',
                        log_file)
                    PS.prWarning('The Following Files Have Changed:')
                    for f in diff_changed:
                        PS.prChanged(f)
                    if PS.YN_Frame(
                            'Do You Want to Overwrite Files That Have Been CHANGED?'
                    ) is True:
                        PS.prWorking(
                            'Please Be Patient. This May Take a While...')
                        for f in diff_changed:
                            fs = (f.split(' : ')[0])
                            os.system('sudo cp -af ' +
                                      PS.Escape_Bash(custom_dirs + fs) + ' ' +
                                      PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Changed Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Restoring Files',
                                        log_file)

                if len(diff_removed) > 0:
                    PS.Write_To_Log(
                        'RPDiff',
                        'Found ' + str(len(diff_removed)) + ' Removed Files',
                        log_file)
                    PS.prWarning('The Following Files Have Been Removed:')
                    for f in diff_removed:
                        PS.prRemoved(f)
                    if PS.YN_Frame(
                            'Do You Want to Add Files That Have Been REMOVED?'
                    ) is True:
                        PS.prWorking(
                            'Please Be Patient. This May Take a While...')
                        for f in diff_removed:
                            fs = (f.split(' : ')[0])
                            os.system(
                                'sudo mkdir -p ' +
                                PS.Escape_Bash('/'.join(fs.split('/')[:-1])) +
                                ' && sudo cp -af ' +
                                PS.Escape_Bash(custom_dirs + fs) + ' ' +
                                PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Removed Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Restoring Files',
                                        log_file)

                if len(diff_new) > 0:
                    PS.Write_To_Log(
                        'RPDiff', 'Found ' + str(len(diff_new)) + ' New Files',
                        log_file)
                    PS.prWarning('The Following Files Have Been Added:')
                    for f in diff_new:
                        PS.prAdded(f + ' : NEW FILE!')
                    if PS.YN_Frame(
                            'Do You Want to Remove Files That Have Been ADDED?'
                    ) is True:
                        for f in diff_new:
                            fs = (f.split(' : ')[0])
                            os.system('sudo rm ' + PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Removed New Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Removing Files',
                                        log_file)

                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff', 'Done Comparing and Restoring Files',
                                log_file)
                PS.prSuccess('File Diff and Restore Complete!')

    else:
        PS.prSuccess('Rollback to Restore Point #' + rp_num + ' Complete!')
        PS.Write_To_Log('RollbackRP',
                        'Rollback to RP #' + rp_num + ' Complete', log_file)

    PS.End_Log('RollbackRP', log_file)
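
The checksum diff above relies on the string format produced by PS.Checksum_File: each result is split on ' : ' and scanned for a 'FILE MISSING' marker. That helper is not shown in these examples; below is a minimal sketch of such a function, assuming an MD5 digest and the 'path : value' shape implied by the splitting code (the hash choice and the helper name are guesses, not pacback's confirmed implementation).

import hashlib
import os


def checksum_file(path):
    '''Hypothetical stand-in for PS.Checksum_File (assumed output format).'''
    # Missing files get the marker the diff code filters into diff_removed
    if not os.path.exists(path):
        return path + ' : FILE MISSING'
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            md5.update(chunk)
    # 'path : digest' so the path[len(custom_dirs):] trimming still works
    return path + ' : ' + md5.hexdigest()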
Example #4
def upgrade_to_hardlinks(rp_path):
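    '''Convert an Old Tar-Based Restore Point Into the Newer Hardlinked Format.'''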
    # This is a Total Hack Job. Don't Judge Me :(
    PS.prWorking('Unpacking...')
    PS.Write_To_Log('HardlinkUpgrade',
                    'Unpacking Old Restore Point For Conversion', log_file)
    if os.path.exists(rp_path + '.tar.gz'):
        PS.GZ_D(rp_path + '.tar.gz')
    PS.Untar_Dir(rp_path + '.tar')
    PS.Write_To_Log('HardlinkUpgrade', 'Unpacked Restore Point', log_file)

    # Read and Parse Meta Data
    meta = PS.Read_List(rp_path + '.meta')
    meta_old_pkgs = PS.Read_Between('======= Pacman List ========',
                                    '<Endless>', meta)
    meta_dirs = PS.Read_Between('========= Dir List =========',
                                '======= Pacman List ========', meta)[:-1]
    PS.Write_To_Log('HardlinkUpgrade', 'Read RP MetaData', log_file)

    # Find Existing Package
    pc = PS.Search_FS(rp_path + '/pac_cache')
    found = PU.search_paccache(PS.Replace_Spaces(meta_old_pkgs),
                               PU.fetch_paccache())

    if len(found) == len(pc):
        PS.prSuccess('All Packages Found!')
        PS.Write_To_Log('HardlinkUpgrade',
                        'All Packages Were Found Elsewhere', log_file)
        PS.RM_Dir(rp_path + '/pac_cache', sudo=False)
        PS.MK_Dir(rp_path + '/pac_cache', sudo=False)
        for pkg in tqdm.tqdm(found, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + rp_path + '/pac_cache/' +
                      pkg.split('/')[-1])
        PS.Write_To_Log('HardlinkUpgrade',
                        'Hardlinked Packages From Other Locations', log_file)

    elif len(found) < len(pc):
        PS.Write_To_Log(
            'HardlinkUpgrade',
            'Not All Packages Were Found. Merging With Hardlinks', log_file)
        duplicate = PS.Trim_Dir(pc).intersection(PS.Trim_Dir(found))
        for d in tqdm.tqdm(duplicate, desc='Merging and Hardlinking'):
            PS.RM_File(rp_path + '/pac_cache/' + d, sudo=False)
            for p in found:
                if p.split('/')[-1] == d.split('/')[-1]:
                    os.system('sudo ln ' + p + ' ' + rp_path + '/pac_cache/' +
                              d)
                    break
        PS.Write_To_Log('HardlinkUpgrade',
                        'Successfully Merged Restore Point Packages', log_file)

    if len(meta_dirs) > 0:
        PS.Write_To_Log('HardlinkUpgrade', 'Detected Custom Files Saved In RP',
                        log_file)
        f_list = set()
        rp_fs = PS.Search_FS(rp_path)
        for f in rp_fs:
            if f[len(rp_path):].split('/')[1] != 'pac_cache':
                f_list.add(f)

        with tarfile.open(rp_path + '/' + rp_path[-2:] + '_dirs.tar',
                          'w') as tar:
            for f in tqdm.tqdm(f_list, desc='Adding Dirs to Tar'):
                tar.add(f, f[len(rp_path):])
        PS.Write_To_Log('HardlinkUpgrade', 'Added Custom Files To New RP Tar',
                        log_file)

        for d in meta_dirs:
            PS.RM_Dir(rp_path + '/' + d.split('/')[1], sudo=False)
        PS.Write_To_Log('HardlinkUpgrade', 'Cleaned Unpacked Custom Files',
                        log_file)

    PS.RM_File(rp_path + '.tar', sudo=False)
    PS.Write_To_Log('HardlinkUpgrade', 'Removed Old Restore Point Tar',
                    log_file)
    PS.prSuccess('RP Version Upgrade Complete!')
    PS.Write_To_Log('HardlinkUpgrade', 'Restore Point Upgrade Complete',
                    log_file)
Example #5
def create_restore_point(version, rp_num, rp_full, dir_list, no_confirm, notes):
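    '''Create a Full or Light Restore Point, Hardlinking Cached Packages and Optionally Packing Custom Dirs Into a Tar.'''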
    PS.Start_Log('CreateRP', log_file)
    # Fail Safe for New Users
    if os.path.exists(rp_paths) is False:
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        PS.MK_Dir(rp_paths, sudo=False)
        PS.Write_To_Log('CreateRP', 'Created Base RP Folder in /var/lib', log_file)

    # Set Base Vars
    rp_num = str(rp_num).zfill(2)
    rp_path = rp_paths + '/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    found_pkgs = set()
    pac_size = 0

    # Check for Existing Restore Points
    if os.path.exists(rp_path) or os.path.exists(rp_meta):
        if no_confirm is False:
            if int(rp_num) != 0:
                PS.prWarning('Restore Point #' + rp_num + ' Already Exists!')
                if PS.YN_Frame('Do You Want to Overwrite It?') is False:
                    PS.Abort_With_Log('CreateRP', 'User Aborted Overwrite of RP #' + rp_num, 'Aborting!', log_file)

        PS.RM_File(rp_meta, sudo=False)
        PS.RM_Dir(rp_path, sudo=False)
        PS.Write_To_Log('CreateRP', 'Removed RP #' + rp_num + ' During Overwrite', log_file)

    ###########################
    # Full Restore Point Branch
    ###########################
    if rp_full is True:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As Full RP', log_file)
        print('Building Full Restore Point...')

        # Set Vars For Full RP
        dir_size = 0
        rp_files = set()
        pac_cache = rp_path + '/pac_cache'

        PS.prWorking('Retrieving Current Packages...')
        pkg_search = pu.pacman_Q(replace_spaces=True)

        # Search File System for Pkgs
        PS.prWorking('Bulk Scanning for ' + str(len(pkg_search)) + ' Packages...')
        found_pkgs = pu.search_paccache(pkg_search, pu.fetch_paccache())
        pac_size = PS.Size_Of_Files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            PS.Write_To_Log('CreateRP', 'Not All Packages Were Found', log_file)
            if int(rp_num) != 0:
                if no_confirm is False:
                    pkg_split = pu.trim_pkg_list(found_pkgs)
                    PS.prError('The Following Packages Were NOT Found!')
                    for pkg in set(pkg_search - pkg_split):
                        PS.prWarning(pkg + ' Was NOT Found!')
                    if PS.YN_Frame('Do You Still Want to Continue?') is False:
                        PS.Abort_With_Log('CreateRP', 'User Aborted Due to Missing Pkgs', 'Aborting!', log_file)

        # HardLink Packages to RP
        PS.MK_Dir(rp_path, sudo=False)
        PS.MK_Dir(pac_cache, sudo=False)
        for pkg in tqdm.tqdm(found_pkgs, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + pac_cache + '/' + pkg.split('/')[-1])
        PS.Write_To_Log('CreateRP', 'HardLinked ' + str(len(found_pkgs)) + ' Packages', log_file)

        # Find Custom Files for RP
        if dir_list:
            PS.Write_To_Log('CreateRP', 'User Defined Custom RP Files', log_file)
            # Find and Get Size of Custom Files
            for d in dir_list:
                for f in PS.Search_FS(d, 'set'):
                    try:
                        dir_size += os.path.getsize(f)
                    except OSError:
                        pass
                    rp_files.add(f)

            # Pack Custom Folders Into a Tar
            with tarfile.open(rp_tar, 'w') as tar:
                for f in tqdm.tqdm(rp_files, desc='Adding Dirs to Tar'):
                    tar.add(f)
            PS.Write_To_Log('CreateRP', 'Tar Created For Custom RP Files', log_file)

            # Compress Custom Files If Added Larger Than 1GB
            if dir_size > 1073741824:
                PS.prWorking('Compressing Restore Point Files...')
                if any(re.findall('pigz', l.lower()) for l in pkg_search):
                    os.system('pigz ' + rp_tar + ' -f')
                else:
                    PS.GZ_C(rp_tar, rm=True)
                PS.Write_To_Log('CreateRP', 'Compressed Custom Files RP Tar', log_file)

    elif rp_full is False:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As A Light RP', log_file)
        if len(dir_list) > 0:
            PS.Abort_With_Log('CreateRP', 'Custom Dirs Are Not Supported By LightRP',
                              'Light Restore Points DO NOT Support Custom Dirs! Please Use The `-f` Flag', log_file)
        print('Building Light Restore Point...')

    #########################
    # Generate Meta Data File
    #########################
    current_pkgs = pu.pacman_Q()
    meta_list = ['====== Pacback RP #' + rp_num + ' ======',
                 'Pacback Version: ' + version,
                 'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
                 'Packages Installed: ' + str(len(current_pkgs)),
                 'Packages in RP: ' + str(len(found_pkgs)),
                 'Size of Packages in RP: ' + PS.Convert_Size(pac_size)]

    if notes:
        meta_list.append('Notes: ' + notes)

    if len(dir_list) != 0:
        meta_list.append('Dirs File Count: ' + str(len(rp_files)))
        meta_list.append('Dirs Total Size: ' + PS.Convert_Size(dir_size))
        meta_list.append('')
        meta_list.append('========= Dir List =========')
        for d in dir_list:
            meta_list.append(d)

    meta_list.append('')
    meta_list.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta_list.append(pkg)

    # Export Final Meta Data File
    PS.Export_List(rp_meta, meta_list)
    PS.Write_To_Log('CreateRP', 'RP #' + rp_num + ' Was Successfully Created', log_file)
    PS.End_Log('CreateRP', log_file)
    PS.prSuccess('Restore Point #' + rp_num + ' Successfully Created!')
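
A minimal, hypothetical driver for the functions above (the version string, restore point number, and directory list are placeholder values; the actual pacback CLI argument parsing is not shown in these examples):

# Hypothetical usage sketch; all argument values are placeholders.
version = '1.5'  # assumed pacback version string
create_restore_point(version, 1, True, ['/etc', '/home/user/.config'],
                     False, 'Pre-Upgrade Snapshot')
rollback_to_rp(version, 1)
remove_rp('01', nc=False)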