Example #1
0
def rollback_to_date(date):
    '''Locks pacman to the Arch Linux Archive for a given date and runs
    a full downgrade sync.

    Args:
        date: Target archive date as a 'YYYY/MM/DD' string.
    '''
    PS.Start_Log('RollbackToDate', log_file)
    # Validate Date Format and Build New URL.
    # Bug fix: re.findall matched any string CONTAINING a valid date
    # (e.g. 'x2020/01/01x'); fullmatch requires the whole string to be
    # exactly YYYY/MM/DD.
    if not re.fullmatch(r'[12]\d{3}/(0[1-9]|1[0-2])/(0[1-9]|[12]\d|3[01])',
                        date):
        PS.Abort_With_Log('RollbackToDate',
                          'Aborting Due to Invalid Date Format',
                          'Invalid Date! Date Must be YYYY/MM/DD Format',
                          log_file)

    # Backup Mirrorlist (more than one line means it isn't already
    # locked to a single archive Server entry)
    if len(PS.Read_List('/etc/pacman.d/mirrorlist')) > 1:
        os.system(
            'sudo cp /etc/pacman.d/mirrorlist /etc/pacman.d/mirrorlist.pacback'
        )
        PS.Write_To_Log('RollbackToDate', 'Backed Up Old Mirrorlist', log_file)
    os.system(
        "echo 'Server=https://archive.archlinux.org/repos/" + date +
        "/$repo/os/$arch' | sudo tee /etc/pacman.d/mirrorlist >/dev/null")
    PS.Write_To_Log('RollbackToDate', 'Added Archive URL To Mirrorlist',
                    log_file)

    # Run Pacman Update (-Syyuu forces a refresh and allows downgrades)
    os.system('sudo pacman -Syyuu')
    PS.Write_To_Log('RollbackToDate', 'Ran pacman -Syyuu', log_file)
    PS.End_Log('RollbackToDate', log_file)
Example #2
0
def pacback_hook(install):
    '''Installs or removes a standard alpm hook in /etc/pacman.d/hooks
    Runs as a PreTransaction hook during every upgrade.'''
    PS.Start_Log('PacbackHook', log_file)

    if install is True:
        # Make sure the hook dir exists and pacman is configured to read it
        PS.MK_Dir('/etc/pacman.d/hooks', sudo=False)
        PS.Uncomment_Line_Sed('HookDir', '/etc/pacman.conf', sudo=False)
        hook_lines = [
            '[Trigger]',
            'Operation = Upgrade',
            'Type = Package',
            'Target = *',
            '',
            '[Action]',
            'Description = Pre-Upgrade Pacback Hook',
            'Depends = pacman',
            'When = PreTransaction',
            'Exec = /usr/bin/pacback --hook',
        ]
        PS.Export_List('/etc/pacman.d/hooks/pacback.hook', hook_lines)
        PS.prSuccess('Pacback Hook is Now Installed!')
        PS.Write_To_Log('InstallHook', 'Installed Pacback Hook Successfully',
                        log_file)

    elif install is False:
        # Deleting the hook file is all that is needed to disable it
        PS.RM_File('/etc/pacman.d/hooks/pacback.hook', sudo=False)
        PS.Write_To_Log('RemoveHook', 'Removed Pacback Hook Successfully',
                        log_file)
        PS.prSuccess('Pacback Hook Removed!')

    PS.End_Log('PacbackHook', log_file)
Example #3
0
def check_pacback_version(current_version, rp_path, meta_exists, meta):
    '''Checks a restore point's recorded pacback version and offers to
    upgrade incompatible pre-1.5 tar-based full restore points.

    Args:
        current_version: Running pacback version string ('M.m.p').
        rp_path: Restore point base path (without extension).
        meta_exists: Whether the restore point has a .meta file.
        meta: Parsed meta file lines (or None when meta_exists is False).
    '''
    if meta_exists is False:
        PS.Write_To_Log('VersionControl', 'Restore Point is Missing MetaData',
                        log_file)

        # Check for Full RP Created Before V1.5 (old tar-based layout)
        if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path +
                                                              '.tar.gz'):
            PS.prError(
                'Full Restore Points Generated Before Version 1.5.0 Are No Longer Compatible With Newer Versions of Pacback!'
            )
            PS.Abort_With_Log(
                'VersionControl',
                'RP Version is > V1.5 and MetaData is Missing',
                'Without Meta Data Pacback Can\'t Upgrade This Restore Point!',
                log_file)

    elif meta_exists is True:
        # Find version in metadata file
        target_version = None
        for m in meta:
            if m.split(':')[0] == 'Pacback Version':
                target_version = m.split(':')[1].strip()
                break

        # Bug fix: the original raised a NameError below when the meta
        # file had no 'Pacback Version' line; abort cleanly instead.
        if target_version is None:
            PS.Abort_With_Log(
                'VersionControl',
                'Meta File Is Missing a Pacback Version Line',
                'Can\'t Determine the Version of This Restore Point!',
                log_file)

        # Parse the target major/minor for the pre-1.5 check. (The
        # current version's components and the target patch level were
        # previously parsed but never used, so they have been dropped.)
        tv_M = int(target_version.split('.')[0])
        tv_m = int(target_version.split('.')[1])

        if current_version != target_version:
            PS.Write_To_Log(
                'VersionControl', 'Current Version ' + current_version +
                ' Miss-Matched With ' + target_version, log_file)
        else:
            PS.Write_To_Log('VersionControl',
                            'Both Versions Match ' + current_version, log_file)

        # Check for Full RP's Created Before V1.5
        if tv_M == 1 and tv_m < 5:
            if os.path.exists(rp_path + '.tar') or os.path.exists(rp_path +
                                                                  '.tar.gz'):
                PS.prError(
                    'Full Restore Points Generated Before V1.5.0 Are No Longer Compatible With Newer Versions of Pacback!'
                )
                PS.Write_To_Log(
                    'VersionControl',
                    'Detected Restore Point Version Generated > V1.5',
                    log_file)
                upgrade = PS.YN_Frame(
                    'Do You Want to Upgrade This Restore Point?')
                if upgrade is True:
                    upgrade_to_hardlinks(rp_path)
                else:
                    PS.Abort_With_Log('VersionControl', 'User Exited Upgrade',
                                      'Aborting!', log_file)
Example #4
0
def fetch_paccache():
    '''Always returns a unique list of pkgs found on the file sys.'''

    # Collect package files from every known cache location
    caches = [
        find_pkgs_in_dir('/var/cache/pacman/pkg'),
        find_pkgs_in_dir('/root/.cache'),
        find_pkgs_in_dir('/var/lib/pacback'),
    ]
    for user in os.listdir('/home'):
        caches.append(find_pkgs_in_dir('/home/' + user + '/.cache'))

    fs_list = set().union(*caches)
    PS.Write_To_Log('FetchPaccache', 'Searched ALL Package Cache Locations',
                    log_file)

    unique_pkgs = PS.Trim_Dir(fs_list)
    if len(fs_list) == len(unique_pkgs):
        # No duplicate package files anywhere on disk: return as-is
        PS.Write_To_Log('FetchPaccache',
                        'Returned ' + str(len(fs_list)) + ' Cached Packages',
                        log_file)
        return fs_list

    PS.prWorking('Filtering Duplicate Packages...')

    # Split the unique package names into one chunk per worker
    chunk_size = int(round(len(unique_pkgs) / max_threads(), 0)) + 1
    unique_pkgs = list(unique_pkgs)
    chunks = [
        unique_pkgs[i:i + chunk_size]
        for i in range(0, len(unique_pkgs), chunk_size)
    ]

    # Resolve each duplicate name to a single path in parallel
    with mp.Pool(processes=max_threads()) as pool:
        results = pool.starmap(first_pkg_path,
                               zip(chunks, itertools.repeat(fs_list)))
        new_fs = set(itertools.chain(*results))

    PS.Write_To_Log(
        'FetchPaccache',
        'Returned ' + str(len(new_fs)) + ' Unique Cache Packages',
        log_file)
    return new_fs
Example #5
0
def unlock_rollback():
    '''Restores Mirrorlist in /etc/pacman.d/mirrorlist Which Releases Archive Date Rollback'''
    PS.Start_Log('UnlockRollback', log_file)
    # Check if mirrorlist is locked (a locked list is a single Server line)
    if len(PS.Read_List('/etc/pacman.d/mirrorlist')) == 1:
        PS.Write_To_Log('UnlockRollback', 'Lock Detected on Mirrorlist',
                        log_file)

        # Bug fix: the original tested os.path.exists on a misspelled
        # path ('mirrolist') and without negation, so the missing-backup
        # recovery below could never trigger when it was actually needed.
        if not os.path.exists('/etc/pacman.d/mirrorlist.pacback'):
            PS.Write_To_Log('UnlockRollback', 'Backup Mirrorlist Is Missing',
                            log_file)
            fetch = PS.YN_Frame(
                'Pacback Can\'t Find Your Backup Mirrorlist! Do You Want to Fetch a New US HTTPS Mirrorlist?'
            )
            if fetch is True:
                os.system(
                    "curl -s 'https://www.archlinux.org/mirrorlist/?country=US&protocol=https&use_mirror_status=on' | sed -e 's/^#Server/Server/' -e '/^#/d' | sudo tee /etc/pacman.d/mirrorlist.pacback >/dev/null"
                )
            else:
                PS.Abort_With_Log(
                    'UnlockRollback',
                    'Backup Mirrorlist Is Missing and User Declined Download',
                    'Please Manually Replace Your Mirrorlist!', log_file)

        # Restore the backed-up (or freshly fetched) mirrorlist
        os.system(
            'sudo cp /etc/pacman.d/mirrorlist.pacback /etc/pacman.d/mirrorlist'
        )
        PS.Write_To_Log('UnlockRollback',
                        'Mirrorlist Was Restored Successfully', log_file)

    else:
        PS.Write_To_Log('UnlockRollback', 'No Mirrorlist Lock Was Found',
                        log_file)
        PS.End_Log('UnlockRollback', log_file)
        return PS.prError('Pacback Does NOT Have an Active Date Lock!')

    # Offer a normal sync upgrade now that mirrors are restored
    update = PS.YN_Frame('Do You Want to Update Your System Now?')
    if update is True:
        os.system('sudo pacman -Syu')
        PS.Write_To_Log('UnlockRollback', 'User Ran -Syu Upgrade', log_file)
    if update is False:
        print('Skipping Update!')

    PS.End_Log('UnlockRollback', log_file)
Example #6
0
def rollback_packages(pkg_list):
    '''Allows User to Rollback Any Number of Packages By Name

    Args:
        pkg_list: Iterable of package names to search the caches for.
    '''
    PS.Start_Log('RbPkgs', log_file)
    PS.prWorking('Searching File System for Packages...')
    cache = fetch_paccache()
    pkg_paths = list()
    PS.Write_To_Log('UserSearch', 'Started Search for ' + ' '.join(pkg_list),
                    log_file)

    for pkg in pkg_list:
        found_pkgs = user_pkg_search(pkg, cache)

        if len(found_pkgs) > 0:
            # Newest versions first for the selection menu
            sort_pkgs = sorted(found_pkgs, reverse=True)
            PS.Write_To_Log(
                'UserSearch',
                'Found ' + str(len(found_pkgs)) + ' pkgs for ' + pkg, log_file)
            PS.prSuccess('Pacback Found the Following Package Versions for ' +
                         pkg + ':')
            answer = PS.Multi_Choice_Frame(sort_pkgs)

            if answer is False:
                PS.Write_To_Log('UserSearch',
                                'User Force Exited Selection For ' + pkg,
                                log_file)
            else:
                # Map the chosen version back to its full cache path
                for x in cache:
                    if re.findall(re.escape(answer), x):
                        pkg_paths.append(x)
                        break

        else:
            PS.prError('No Packages Found Under the Name: ' + pkg)
            PS.Write_To_Log('UserSearch',
                            'Search ' + pkg.upper() + ' Returned Zero Results',
                            log_file)

    # Bug fix: the original invoked pacman -U with an empty target list
    # when no packages were found or the user exited every selection.
    if pkg_paths:
        PS.pacman(' '.join(pkg_paths), '-U')
        PS.Write_To_Log('UserSearch',
                        'Sent ' + ' '.join(pkg_paths) + ' to Pacman -U',
                        log_file)
    PS.End_Log('RbPkgs', log_file)
Example #7
0
def clean_cache(count):
    '''Automated Cache Cleaning Using pacman, paccache, and pacback.

    Args:
        count: Number of old package versions to keep; passed straight
            to 'paccache -rk' (string — it is concatenated into the
            shell command).
    '''
    PS.Start_Log('CleanCache', log_file)
    PS.prWorking('Starting Advanced Cache Cleaning...')
    if PS.YN_Frame('Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('sudo pacman -R $(pacman -Qtdq)')
        # Bug fix: log previously claimed '-Rns' while the command run
        # above is plain '-R'; the log now matches reality.
        PS.Write_To_Log('CleanCache', 'Ran pacman -R $(pacman -Qtdq)',
                        log_file)

    if PS.YN_Frame('Do You Want To Remove Old Versions of Installed Packages?'
                   ) is True:
        os.system('sudo paccache -rk ' + count)
        PS.Write_To_Log('CleanCache', 'Ran paccache -rk ' + count, log_file)

    if PS.YN_Frame('Do You Want To Remove Cached Orphans?') is True:
        os.system('sudo paccache -ruk0')
        PS.Write_To_Log('CleanCache', 'Ran paccache -ruk0', log_file)

    if PS.YN_Frame(
            'Do You Want To Check For Old Pacback Restore Points?') is True:
        PS.Write_To_Log('CleanCache', 'Started Search For Old RPs', log_file)
        metas = PS.Search_FS(rp_paths, 'set')
        rps = {f for f in metas if f.endswith(".meta")}

        for m in rps:
            rp_num = m.split('/')[-1]
            # Find RP Create Date in Meta File
            meta = PS.Read_List(m)
            for l in meta:
                if l.split(':')[0] == 'Date Created':
                    target_date = l.split(':')[1].strip()
                    break

            # Parse and Format Dates for Compare
            today = dt.datetime.now().strftime("%Y/%m/%d")
            t_split = list(today.split('/'))
            today_date = dt.date(int(t_split[0]), int(t_split[1]),
                                 int(t_split[2]))
            o_split = list(target_date.split('/'))
            old_date = dt.date(int(o_split[0]), int(o_split[1]),
                               int(o_split[2]))

            # Compare Days
            days = (today_date - old_date).days
            if days > 180:
                PS.prWarning(m.split('/')[-1] + ' Is Over 180 Days Old!')
                if PS.YN_Frame(
                        'Do You Want to Remove This Restore Point?') is True:
                    PS.RM_File(m, sudo=True)
                    PS.RM_Dir(m[:-5], sudo=True)
                    PS.prSuccess('Restore Point Removed!')
                    PS.Write_To_Log('CleanCache', 'Removed RP ' + rp_num,
                                    log_file)
            else:
                # Bug fix: this success message previously ran for EVERY
                # restore point — including ones just deleted for being
                # over 180 days old ("Is Only 183 Days Old!").
                PS.prSuccess(rp_num + ' Is Only ' + str(days) + ' Days Old!')
                PS.Write_To_Log('CleanCache',
                                'RP ' + rp_num + ' Was Less Than 180 Days Old',
                                log_file)

    PS.End_Log('CleanCache', log_file)
Example #8
0
def remove_rp(rp_num, nc):
    '''Removes restore point #rp_num (meta file plus its directory).

    Args:
        rp_num: Restore point number as a (zero-padded) string.
        nc: "No confirm" — when True the RP is removed without prompting.
    '''
    PS.Start_Log('RemoveRP', log_file)
    rp = rp_paths + '/rp' + rp_num + '.meta'

    # Decide whether to remove: nc=True skips the prompt entirely
    if nc is True:
        confirmed = True
    elif nc is False:
        confirmed = PS.YN_Frame(
            'Do You Want to Remove This Restore Point?') is True
        if not confirmed:
            # Bug fix: this log call was missing the log_file argument,
            # which raised a TypeError whenever the user declined.
            PS.Write_To_Log('RemoveRP',
                            'User Declined Removing Restore Point ' + rp_num,
                            log_file)
    else:
        confirmed = False

    if confirmed:
        # rp[:-5] strips '.meta' to get the RP directory path
        PS.RM_File(rp, sudo=False)
        PS.RM_Dir(rp[:-5], sudo=False)
        PS.prSuccess('Restore Point Removed!')
        PS.Write_To_Log('RemoveRP', 'Removed Restore Point ' + rp_num,
                        log_file)

    PS.End_Log('RemoveRP', log_file)
Example #9
0
def search_paccache(pkg_list, fs_list):
    '''Searches cache for matching pkg versions and returns results.'''
    PS.Write_To_Log('SearchPaccache',
                    'Started Search for ' + str(len(pkg_list)) + ' Packages',
                    log_file)

    # Combing package names into one term provides much faster results
    bulk_search = '|'.join(re.escape(name) for name in pkg_list)

    # Partition the file list into one chunk per worker
    chunk_size = int(round(len(fs_list) / max_threads(), 0)) + 1
    paths = list(fs_list)
    chunks = [
        paths[start:start + chunk_size]
        for start in range(0, len(paths), chunk_size)
    ]

    with mp.Pool(processes=max_threads()) as pool:
        hits = pool.starmap(search_pkg_chunk,
                            zip(itertools.repeat(bulk_search), chunks))

    found_pkgs = set(itertools.chain.from_iterable(hits))
    PS.Write_To_Log(
        'SearchPaccache', 'Found ' + str(len(found_pkgs)) + ' OUT OF ' +
        str(len(pkg_list)) + ' Packages', log_file)
    return found_pkgs
Example #10
0
def pre_fligh_check():
    '''Migrates restore points from the pre-1.6 install-directory
    location to /var/lib/pacback on first run after an upgrade.

    No-op when the old restore-points folder does not exist.
    '''
    # [:-5] strips the last 5 chars of the module dir to reach the
    # install base — assumes a fixed 5-char subdir name; TODO confirm
    base_dir = os.path.dirname(os.path.realpath(__file__))[:-5]
    old_rp_path = base_dir + '/restore-points'
    if os.path.exists(old_rp_path):
        PS.Start_Log('PreFlight', log_file)
        PS.prError('Looks Like You Are Upgrading From A Version Before 1.6!')
        PS.prWorking('Migrating Your Restore Point Folder Now...')
        check_if_root()
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        os.system('mv ' + old_rp_path + ' /var/lib/pacback')
        # Lock the new location down to root only
        os.system(
            'chown root:root /var/lib/pacback && chmod 700 /var/lib/pacback')
        PS.Write_To_Log('PreFlight',
                        'Pacback Successfully Migrated To /var/lib/pacback',
                        log_file)
        # Bug fix: the log session opened by Start_Log was never closed;
        # every other function in this file pairs Start_Log with End_Log.
        PS.End_Log('PreFlight', log_file)
Example #11
0
def upgrade_to_hardlinks(rp_path):
    '''Converts a pre-1.5 tar-based full restore point into the newer
    hardlink-based layout, preserving any custom files stored in it.

    Args:
        rp_path: Base path of the restore point (without extension).
    '''
    # This is a Total Hack Job. Don't Judge Me :(
    PS.prWorking('Unpacking...')
    PS.Write_To_Log('HardlinkUpgrade',
                    'Unpacking Old Restore Point For Conversion', log_file)
    # Decompress first if gzipped, then untar in place
    if os.path.exists(rp_path + '.tar.gz'):
        PS.GZ_D(rp_path + '.tar.gz')
    PS.Untar_Dir(rp_path + '.tar')
    PS.Write_To_Log('HardlinkUpgrade', 'Unpacked Restore Point', log_file)

    # Read and Parse Meta Data
    meta = PS.Read_List(rp_path + '.meta')
    # Package list runs from the 'Pacman List' header to end of file
    meta_old_pkgs = PS.Read_Between('======= Pacman List ========',
                                    '<Endless>', meta)
    # Custom dir list sits between the two headers; [:-1] drops the last
    # element (presumably a trailing blank/header line — TODO confirm)
    meta_dirs = PS.Read_Between('========= Dir List =========',
                                '======= Pacman List ========', meta)[:-1]
    PS.Write_To_Log('HardlinkUpgrade', 'Read RP MetaData', log_file)

    # Find Existing Package copies elsewhere on the file system
    pc = PS.Search_FS(rp_path + '/pac_cache')
    found = PU.search_paccache(PS.Replace_Spaces(meta_old_pkgs),
                               PU.fetch_paccache())

    if len(found) == len(pc):
        # Every cached package exists elsewhere: rebuild pac_cache
        # entirely from hardlinks to the external copies
        PS.prSuccess('All Packages Found!')
        PS.Write_To_Log('HardlinkUpgrade',
                        'All Packages Where Found Elsewhere', log_file)
        PS.RM_Dir(rp_path + '/pac_cache', sudo=False)
        PS.MK_Dir(rp_path + '/pac_cache', sudo=False)
        for pkg in tqdm.tqdm(found, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + rp_path + '/pac_cache/' +
                      pkg.split('/')[-1])
        PS.Write_To_Log('HardlinkUpgrade',
                        'Hardlinked Packages From Other Locations', log_file)

    elif len(found) < len(pc):
        # Only some packages exist elsewhere: keep the RP's unique files
        # and replace just the duplicated ones with hardlinks
        PS.Write_To_Log(
            'HardlinkUpgrade',
            'Not All Packages Where Found. Mergeing With Hardlinks', log_file)
        duplicate = PS.Trim_Dir(pc).intersection(PS.Trim_Dir(found))
        for d in tqdm.tqdm(duplicate, desc='Mergeing and Hardlinking'):
            PS.RM_File(rp_path + '/pac_cache/' + d, sudo=False)
            # Locate the external copy whose basename matches
            for p in found:
                if p.split('/')[-1] == d.split('/')[-1]:
                    os.system('sudo ln ' + p + ' ' + rp_path + '/pac_cache/' +
                              d)
                    break
        PS.Write_To_Log('HardlinkUpgrade',
                        'Successfully Merged Restore Point Packages', log_file)

    if len(meta_dirs) > 0:
        PS.Write_To_Log('HardlinkUpgrade', 'Detected Custom Files Saved In RP',
                        log_file)
        # Collect every unpacked file that is NOT under pac_cache
        f_list = set()
        rp_fs = PS.Search_FS(rp_path)
        for f in rp_fs:
            if f[len(rp_path):].split('/')[1] != 'pac_cache':
                f_list.add(f)

        # Re-pack the custom files into the new-style '<NN>_dirs.tar'
        # inside the RP dir. NOTE(review): rp_path[-2:] assumes the path
        # ends in the two-digit RP number — confirm against callers.
        with tarfile.open(rp_path + '/' + rp_path[-2:] + '_dirs.tar',
                          'w') as tar:
            for f in tqdm.tqdm(f_list, desc='Adding Dir\'s to Tar'):
                tar.add(f, f[len(rp_path):])
        PS.Write_To_Log('HardlinkUpgrade', 'Added Custom Files To New RP Tar',
                        log_file)

        # Remove the now-tarred unpacked copies of the custom dirs
        for d in meta_dirs:
            PS.RM_Dir(rp_path + '/' + d.split('/')[1], sudo=False)
        PS.Write_To_Log('HardlinkUpgrade', 'Cleaned Unpacked Custom Files',
                        log_file)

    # The old-format tar is no longer needed
    PS.RM_File(rp_path + '.tar', sudo=False)
    PS.Write_To_Log('HardlinkUpgrade', 'Removed Old Restore Point Tar',
                    log_file)
    PS.prSuccess('RP Version Upgrade Complete!')
    PS.Write_To_Log('HardlinkUpgrade', 'Restore Point Upgrade Complete',
                    log_file)
Example #12
0
def rollback_to_rp(version, rp_num):
    '''Rolls the system back to restore point #rp_num.

    Downgrades changed packages, optionally removes packages added
    since the RP was created, and (for RPs with custom dirs) diffs and
    restores the stored files against the live file system.

    Args:
        version: Running pacback version string, used for RP
            compatibility checks.
        rp_num: Restore point number (zero-padded to two digits here).
    '''
    PS.Start_Log('RollbackRP', log_file)
    #####################
    # Stage Rollback Vars
    #####################
    rp_num = str(rp_num).zfill(2)
    rp_path = '/var/lib/pacback/restore-points/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    # Currently installed packages; entries are split on ' ' below, so
    # presumably 'name version' strings — confirm against pacman_Q
    current_pkgs = pu.pacman_Q()

    # Set Full RP Status (a full RP has an on-disk directory)
    if os.path.exists(rp_path):
        full_rp = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Full RP',
                        log_file)
    else:
        full_rp = False
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Light RP',
                        log_file)

    # Set Meta Status, Read Meta, Diff Packages, Set Vars
    if os.path.exists(rp_meta):
        meta_exists = True
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Has MetaData',
                        log_file)
        meta = PS.Read_List(rp_meta)
        # Custom dirs sit between the two section headers; [:-1] drops
        # the trailing element (presumably a blank/header line)
        meta_dirs = PS.Read_Between('= Dir List =',
                                    '= Pacman List =',
                                    meta,
                                    re_flag=True)[:-1]
        meta_old_pkgs = PS.Read_Between('= Pacman List =',
                                        '<Endless>',
                                        meta,
                                        re_flag=True)

        # Checking for New and Changed Packages
        # changed = recorded in the RP but not currently installed at
        # that exact version
        changed_pkgs = set(set(meta_old_pkgs) - current_pkgs)
        # Strip versions (take the name before the first space) to spot
        # packages added since the RP was made
        meta_old_pkg_strp = {pkg.split(' ')[0] for pkg in meta_old_pkgs}
        current_pkg_strp = {pkg.split(' ')[0] for pkg in current_pkgs}
        added_pkgs = set(current_pkg_strp - meta_old_pkg_strp)
        m_search = PS.Replace_Spaces(changed_pkgs)
        PS.Write_To_Log('RollbackRP', 'Finished Reading RP MetaData', log_file)

    else:
        meta_exists = False
        meta = None
        PS.Write_To_Log('RollbackRP', 'RP #' + rp_num + ' Is Missing MetaData',
                        log_file)

    # Abort If No Files Are Found
    if meta_exists is False and full_rp is False:
        PS.Abort_With_Log('RollbackRP',
                          'Restore Point #' + rp_num + ' Was NOT FOUND!',
                          'Restore Point #' + rp_num + ' Was NOT FOUND!',
                          log_file)

    # Compare Versions (may upgrade or abort on incompatible RPs)
    vc.check_pacback_version(version, rp_path, meta_exists, meta)

    ####################
    # Full Restore Point
    ####################
    if full_rp is True:
        if meta_exists is True:
            # Pass If No Packages Have Changed
            if len(changed_pkgs) > 0:
                PS.Write_To_Log(
                    'RollbackRP',
                    str(len(changed_pkgs)) + ' Packages Have Been Changed',
                    log_file)
                found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP',
                                'Send Found Packages to pacman -U', log_file)
            else:
                PS.prSuccess('No Packages Have Been Changed!')
                PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed',
                                log_file)

        elif meta_exists is False:
            # No metadata: blind-reinstall everything from the RP cache,
            # then abort before any meta-dependent steps below.
            # NOTE(review): code after this branch assumes Abort_With_Log
            # terminates the process — confirm; otherwise added_pkgs and
            # meta_dirs would be unbound further down.
            rp_cache = rp_path + '/pac_cache'
            PS.pacman(rp_cache + '/*', '--needed -U')
            PS.Write_To_Log('RollbackRP', 'Send pacman -U /* --needed',
                            log_file)
            PS.prError('Restore Point #' + rp_num + ' MetaData Was NOT FOUND!')
            PS.Abort_With_Log('RollbackRP',
                              'Meta Is Missing So Skipping Advanced Features',
                              'Skipping Advanced Features!', log_file)

    #####################
    # Light Restore Point
    #####################
    elif meta_exists is True and full_rp is False:

        # Pass If No Packages Have Changed
        if len(changed_pkgs) > 0:
            PS.prWorking('Bulk Scanning for ' + str(len(meta_old_pkgs)) +
                         ' Packages...')
            found_pkgs = pu.search_paccache(m_search, pu.fetch_paccache())
        else:
            PS.prSuccess('No Packages Have Been Changed!')
            PS.Write_To_Log('RollbackRP', 'No Packages Have Been Changed',
                            log_file)
            # NOTE(review): {} is an empty dict, not a set; only len()
            # is applied to it below, so behavior is unaffected
            found_pkgs = {}

        if len(changed_pkgs) == 0:
            pass

        # Pass Comparison if All Packages Found
        elif len(found_pkgs) == len(changed_pkgs):
            PS.prSuccess('All Packages Found In Your Local File System!')
            PS.Write_To_Log('RollbackRP', 'All Packages Found', log_file)
            PS.pacman(' '.join(found_pkgs), '--needed -U')
            PS.Write_To_Log('RollbackRP', 'Sent Found Packages To pacman -U',
                            log_file)

        # Branch if Packages are Missing
        elif len(found_pkgs) < len(changed_pkgs):
            # NOTE(review): found - changed is negative on this branch,
            # so this log line prints a negative count
            PS.Write_To_Log(
                'RollbackRP',
                str(len(found_pkgs) - len(changed_pkgs)) +
                ' Packages Are Where Not Found', log_file)
            missing_pkg = set(m_search - pu.trim_pkg_list(found_pkgs))

            # Show Missing Pkgs
            PS.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                PS.prError(pkg)

            if PS.YN_Frame('Do You Want To Continue Anyway?') is True:
                PS.pacman(' '.join(found_pkgs), '-U')
                PS.Write_To_Log('RollbackRP',
                                'Sent Found Packages To pacman -U', log_file)
            else:
                PS.Abort_With_Log(
                    'RollbackRP',
                    'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', log_file)

    # Ask User If They Want to Remove New Packages
    # (added_pkgs is bound only on meta_exists paths; meta-less paths
    # abort before reaching here — see NOTE above)
    if len(added_pkgs) > 0:
        PS.prWarning(
            'The Following Packages Are Installed But Are NOT Present in Restore Point #'
            + rp_num + ':')
        PS.Write_To_Log(
            'RollbackRP',
            str(len(added_pkgs)) + ' Have Been Added Since RP Creation',
            log_file)
        for pkg in added_pkgs:
            PS.prAdded(pkg)
        if PS.YN_Frame('Do You Want to Remove These Packages From Your System?'
                       ) is True:
            PS.pacman(' '.join(added_pkgs), '-R')
            PS.Write_To_Log('RollbackRP', 'Sent Added Packages To pacman -R',
                            log_file)
    else:
        PS.prSuccess('No Packages Have Been Added!')
        PS.Write_To_Log('RollbackRP',
                        'No Packages Have Been Added Since RP Creation',
                        log_file)

    ########################
    # Stage Custom File Diff
    ########################
    if len(meta_dirs) > 0:
        PS.Write_To_Log('RollbackRP', 'Custom Dirs Specified in RP Meta File',
                        log_file)
        # Unpack destination: the tar path minus its '.tar' suffix
        custom_dirs = rp_tar[:-4]
        if os.path.exists(rp_tar + '.gz'):
            PS.prWorking('Decompressing Restore Point....')
            # Use pigz (parallel gzip) when it is installed
            if any(re.findall('pigz', line.lower()) for line in current_pkgs):
                os.system('pigz -d ' + rp_tar + '.gz -f')
                PS.Write_To_Log('RPDiff',
                                'Decompressed Custom Files With Pigz',
                                log_file)
            else:
                PS.GZ_D(rp_tar + '.gz')
                PS.Write_To_Log('RPDiff',
                                'Decompressed Custom Files With Python',
                                log_file)

        # Clear any leftover unpacked copy from a previous run
        if os.path.exists(custom_dirs):
            PS.RM_Dir(custom_dirs, sudo=True)

        PS.prWorking('Unpacking Files from Restore Point Tar....')
        PS.Untar_Dir(rp_tar)
        PS.Write_To_Log('RPDiff', 'Unpacked Custom Files RP Tar', log_file)

        ################################
        # Restore Files Without Checksum
        ################################
        diff_yn = PS.YN_Frame(
            'Do You Want to Checksum Diff Restore Point Files Against Your Current File System?'
        )
        if diff_yn is False:
            PS.Write_To_Log('RPDiff', 'User Skipped Checksumming Files',
                            log_file)
            PS.prWarning(
                'OVERWRITING FILES WITHOUT CHECKSUMMING CAN BE EXTREMELY DANGEROUS!'
            )

            ow = PS.YN_Frame(
                'Do You Still Want to Continue and Restore ALL Files?')
            if ow is False:
                PS.Write_To_Log('RPDiff',
                                'User Declined Overwrite After Skipping Diff',
                                log_file)
                print('Skipping! Restore Point Files Are Unpacked in ' +
                      custom_dirs)
                PS.Write_To_Log('RPDiff',
                                'Left Files Unpacked in ' + custom_dirs,
                                log_file)

            elif ow is True:
                print(
                    'Starting Full File Restore! Please Be Patient As All Files are Overwritten...'
                )
                rp_fs = PS.Search_FS(custom_dirs)
                # Copy every unpacked file over its live counterpart;
                # f[len(custom_dirs):] maps the staged path back to '/'
                for f in rp_fs:
                    PS.prWorking('Please Be Patient. This May Take a While...')
                    os.system('sudo mkdir -p ' +
                              PS.Escape_Bash('/'.join(f.split('/')[:-1])) +
                              ' && sudo cp -af ' + PS.Escape_Bash(f) + ' ' +
                              PS.Escape_Bash(f[len(custom_dirs):]))

        ############################
        # Checksum and Compare Files
        ############################
        elif diff_yn is True:
            PS.Write_To_Log('RPDiff', 'Started Checksumming Custom Files',
                            log_file)
            rp_fs = PS.Search_FS(custom_dirs)
            # Same paths with the staging prefix stripped (live-FS paths)
            rp_fs_trim = set(path[len(custom_dirs):]
                             for path in PS.Search_FS(custom_dirs))

            # Checksum Restore Point Files with a MultiProcessing Pool
            with mp.Pool(os.cpu_count()) as pool:
                rp_checksum = set(
                    tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs),
                              total=len(rp_fs),
                              desc='Checksumming Restore Point Files'))
                sf_checksum = set(
                    tqdm.tqdm(pool.imap(PS.Checksum_File, rp_fs_trim),
                              total=len(rp_fs_trim),
                              desc='Checksumming Source Files'))
            PS.Write_To_Log('RPDiff', 'Finished Checksumming Custom Files',
                            log_file)

            # Compare Checksums For Files That Exist
            PS.Write_To_Log('RPDiff', 'Starting Sorting and Comparing Files',
                            log_file)
            rp_csum_trim = set(path[len(custom_dirs):] for path in rp_checksum)
            # Entries whose live checksum differs from the RP's copy
            rp_diff = sf_checksum.difference(rp_csum_trim)

            # Filter Removed and Changed Files
            # (checksum entries look like 'path : <digest>' and missing
            # files carry a 'FILE MISSING' marker — see usage below)
            diff_removed = set()
            diff_changed = set()
            for csum in rp_diff:
                if re.findall('FILE MISSING', csum):
                    diff_removed.add(csum)
                else:
                    diff_changed.add(csum.split(' : ')[0] + ' : FILE CHANGED!')

            # Find Added Files: live files under the meta dirs that the
            # restore point does not contain
            src_fs = set()
            for x in meta_dirs:
                for l in PS.Search_FS(x):
                    src_fs.add(l)
            diff_new = src_fs.difference(rp_fs_trim)

            PS.Write_To_Log('RPDiff', 'Finished Comparing and Sorting Files',
                            log_file)

            # Print Changed Files For User
            if len(diff_changed) + len(diff_new) + len(diff_removed) == 0:
                PS.Write_To_Log('RPDiff',
                                'Checksum Returned Zero Changed Files',
                                log_file)
                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff',
                                'Cleaned Up Files and Completed Successfully',
                                log_file)
                PS.prSuccess('No Files Have Been Changed!')

            #################
            # Overwrite Files
            #################
            else:
                # Changed files: restore the RP's copy over the live one
                if len(diff_changed) > 0:
                    PS.Write_To_Log(
                        'RPDiff',
                        'Found ' + str(len(diff_changed)) + ' Changed Files',
                        log_file)
                    PS.prWarning('The Following Files Have Changed:')
                    for f in diff_changed:
                        PS.prChanged(f)
                    if PS.YN_Frame(
                            'Do You Want to Overwrite Files That Have Been CHANGED?'
                    ) is True:
                        PS.prWorking(
                            'Please Be Patient. This May Take a While...')
                        for f in diff_changed:
                            fs = (f.split(' : ')[0])
                            os.system('sudo cp -af ' +
                                      PS.Escape_Bash(custom_dirs + fs) + ' ' +
                                      PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Changed Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Restoring Files',
                                        log_file)

                # Removed files: recreate them (and their parent dirs)
                if len(diff_removed) > 0:
                    PS.Write_To_Log(
                        'RPDiff',
                        'Found ' + str(len(diff_removed)) + ' Removed Files',
                        log_file)
                    PS.prWarning('The Following Files Have Been Removed:')
                    for f in diff_removed:
                        PS.prRemoved(f)
                    if PS.YN_Frame(
                            'Do You Want to Add Files That Have Been REMOVED?'
                    ) is True:
                        PS.prWorking(
                            'Please Be Patient. This May Take a While...')
                        for f in diff_removed:
                            fs = (f.split(' : ')[0])
                            os.system(
                                'sudo mkdir -p ' +
                                PS.Escape_Bash('/'.join(fs.split('/')[:-1])) +
                                ' && sudo cp -af ' +
                                PS.Escape_Bash(custom_dirs + fs) + ' ' +
                                PS.Escape_Bash(fs))
                        PS.Write_To_Log('RPDiff', 'Restored Removed Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Restoring Files',
                                        log_file)

                # New files: optionally delete files added since the RP.
                # NOTE(review): uses plain 'rm' without sudo and without
                # Escape_Bash, unlike the branches above — confirm intent
                if len(diff_new) > 0:
                    PS.Write_To_Log(
                        'RPDiff', 'Found ' + str(len(diff_new)) + ' New Files',
                        log_file)
                    PS.prWarning('The Following Files Have Been Added:')
                    for f in diff_new:
                        PS.prAdded(f + ' : NEW FILE!')
                    if PS.YN_Frame(
                            'Do You Want to Remove Files That Have Been ADDED?'
                    ) is True:
                        for f in diff_new:
                            fs = (f.split(' : ')[0])
                            os.system('rm ' + fs)
                        PS.Write_To_Log('RPDiff', 'Removed New Files',
                                        log_file)
                    else:
                        PS.Write_To_Log('RPDiff',
                                        'User Declined Restoring Files',
                                        log_file)

                PS.RM_Dir(custom_dirs, sudo=True)
                PS.Write_To_Log('RPDiff', 'Done Comparing and Restoring Files',
                                log_file)
                PS.prSuccess('File Diff and Restore Complete!')

    else:
        PS.prSuccess('Rollback to Restore Point #' + rp_num + ' Complete!')
        PS.Write_To_Log('RollbackRP',
                        'Rollback to RP #' + rp_num + ' Complete', log_file)

    PS.End_Log('RollbackRP', log_file)
Example #13
0
def create_restore_point(version, rp_num, rp_full, dir_list, no_confirm, notes):
    '''Create restore point #rp_num and export its .meta description file.

    A *full* restore point (rp_full=True) hardlinks every currently installed
    package found in the pacman cache into the RP directory and, when
    dir_list is given, packs those directories into a tar (gzip-compressed
    if the payload exceeds 1 GB). A *light* restore point (rp_full=False)
    records package metadata only and does not support custom directories.

    Args:
        version: Pacback version string recorded in the meta file.
        rp_num: Restore point number (int or str); normalized to two digits.
        rp_full: True for a full RP, False for a light RP.
        dir_list: Iterable of directory paths to snapshot (full RPs only);
            may be empty or None.
        no_confirm: When True, skip all interactive confirmation prompts.
        notes: Optional free-form note stored in the meta file.
    '''
    PS.Start_Log('CreateRP', log_file)
    # Fail Safe for New Users: build the base pacback tree on first run
    if os.path.exists(rp_paths) is False:
        PS.MK_Dir('/var/lib/pacback', sudo=False)
        PS.MK_Dir(rp_paths, sudo=False)
        PS.Write_To_Log('CreateRP', 'Created Base RP Folder in /var/lib', log_file)

    # Set Base Vars
    rp_num = str(rp_num).zfill(2)  # zero-pad so RP paths sort lexicographically
    rp_path = rp_paths + '/rp' + rp_num
    rp_tar = rp_path + '/rp' + rp_num + '_dirs.tar'
    rp_meta = rp_path + '.meta'
    found_pkgs = set()
    pac_size = 0
    # Hoisted out of the full-RP branch so the meta-data section below can
    # always reference them, even when no custom dirs were packed.
    dir_size = 0
    rp_files = set()

    # Check for Existing Restore Points; prompt before overwriting unless
    # this is RP #00 (the hook-managed RP) or --no_confirm was given.
    if os.path.exists(rp_path) or os.path.exists(rp_meta):
        if no_confirm is False:
            if int(rp_num) != 0:
                PS.prWarning('Restore Point #' + rp_num + ' Already Exists!')
                if PS.YN_Frame('Do You Want to Overwrite It?') is False:
                    PS.Abort_With_Log('CreateRP', 'User Aborted Overwrite of RP #' + rp_num, 'Aborting!', log_file)

        PS.RM_File(rp_meta, sudo=False)
        PS.RM_Dir(rp_path, sudo=False)
        PS.Write_To_Log('CreateRP', 'Removed RP #' + rp_num + ' During Overwrite', log_file)

    ###########################
    # Full Restore Point Branch
    ###########################
    if rp_full is True:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As Full RP', log_file)
        print('Building Full Restore Point...')

        pac_cache = rp_path + '/pac_cache'

        PS.prWorking('Retrieving Current Packages...')
        pkg_search = pu.pacman_Q(replace_spaces=True)

        # Search File System for Pkgs
        PS.prWorking('Bulk Scanning for ' + str(len(pkg_search)) + ' Packages...')
        found_pkgs = pu.search_paccache(pkg_search, pu.fetch_paccache())
        pac_size = PS.Size_Of_Files(found_pkgs)

        # Ask About Missing Pkgs (RP #00 and --no_confirm proceed silently)
        if len(found_pkgs) != len(pkg_search):
            PS.Write_To_Log('CreateRP', 'Not All Packages Were Found', log_file)
            if int(rp_num) != 0:
                if no_confirm is False:
                    pkg_split = pu.trim_pkg_list(found_pkgs)
                    PS.prError('The Following Packages Were NOT Found!')
                    for pkg in set(pkg_search - pkg_split):
                        PS.prWarning(pkg + ' Was NOT Found!')
                    if PS.YN_Frame('Do You Still Want to Continue?') is False:
                        PS.Abort_With_Log('CreateRP', 'User Aborted Due to Missing Pkgs', 'Aborting!', log_file)

        # HardLink Packages to RP (hardlinks cost no extra disk space)
        PS.MK_Dir(rp_path, sudo=False)
        PS.MK_Dir(pac_cache, sudo=False)
        for pkg in tqdm.tqdm(found_pkgs, desc='Hardlinking Packages to Pacback RP'):
            os.system('sudo ln ' + pkg + ' ' + pac_cache + '/' + pkg.split('/')[-1])
        PS.Write_To_Log('CreateRP', 'HardLinked ' + str(len(found_pkgs)) + ' Packages', log_file)

        # Find Custom Files for RP
        if dir_list:
            PS.Write_To_Log('CreateRP', 'User Defined Custom RP Files', log_file)
            # Find and Get Size of Custom Files
            for d in dir_list:
                for f in PS.Search_FS(d, 'set'):
                    try:
                        dir_size += os.path.getsize(f)
                    except OSError:
                        # File vanished or is unreadable; still record it so
                        # the tar step below gets a chance to archive it.
                        pass
                    rp_files.add(f)

            # Pack Custom Folders Into a Tar
            with tarfile.open(rp_tar, 'w') as tar:
                for f in tqdm.tqdm(rp_files, desc='Adding Dir\'s to Tar'):
                    tar.add(f)
            PS.Write_To_Log('CreateRP', 'Tar Created For Custom RP Files', log_file)

            # Compress Custom Files If Added Larger Than 1GB
            if dir_size > 1073741824:
                PS.prWorking('Compressing Restore Point Files...')
                # Prefer pigz (parallel gzip) when it is installed
                if any('pigz' in pkg.lower() for pkg in pkg_search):
                    os.system('pigz ' + rp_tar + ' -f')
                else:
                    PS.GZ_C(rp_tar, rm=True)
                PS.Write_To_Log('CreateRP', 'Compressed Custom Files RP Tar', log_file)

    elif rp_full is False:
        PS.Write_To_Log('CreateRP', 'Creating RP #' + rp_num + ' As A Light RP', log_file)
        # Truthiness check: dir_list may be None as well as empty
        if dir_list:
            PS.Abort_With_Log('CreateRP', 'Custom Dirs Are Not Supported By LightRP',
                              'Light Restore Points DO NOT Support Custom Dirs! Please Use The `-f` Flag', log_file)
        print('Building Light Restore Point...')

    #########################
    # Generate Meta Data File
    #########################
    current_pkgs = pu.pacman_Q()
    meta_list = ['====== Pacback RP #' + rp_num + ' ======',
                 'Pacback Version: ' + version,
                 'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
                 'Packages Installed: ' + str(len(current_pkgs)),
                 'Packages in RP: ' + str(len(found_pkgs)),
                 'Size of Packages in RP: ' + PS.Convert_Size(pac_size)]

    if notes:
        meta_list.append('Notes: ' + notes)

    if dir_list:
        meta_list.append('Dirs File Count: ' + str(len(rp_files)))
        meta_list.append('Dirs Total Size: ' + PS.Convert_Size(dir_size))
        meta_list.append('')
        meta_list.append('========= Dir List =========')
        for d in dir_list:
            meta_list.append(d)

    meta_list.append('')
    meta_list.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta_list.append(pkg)

    # Export Final Meta Data File
    PS.Export_List(rp_meta, meta_list)
    PS.Write_To_Log('CreateRP', 'RP #' + rp_num + ' Was Successfully Created', log_file)
    PS.End_Log('CreateRP', log_file)
    PS.prSuccess('Restore Point #' + rp_num + ' Successfully Created!')