Example #1
def abort_fail(func, output, message, config):
    '''
    This is a surrogate function for other functions to safely abort runtime during a failure.
    It reports the func sending the kill signal as the origin, rather than session.abort().
    '''
    paf.write_to_log(func, 'FAILURE: ' + output, config['log'])
    unlock(config)
    paf.prError(message)
    sys.exit()
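A minimal usage sketch (not from the Pacback source; the caller name and messages are made up for illustration): a function passes its own name so the log points at the true origin of the failure.

def example_caller(config, pkg_list):
    # Hypothetical caller for illustration only.
    fname = 'example.example_caller()'
    if not pkg_list:
        # abort_fail() logs the failure, unlocks the session, and exits,
        # recording example_caller() as the origin instead of session.abort().
        abort_fail(fname, 'No Packages Were Given!',
                   'Aborting: Nothing To Do!', config)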
Example #2
def packages(config, pkgs):
    '''
    Allows the user to rollback packages by name.
    Packages are not sent to pacman until the user has
    selected all the packages they want to restore/change.
    '''
    # Startup
    fname = 'restore.packages(' + str(len(pkgs)) + ')'
    pkg_paths = list()
    cache = utils.scan_caches(config)

    # Search For Each Package Name And Let User Select Version
    paf.write_to_log(fname, 'Started Search for ' + ', '.join(pkgs),
                     config['log'])
    for pkg in pkgs:
        found_pkgs = utils.user_pkg_search(pkg, cache)
        sort_pkgs = sorted(found_pkgs, reverse=True)

        if found_pkgs:
            paf.write_to_log(
                fname, 'Found ' + str(len(found_pkgs)) +
                ' Cached Versions for `' + pkg + '`', config['log'])
            paf.prSuccess('Pacback Found the Following Versions for `' + pkg +
                          '`:')
            answer = paf.multi_choice_frame(sort_pkgs)

            # Lets User Abort Package Selection
            if answer is False or answer is None:
                paf.write_to_log(fname, 'User Selected NOTHING For ' + pkg,
                                 config['log'])
            else:
                for x in cache:
                    if re.findall(re.escape(answer), x):
                        pkg_paths.append(x)
                        break

        else:
            paf.prError('No Packages Found Under the Name: ' + pkg)
            paf.write_to_log(
                fname, 'Search for ' + pkg.upper() + ' Returned ZERO Results!',
                config['log'])

    if pkg_paths:
        paf.pacman(' '.join(pkg_paths), '-U')
        paf.write_to_log(fname,
                         'Sent Pacman Selected Packages For Installation',
                         config['log'])
    else:
        paf.write_to_log(
            fname, 'User Selected No Packages or No Packages Were Found',
            config['log'])
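A hedged call sketch; the package names are placeholders. Each name is searched in the caches and the user picks a version (or nothing) from the menu before anything is sent to pacman.

# Hypothetical invocation (package names are examples only).
packages(config, ['firefox', 'linux-headers'])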
Example #3
def compare(config, target_version):
    '''
    Parses the versions and forks if an upgrade is needed.
    '''
    fname = 'version.compare()'

    # Current Version
    cv_M = int(config['version'].split('.')[0])
    cv_m = int(config['version'].split('.')[1])
    cv_p = int(config['version'].split('.')[2])

    # Target Version
    tv_M = int(target_version.split('.')[0])
    tv_m = int(target_version.split('.')[1])
    tv_p = int(target_version.split('.')[2])

    versions = ((cv_M, cv_m, cv_p), (tv_M, tv_m, tv_p))

    if config['version'] != target_version:
        paf.write_to_log(
            fname, 'Current Version ' + config['version'] +
            ' Mismatched With ' + target_version, config['log'])

        # Check for Versions Older Than V1.5
        if tv_M == 1 and tv_m < 5:
            paf.prError(
                'Restore Points Generated Before V1.5.0 Are Not Backwards Compatible With Newer Versions of Pacback!'
            )
            paf.write_to_log(
                fname, 'Detected a Restore Point Version Generated Before V1.5',
                config['log'])
            session.abort_fail(
                fname,
                'Can\'t Upgrade or Restore Versions Created Before V1.5',
                'Aborting!', config)

        # Check for V1.5 to V1.7
        elif tv_M == 1 and tv_m > 5:
            paf.write_to_log(fname, 'Detected Alpha Restore Point!',
                             config['log'])

    else:
        paf.write_to_log(fname, 'Both Versions Match ' + config['version'],
                         config['log'])

    return versions
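A short sketch of how the returned version tuples could be used; the target version string is a placeholder. Because each tuple is (major, minor, patch), plain tuple comparison orders versions correctly.

# Hypothetical use of the return value (version string is an example).
current, target = compare(config, '1.5.1')
if target < current:
    print('The Target Was Created By an Older Version of Pacback')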
Example #4
def validate(config, info):
    '''
    Checks if a meta file has become corrupted or is missing.
    '''
    fname = 'meta.validate(' + info['type'] + info['id'] + ')'

    if os.path.exists(info['meta']) and os.path.exists(info['meta_md5']):
        paf.write_to_log(fname, 'Meta File and Meta Checksum Are Present',
                         config['log'])
        with open(info['meta_md5']) as f:
            csum = f.read().strip()
        msum = str(paf.checksum_file(info['meta'])[1]).strip()

        if csum == msum:
            paf.write_to_log(fname, 'Meta Passed Checksum', config['log'])
            return

        else:
            paf.write_to_log(fname, 'Meta Checksum FAILED!', config['log'])
            paf.prError(info['TYPE'] + ' ' + info['id'] +
                        ' Has Failed its Checksum Check!')
            paf.prError('This ' + info['TYPE'] + ' Has Likely Become Corrupt!')

            if paf.yn_frame('Do You Want to Remove This ' + info['TYPE'] +
                            ' Now?') is True:
                utils.remove_id(config, info)
                session.abort(fname, 'User Deleted Corrupted ' + info['TYPE'],
                              info['TYPE'] + ' Was Removed. Exiting Now!',
                              config)
            else:
                session.abort(
                    fname,
                    'User Chose NOT to Remove Corrupted ' + info['TYPE'],
                    'Okay, Leaving the ' + info['TYPE'] +
                    ' Alone. Exiting Now!', config)

    elif os.path.exists(info['meta']) and not os.path.exists(info['meta_md5']):
        paf.write_to_log(fname, 'Meta File is Missing its Checksum File!',
                         config['log'])
        paf.prError(info['TYPE'] + ' ' + info['id'] +
                    ' is Missing a Checksum!')

        if paf.yn_frame('Do You Still Want To Continue?') is False:
            session.abort(fname, 'User Exited Due to Missing Checksum File',
                          'Okay, Aborting Due to Missing Checksum', config)
        else:
            paf.write_to_log(
                fname,
                'User Chose To Continue Even Though The Checksum is Missing',
                config['log'])
            return
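A hedged sketch of the fields validate() reads from info; the key names come from the code above, while the values and paths are placeholders.

# Hypothetical info dict (paths are placeholders, keys match the code above).
info = {
    'type': 'rp',                       # short tag used in the log name
    'TYPE': 'Restore Point',            # display name used in user messages
    'id': '02',
    'meta': '/path/to/rp02.meta',       # assumed meta file location
    'meta_md5': '/path/to/rp02.md5',    # assumed checksum file location
}
validate(config, info)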
Example #5
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called directly but
    instead invoked through a higher-level 'API'-like call.
    This logic does the actual work of downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if pkg_results['search']:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        # This is Very Bad
        if len(found_pkgs) > len(pkg_results['search']):
            paf.prError(
                'Error: Somehow More Packages Were Found Than Were Searched For!'
            )
            paf.write_to_log(
                fname,
                'Error: Somehow More Packages Were Found Than Were Searched For!',
                config['log'])
            print('Starting Error Resolving Process...')
            error_handler_results = error.too_many_pkgs_found(
                config, parms, found_pkgs, pkg_results)

            if error_handler_results[0] is True:
                paf.prSuccess(
                    'Pacback Was Able To Automatically Resolve This Error!')
                found_pkgs = error_handler_results[1]
            else:
                paf.prError(
                    'Pacback Was NOT Able To Automatically Resolve This Error!')
                error.create_error_report()

        # Branch if Packages are Missing
        elif len(found_pkgs) < len(pkg_results['search']):
            missing_pkg = set(pkg_results['search'] -
                              utils.trim_pkg_list(found_pkgs))
            paf.write_to_log(
                fname,
                str(len(found_pkgs)) + ' Out of ' +
                str(len(pkg_results['search'])) + ' Packages Found',
                config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                paf.prError(pkg)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(
                    fname, 'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', config)

        # This is the Best Case
        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages',
                             config['log'])

        print(str(len(found_pkgs)))
        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])

    else:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed',
                         config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        print('')
        paf.write_to_log(
            fname,
            str(len(pkg_results['a_pkgs'])) +
            ' Have Been Added Since Creation', config['log'])

        paf.prWarning(
            str(len(pkg_results['a_pkgs'])) +
            ' Packages Have Been Added Since Creation')
        for pkg in pkg_results['a_pkgs']:
            paf.prAdded(pkg)
        print('')
        if paf.yn_frame(
                'Do You Want to Remove These Packages From Your System?'
        ) is True:
            print('')
            paf.pacman(' '.join(pkg_results['a_pkgs']), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`',
                             config['log'])

    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
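A hedged sketch of the inputs this entry point expects; the key names are taken from the code above, the values are placeholders. 'search' holds the changed/removed package versions to hunt for in the caches, and 'a_pkgs' holds the packages added since the restore point was created.

# Hypothetical call (values are examples only).
parms = {'type': 'rp', 'id': '02'}
pkg_results = {
    'search': {'nano-5.0-1-x86_64', 'zsh-5.8-1-x86_64'},  # changed/removed
    'a_pkgs': {'htop'}                                     # added since creation
}
main(config, parms, pkg_results)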
Example #6
##########################
# Display Info For User
########################

if args.version:
    print('Pacback Version: ' + config['version'])
    print('PAF Version: ' + config['paf'])

if args.info:
    if re.findall(r'^(rp[0-9][0-9]$|rp[0-9]$|ss[0-9][0-9]$|ss[0-9])$',
                  args.info):
        user.print_info(config, args.info)
    else:
        paf.prError(
            'Invalid Input: Argument Must Specify Type and Number! (IE: rp02 or ss4)'
        )

if args.list:
    user.list_all(config)

if args.diff:
    if all(
            re.search(r'^(rp[0-9][0-9]$|rp[0-9]$|ss[0-9][0-9]$|ss[0-9])$', d)
            for d in args.diff):
        user.diff_meta(config, args.diff[0], args.diff[1])
    else:
        paf.prError(
            'Invalid Input: Argument Must Specify Type and Number! (IE: rp02 or ss4)'
        )
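A quick illustration of what the rp/ss pattern above accepts: a two-character type tag followed by one or two digits, nothing more.

# Stand-alone demonstration of the selection pattern (not part of pacback).
import re
pattern = r'^(rp[0-9][0-9]$|rp[0-9]$|ss[0-9][0-9]$|ss[0-9])$'
for arg in ('rp02', 'ss4', 'rp123', 'snapshot1'):
    print(arg, bool(re.search(pattern, arg)))
# rp02 True, ss4 True, rp123 False, snapshot1 False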
Example #7
def restore(config, info, dir_list, checksum):
    '''
    This is the main 'api' entrance point for file restoration.
    This function orchestrates the process, handing work off to other functions.
    '''
    fname = 'custom_dirs.restore()'
    unpack_path = info['tar'][:-4]
    p_len = len(unpack_path)
    paf.write_to_log(fname, 'Started Restoring Custom Files', config['log'])

    # Decompress Tar
    if os.path.exists(info['tar.gz']):
        paf.prWarning('Decompressing Custom Tar....')
        if any(re.findall('pigz', line.lower()) for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz -d ' + info['tar.gz'] + ' -f')
            paf.write_to_log(fname, 'Decompressed Tar With Pigz',
                             config['log'])
        else:
            paf.gz_d(info['tar.gz'])
            paf.write_to_log(fname, 'Decompressed Tar With Python',
                             config['log'])

    # Check Tar Csum And Unpack
    if os.path.exists(info['tar']):
        # Checksum Tar
        print('Checking Integrity of Tar...')
        tar_csum = paf.checksum_file(info['tar'])[1]
        paf.write_to_log(fname, 'Checksummed Tar', config['log'])

        if tar_csum == checksum:
            paf.write_to_log(fname, 'Tar Passed Checksum Integrity Check',
                             config['log'])
            paf.prSuccess('Tar Passed Integrity Check')
        else:
            paf.write_to_log(fname, 'Custom Tar Failed Integrity Check!',
                             config['log'])
            paf.prError('Custom Tar Failed Integrity Check!')
            paf.prBold('Skipping Custom File Restoration!')
            return

        # Clean Then Unpack Tar
        paf.prWarning('Unpacking Files from Tar....')
        paf.rm_dir(unpack_path, sudo=True)
        paf.untar_dir(info['tar'])
        paf.write_to_log(fname, 'Unpacked Custom Files From Tar',
                         config['log'])

    else:
        # Skip If Tar is Missing
        paf.write_to_log(
            fname, 'Meta Data File Specifies A Tar That is Now Missing!',
            config['log'])
        paf.prError('This Restore Point is Missing Its Custom Tar!')
        return

    if paf.yn_frame(
            'Do You Want to Compare Restore Point Files Against Your Current File System?'
    ) is True:
        results = compare_files(config, dir_list, unpack_path, p_len)
        # Exit If No Changes Made to Files
        if len(results['added']) + len(results['removed']) + len(
                results['changed']) == 0:
            paf.write_to_log(
                fname, 'Checksum Returned 0 Changed, Removed or Added Files',
                config['log'])
            paf.prSuccess('No Changes Have Been Made to Your File System!')
        else:
            smart_overwrite(config, results, unpack_path, p_len)

    else:
        force_overwrite(config, unpack_path, p_len)

    # Cleanup After Runtime
    repack(config, info, unpack_path)
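A hedged call sketch; the paths, directory list, and checksum are placeholders. The checksum is presumably the MD5 recorded in the restore point's meta data, and it is verified against the unpacked tar before anything is overwritten.

# Hypothetical invocation (paths and checksum are placeholders).
info = {
    'tar': '/path/to/rp02_dirs.tar',         # assumed tar location
    'tar.gz': '/path/to/rp02_dirs.tar.gz'    # assumed compressed tar location
}
dir_list = ['/etc', '/var/www']              # example custom directories
restore(config, info, dir_list, 'd41d8cd98f00b204e9800998ecf8427e')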
Example #8
def main(config, parms, pkg_results):
    '''
    This is the main restore logic for pacback. It should NOT be called directly as restore.main().
    This logic does the actual work of downgrading, removing, and installing packages.
    '''
    fname = 'restore.main(' + parms['type'] + parms['id'] + ')'

    # Branch if Packages Have Been Changed or Removed
    if pkg_results['search']:
        cache = utils.scan_caches(config)
        found_pkgs = utils.search_cache(pkg_results['search'], cache, config)

        # Branch if Packages are Missing
        if len(found_pkgs) != len(pkg_results['search']):
            missing_pkg = set(pkg_results['search'] -
                              utils.trim_pkg_list(found_pkgs))
            paf.write_to_log(
                fname,
                str(len(found_pkgs)) + ' Out of ' +
                str(len(pkg_results['search'])) + ' Packages Found',
                config['log'])

            paf.prWarning('Couldn\'t Find The Following Package Versions:')
            for pkg in missing_pkg:
                paf.prError(pkg)
            if paf.yn_frame('Do You Want To Continue Anyway?') is False:
                session.abort_fail(
                    fname, 'User Aborted Rollback Because of Missing Packages',
                    'Aborting Rollback!', config)

        else:
            paf.prSuccess('All Packages Found In Your Local File System!')
            paf.write_to_log(fname, 'Found All Changed and Removed Packages',
                             config['log'])

        paf.pacman(' '.join(found_pkgs), '-U')
        paf.write_to_log(fname, 'Sent Pacman Selected Packages', config['log'])

    else:
        paf.prSuccess('No Packages Have Been Changed or Removed!')
        paf.write_to_log(fname, 'No Packages Have Been Changed or Removed',
                         config['log'])

    # Branch if Packages Have Been Added
    if pkg_results['a_pkgs']:
        print('')
        paf.write_to_log(
            fname,
            str(len(pkg_results['a_pkgs'])) +
            ' Have Been Added Since Creation', config['log'])

        paf.prWarning(
            str(len(pkg_results['a_pkgs'])) +
            ' Packages Have Been Added Since Creation')
        for pkg in pkg_results['a_pkgs']:
            paf.prAdded(pkg)
        print('')
        if paf.yn_frame(
                'Do You Want to Remove These Packages From Your System?'
        ) is True:
            print('')
            paf.pacman(' '.join(pkg_results['a_pkgs']), '-R')
            paf.write_to_log(fname, 'Sent Added Packages To `pacman -R`',
                             config['log'])

    else:
        paf.prSuccess('No Packages Have Been Added!')
        paf.write_to_log(fname, 'No Packages Have Been Added', config['log'])
Example #9
def diff_meta(config, meta1, meta2):
    '''
    This function processes two meta data files without validating either.
    It compares meta1 as the base against meta2, then presents the results in a table.
    The code is kind of gross but I'm not inclined to fix it.
    '''
    # Build Base Vars
    m1_num = meta1[2:].zfill(2)
    m2_num = meta2[2:].zfill(2)

    if meta1.startswith('rp'):
        m1_path = config['rp_paths'] + '/rp' + m1_num + '.meta'
    elif meta1.startswith('ss'):
        m1_path = config['ss_paths'] + '/ss' + m1_num + '.meta'

    if meta2.startswith('rp'):
        m2_path = config['rp_paths'] + '/rp' + m2_num + '.meta'
    elif meta2.startswith('ss'):
        m2_path = config['ss_paths'] + '/ss' + m2_num + '.meta'

    # Return if Missing
    if not os.path.exists(m1_path):
        return paf.prError(meta1.upper() + ' Was NOT Found!')

    if not os.path.exists(m2_path):
        return paf.prError(meta2.upper() + ' Was NOT Found!')

    # Read Meta Data
    m1 = meta.read(config, m1_path)
    m2 = meta.read(config, m2_path)
    compare = meta.compare_meta(config, m1, m2)

    # Build Info For Table
    c1 = [
        'Installed Packages: ' + m1['pkgs_installed'], 'Date: ' + m1['date'],
        'Time: ' + m1['time'], 'Pacback Version: ' + m1['version'],
        'User Label: ' + m1['label']
    ]

    if m1['stype'] == 'Full':
        c1.append('Packages Cached: ' + m1['pkgs_cached'])
        c1.append('Cache Size: ' + m1['cache_size'])

    if m1['dir_list']:
        c1.append('')
        c1.append('File Count: ' + m1['file_count'])
        c1.append('Raw File Size: ' + m1['file_raw_size'])
        c1.append('Compressed Size: ' + m1['tar_size'])
        c1.append('')
        c1.append('Directory List')
        c1.append('--------------')
        for d in m1['dir_list']:
            c1.append(d)

    c2 = list(compare['c_pkgs'])
    if not c2:
        c2.append('NONE')

    c3 = list(compare['a_pkgs'])
    if not c3:
        c3.append('NONE')

    c4 = list(compare['r_pkgs'])
    if not c4:
        c4.append('NONE')

    c5 = [
        'Installed Packages: ' + m2['pkgs_installed'], 'Date: ' + m2['date'],
        'Time: ' + m2['time'], 'Pacback Version: ' + m2['version'],
        'User Label: ' + m2['label']
    ]

    if m2['stype'] == 'Full':
        c5.append('Packages Cached: ' + m2['pkgs_cached'])
        c5.append('Cache Size: ' + m2['cache_size'])

    if m2['dir_list']:
        c5.append('')
        c5.append('File Count: ' + m2['file_count'])
        c5.append('Raw File Size: ' + m2['file_raw_size'])
        c5.append('Compressed Size: ' + m2['tar_size'])
        c5.append('')
        c5.append('Directory List')
        c5.append('--------------')
        for d in m2['dir_list']:
            c5.append(d)

    # Build Table
    t = Table(title=m1['type'] + ' #' + m1_num + ' --------> ' + m2['type'] +
              ' #' + m2_num)
    t.add_column(meta1.upper() + ' Meta Info',
                 justify='left',
                 style='bold white',
                 no_wrap=True)
    t.add_column('Changed Since Creation',
                 justify='center',
                 style='yellow',
                 no_wrap=True)
    t.add_column('Added Since Creation',
                 justify='center',
                 style='green',
                 no_wrap=True)
    t.add_column('Removed Since Creation',
                 justify='center',
                 style='red',
                 no_wrap=True)
    t.add_column(meta2.upper() + ' Meta Info',
                 justify='right',
                 style='bold white',
                 no_wrap=True)

    # This Builds The Table Output Line by Line
    counter = 0
    for x in range(0, max(len(l) for l in [c1, c2, c3, c4, c5])):
        try:
            a = str(c1[counter])
        except Exception:
            a = ''

        try:
            b = str(c2[counter])
        except Exception:
            b = ''

        try:
            c = str(c3[counter])
        except Exception:
            c = ''

        try:
            d = str(c4[counter])
        except Exception:
            d = ''

        try:
            e = str(c5[counter])
        except Exception:
            e = ''

        t.add_row(a, b, c, d, e)
        counter += 1

    console = Console()
    console.print(t)
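A hedged call sketch; the IDs are placeholders. The first argument is the base meta and the second the one it is compared against.

# Hypothetical invocation (IDs are examples only): compare restore point 1
# against snapshot 3 and print the table.
diff_meta(config, 'rp1', 'ss3')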
Example #10
def print_info(config, selction):
    '''
    This function processes a meta data file without validating it,
    then compares the file to now and presents the results in a table.
    This acts as a 'dry run' of sorts, not only showing info in the meta data
    file but also showing what would be changed if it were actually restored.
    The code is kind of gross but I'm not inclined to fix it.
    '''
    # Build Base Vars
    m_num = selction[2:].zfill(2)

    if selction.startswith('rp'):
        m_path = config['rp_paths'] + '/rp' + m_num + '.meta'
    elif selction.startswith('ss'):
        m_path = config['ss_paths'] + '/ss' + m_num + '.meta'

    # Return if Missing
    if not os.path.exists(m_path):
        return paf.prError(selction.upper() + ' Was NOT Found!')

    # Load Meta and Compare
    m = meta.read(config, m_path)
    compare = meta.compare_now(config, m)

    # Build Data For Table
    c1 = [
        'Installed Packages: ' + m['pkgs_installed'], 'Date: ' + m['date'],
        'Time: ' + m['time'], 'Pacback Version: ' + m['version'],
        'User Label: ' + m['label']
    ]

    if m['stype'] == 'Full':
        c1.append('Packages Cached: ' + m['pkgs_cached'])
        c1.append('Cache Size: ' + m['cache_size'])

    if m['dir_list']:
        c1.append('')
        c1.append('File Count: ' + m['file_count'])
        c1.append('Raw File Size: ' + m['file_raw_size'])
        c1.append('Compressed Size: ' + m['tar_size'])
        c1.append('')
        c1.append('Directory List')
        c1.append('--------------')
        for d in m['dir_list']:
            c1.append(d)

    c2 = list(compare['c_pkgs'])
    if not c2:
        c2.append('NONE')

    c3 = list(compare['a_pkgs'])
    if not c3:
        c3.append('NONE')

    c4 = list(compare['r_pkgs'])
    if not c4:
        c4.append('NONE')

    # Build Table
    t = Table(title=m['type'] + ' #' + m_num)
    t.add_column('Meta Info', justify='left', style='bold white', no_wrap=True)
    t.add_column('Changed Since Creation',
                 justify='center',
                 style='yellow',
                 no_wrap=True)
    t.add_column('Added Since Creation',
                 justify='center',
                 style='green',
                 no_wrap=True)
    t.add_column('Removed Since Creation',
                 justify='center',
                 style='red',
                 no_wrap=True)

    # This Builds The Table Output Line by Line
    counter = 0
    for x in range(0, max(len(l) for l in [c1, c2, c3, c4])):
        try:
            a = str(c1[counter])
        except Exception:
            a = ''

        try:
            b = str(c2[counter])
        except Exception:
            b = ''

        try:
            c = str(c3[counter])
        except Exception:
            c = ''

        try:
            d = str(c4[counter])
        except Exception:
            d = ''

        t.add_row(a, b, c, d)
        counter += 1

    console = Console()
    console.print(t)
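A hedged call sketch; the ID is a placeholder. This prints the meta info table for a single restore point or snapshot without restoring anything.

# Hypothetical invocation (ID is an example only).
print_info(config, 'rp2')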