Example #1
# Imports assumed by Examples #1-#4; paf is the pacback author's helper
# library, utils is pacback's own module, and track() comes from rich.
import os
import pickle
import tarfile
import tempfile

import paf
from rich.progress import track

import utils


def store(config, info):
    '''
    Packs up user defined directories.
    '''
    fname = 'custom_dirs.store()'
    paf.write_to_log(
        fname,
        str(len(info['dir_list'])) + ' Folders Selected For Storage',
        config['log'])
    tmpfile = tempfile.gettempdir() + '/folder_permissions.pickle'

    # Fetch Folder Permissions and Pickle
    folder_perms = set()
    for d in info['dir_list']:
        folder_perms.update(paf.get_permissions(d, 'folders'))
    with open(tmpfile, 'wb') as f:
        pickle.dump(folder_perms, f)

    # Scan For Files
    files = paf.find_files(info['dir_list'])

    # Pack Custom Files Into Tar
    with tarfile.open(info['tar'], 'w') as tar:
        tar.add(tmpfile, arcname='folder_permissions.pickle')
        for f in track(files, description='Adding Files to Tar'):
            tar.add(f)
    paf.rm_file(tmpfile, sudo=False)

    paf.write_to_log(fname, 'Created ' + info['tar'], config['log'])

    # Create Checksum for Tar
    print('Creating Checksum...')
    pack_csum = paf.checksum_file(info['tar'])[1]
    paf.write_to_log(fname, 'Checksummed Tar', config['log'])

    # Compress Custom Tar
    print('Compressing Custom Tar...')
    # Use pigz for parallel compression if pacman reports it installed
    if any('pigz' in line.lower() for line in utils.pacman_Q()):
        os.system('/usr/bin/pigz ' + info['tar'] + ' -f')
    else:
        paf.gz_c(info['tar'], rm=True)
    paf.write_to_log(fname, 'Compressed ' + info['tar'], config['log'])

    pack_results = {
        'file_count': len(files),
        'raw_size': paf.convert_size(paf.size_of_files(files)),
        'compressed_size': paf.convert_size(os.path.getsize(info['tar'] + '.gz')),
        'csum': pack_csum
    }

    return pack_results
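
# A minimal usage sketch for store(); the config/info keys shown are the
# ones this function actually reads, but the paths are hypothetical
# stand-ins, not pacback's real defaults.
config = {'log': '/var/log/pacback.log'}
info = {
    'dir_list': ['/etc', '/home/user/.config'],
    'tar': '/var/lib/pacback/restore-points/rp00.tar'
}
pack_results = store(config, info)
print('Stored ' + str(pack_results['file_count']) + ' files')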
Example #2
def repack(config, info, unpack_path):
    '''
    Cleans up after comparing an already created custom tar.
    '''
    fname = 'custom_dirs.repack()'
    paf.rm_dir(unpack_path, sudo=False)
    paf.write_to_log(fname, 'Cleaned Up Unpacked Files', config['log'])

    if os.path.exists(info['tar']):
        # Re-Compress Custom Tar
        print('Re-Compressing Tar...')
        if any('pigz' in line.lower() for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz ' + info['tar'] + ' -f')
        else:
            paf.gz_c(info['tar'], rm=True)
        paf.write_to_log(fname, 'Compressed ' + info['tar'], config['log'])
Example #3
def compare_now(config, old_meta):
    # Compare the pkg_list from an old meta data file against pacman -Q
    return compare(config, old_meta['pkg_list'], utils.pacman_Q())
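
# Usage sketch: old_meta would be a parsed restore point meta data file
# (Example #5 shows how it is written), with 'pkg_list' holding the recorded
# pacman -Q output. The values below are illustrative only.
old_meta = {'pkg_list': ['linux 5.4.1-1', 'pacman 5.2.1-1']}
changes = compare_now({'log': '/var/log/pacback.log'}, old_meta)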
Example #4
def restore(config, info, dir_list, checksum):
    '''
    This is the main 'api' entrance point for file restoration.
    This function orchestrates the process, handing off work to other funcs.
    '''
    fname = 'custom_dirs.restore()'
    unpack_path = info['tar'][:-4]  # strip the '.tar' extension
    p_len = len(unpack_path)
    paf.write_to_log(fname, 'Started Restoring Custom Files', config['log'])

    # Decompress Tar
    if os.path.exists(info['tar.gz']):
        paf.prWarning('Decompressing Custom Tar....')
        if any('pigz' in line.lower() for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz -d ' + info['tar.gz'] + ' -f')
            paf.write_to_log(fname, 'Decompressed Tar With Pigz',
                             config['log'])
        else:
            paf.gz_d(info['tar.gz'])
            paf.write_to_log(fname, 'Decompressed Tar With Python',
                             config['log'])

    # Check Tar Csum And Unpack
    if os.path.exists(info['tar']):
        # Checksum Tar
        print('Checking Integrity of Tar...')
        tar_csum = paf.checksum_file(info['tar'])[1]
        paf.write_to_log(fname, 'Checksummed Tar', config['log'])

        if tar_csum == checksum:
            paf.write_to_log(fname, 'Tar Passed Checksum Integrity Check',
                             config['log'])
            paf.prSuccess('Tar Passed Integrity Check')
        else:
            paf.write_to_log(fname, 'Custom Tar Failed Integrity Check!',
                             config['log'])
            paf.prError('Custom Tar Failed Integrity Check!')
            paf.prBold('Skipping Custom File Restoration!')
            return

        # Clean Then Unpack Tar
        paf.prWarning('Unpacking Files from Tar....')
        paf.rm_dir(unpack_path, sudo=True)
        paf.untar_dir(info['tar'])
        paf.write_to_log(fname, 'Unpacked Custom Files From Tar',
                         config['log'])

    else:
        # Skip If Tar is Missing
        paf.write_to_log(
            fname, 'Meta Data File Specifies A Tar That is Now Missing!',
            config['log'])
        paf.prError('This Restore Point is Missing Its Custom Tar!')
        return

    if paf.yn_frame(
            'Do You Want to Compare Restore Point Files Against Your Current File System?'
    ) is True:
        results = compare_files(config, dir_list, unpack_path, p_len)
        # Exit If No Changes Made to Files
        if len(results['added']) + len(results['removed']) + len(
                results['changed']) == 0:
            paf.write_to_log(
                fname, 'Checksum Returned 0 Changed, Removed or Added Files',
                config['log'])
            paf.prSuccess('No Changes Have Been Made to Your File System!')
        else:
            smart_overwrite(config, results, unpack_path, p_len)

    else:
        force_overwrite(config, unpack_path, p_len)

    # Cleanup After Runtime
    repack(config, info, unpack_path)
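
# Usage sketch for restore(); the checksum argument is the value recorded on
# the 'Tar Checksum:' line of the restore point's meta data file (see
# Example #5). All values below are hypothetical stand-ins.
config = {'log': '/var/log/pacback.log'}
info = {
    'tar': '/var/lib/pacback/restore-points/rp00.tar',
    'tar.gz': '/var/lib/pacback/restore-points/rp00.tar.gz'
}
restore(config, info,
        dir_list=['/etc', '/home/user/.config'],
        checksum='0' * 32)  # placeholder md5 hex digest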
Example #5
# Additional imports for this example; dt is the datetime module, while
# custom_dirs, session and utils are pacback's own modules.
import os
import datetime as dt

import paf

import custom_dirs
import session
import utils


def main(config, info):
    '''
    This is pacbacks main method for orchestrating the creation of a
    fallback point. It shouldn't be called directly as create.main()
    but rather by a 'higher' level call that stages the system for the
    actual creation process.
    '''
    fname = 'create.main(' + info['type'] + info['id'] + ')'
    paf.write_to_log(
        fname, 'Building ID:' + info['id'] + ' As ' + info['STYPE'] + ' ' +
        info['TYPE'], config['log'])

    # Light Restore Point
    if info['STYPE'] == 'Light':
        if info['dir_list']:
            session.abort_fail(
                fname,
                'Custom Dirs Are Not Allowed With STYPE: ' + info['STYPE'],
                'Light ' + info['TYPE'] +
                ' DO NOT Support Custom Dirs! Please Use The `-f` Flag',
                config)
    # Full Restore Point
    elif info['STYPE'] == 'Full':
        pkg_search = paf.replace_spaces(utils.pacman_Q(), '-')
        found_pkgs = utils.search_cache(pkg_search, utils.scan_caches(config),
                                        config)
        pkg_size = paf.size_of_files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            paf.write_to_log(fname, 'Not All Packages Were Found!',
                             config['log'])
            pkg_split = utils.trim_pkg_list(found_pkgs)
            print('')
            paf.prBold('======================================')
            paf.prWarning('The Following Packages Were NOT Found!')
            paf.prBold('======================================')
            for pkg in set(pkg_search - pkg_split):
                paf.prWarning(pkg)
            print('')

            if info['nc'] is False:
                # not() also treats None (no answer given) as a refusal;
                # the original `is False or None` never matched None
                if not paf.yn_frame('Do You Still Want to Continue?'):
                    session.abort(fname, 'User Aborted Due to Missing Pkgs',
                                  'Aborting Creation!', config)

        # Make Folders and Hardlink Packages
        paf.mk_dir(info['path'], sudo=False)
        paf.mk_dir(info['pkgcache'], sudo=False)

        for pkg in found_pkgs:
            os.link(pkg, info['pkgcache'] + '/' + paf.basename(pkg))
        paf.write_to_log(fname,
                         'HardLinked ' + str(len(found_pkgs)) + ' Packages',
                         config['log'])

        # Store Custom Dirs
        if info['dir_list']:
            paf.write_to_log(
                fname, 'User Selected Version Dependent Folders For Storage',
                config['log'])
            pack_results = custom_dirs.store(config, info)

    # Generate Meta Data File
    current_pkgs = utils.pacman_Q()
    meta = [
        '======= Pacback Info =======',
        'Version: ' + config['version'],
        'Label: ' + info['label'],
        'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
        'Time Created: ' + dt.datetime.now().strftime("%H:%M:%S"),
        'Type: ' + info['TYPE'],
        'SubType: ' + info['STYPE'],
        'Packages Installed: ' + str(len(current_pkgs))
    ]

    if info['STYPE'] == 'Full':
        meta.append('Packages Cached: ' + str(len(found_pkgs)))
        meta.append('Package Cache Size: ' + paf.convert_size(pkg_size))

    if info['dir_list']:
        meta.append('Dir File Count: ' + str(pack_results['file_count']))
        meta.append('Dir Raw Size: ' + pack_results['raw_size'])
        meta.append('Tar Compressed Size: ' + pack_results['compressed_size'])
        meta.append('Tar Checksum: ' + pack_results['csum'])

        meta.append('')
        meta.append('========= Dir List =========')
        for d in info['dir_list']:
            meta.append(d)

    meta.append('')
    meta.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta.append(pkg)

    # Export Final Meta Data File
    paf.export_iterable(info['meta'], meta)
    paf.write_to_log(fname, 'Generated Meta Data File', config['log'])
    # Checksum Meta Data File
    paf.export_iterable(info['meta_md5'], [paf.checksum_file(info['meta'])[1]])
    paf.write_to_log(fname, 'Generated Meta Data Checksum', config['log'])
    # Finish and Return
    paf.write_to_log(
        fname, 'Main Build Complete of ID:' + info['id'] + ' As ' +
        info['STYPE'] + ' ' + info['TYPE'], config['log'])
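
# For reference, a Full restore point with custom dirs produces a meta data
# file shaped like this (all values are illustrative, not real output):
#
#   ======= Pacback Info =======
#   Version: 2.0.0
#   Label: nightly
#   Date Created: 2020/01/01
#   Time Created: 12:00:00
#   Type: Restore Point
#   SubType: Full
#   Packages Installed: 1042
#   Packages Cached: 1040
#   Package Cache Size: 6.2 GB
#   Dir File Count: 321
#   Dir Raw Size: 1.1 GB
#   Tar Compressed Size: 400 MB
#   Tar Checksum: <md5 of the custom tar>
#
#   ========= Dir List =========
#   /etc
#
#   ======= Pacman List ========
#   linux 5.4.1-1
#   ...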