Example #1
def pacman_hook(install, config):
    '''
    Installs or removes a standard alpm hook in /usr/share/libalpm/hooks/
    which runs as a PreTransaction hook during every pacman transaction.
    `install = True` Installs Pacman Hook
    `install = False` Removes Pacman Hook
    '''

    if install:
        fname = 'utils.pacman_hook(install)'
        paf.write_to_log(fname, 'Starting Hook Installation...', config['log'])

        hook = [
            '[Trigger]', 'Operation = Install', 'Operation = Remove',
            'Operation = Upgrade', 'Type = Package', 'Target = *', '',
            '[Action]', 'Description = Pre-Upgrade Pacback Hook',
            'Depends = pacman', 'When = PreTransaction',
            'Exec = /usr/bin/pacback --hook'
        ]

        paf.export_iterable('/usr/share/libalpm/hooks/pacback.hook', hook)
        paf.prSuccess('Pacback Hook is Now Installed!')
        paf.write_to_log(fname, 'Installed Pacback PreTransaction Hook',
                         config['log'])

    else:
        fname = 'utils.pacman_hook(remove)'
        paf.write_to_log(fname, 'Starting Hook Removal...', config['log'])

        paf.rm_file('/usr/share/libalpm/hooks/pacback.hook', sudo=False)
        paf.write_to_log(fname, 'Removed Pacback PreTransaction Hook',
                         config['log'])
        paf.prSuccess('Pacback Hook Was Removed!')
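
A minimal usage sketch (hypothetical values; the config dict only needs the 'log' key for these calls):

# Hypothetical config: pacman_hook() only reads the 'log' path here
config = {'log': '/var/log/pacback.log'}
pacman_hook(True, config)   # writes /usr/share/libalpm/hooks/pacback.hook
pacman_hook(False, config)  # removes the hook file again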
Example #2
def archive_date(config, date):
    '''
    Automates the date rollback instructions found on the Arch Wiki:
    https://wiki.archlinux.org/index.php/Arch_Linux_Archive#How_to_restore_all_packages_to_a_specific_date
    '''
    # Startup
    fname = 'restore.archive_date(' + str(date) + ')'
    mirror = '/etc/pacman.d/mirrorlist'

    # Back Up the Existing Mirrorlist as a Failsafe
    if len(paf.read_file(mirror)) > 2:
        os.system('mv ' + mirror + ' ' + mirror + '.pacback')
        paf.write_to_log(fname, 'Backed Up Existing Mirrorlist', config['log'])
    else:
        paf.write_to_log(
            fname, 'Skipped Mirrorlist Backup. File Seems Malformed!',
            config['log'])

    paf.export_iterable(mirror, [
        '## Set By Pacback', 'Server=https://archive.archlinux.org/repos/' +
        date + '/$repo/os/$arch'
    ])
    paf.write_to_log(fname, 'Added ' + date + ' Archive URL To Mirrorlist',
                     config['log'])

    # Run a Pacman Update to Perform the Downgrade
    os.system('/usr/bin/pacman -Syyuu')
    paf.write_to_log(fname, 'Sent -Syyuu to Pacman', config['log'])

    # Restore the Non-Archive URL Mirrorlist
    if not os.path.exists(mirror + '.pacback'):
        paf.write_to_log(fname, 'Backup Mirrorlist Is Missing', config['log'])
        if paf.yn_frame('Missing Mirrorlist! Do You Want to Fetch a New HTTPS Mirrorlist?'):
            if utils.fetch_new_mirrorlist():
                paf.write_to_log(
                    fname, 'A New Mirrorlist Was Successfully Downloaded',
                    config['log'])
            else:
                session.abort_fail(fname, 'User Declined Country Selection!',
                                   'Please Manually Replace Your Mirrorlist!',
                                   config['log'])
        else:
            session.abort_fail(
                fname,
                'Backup Mirrorlist Is Missing and User Declined Download!',
                'Please Manually Replace Your Mirrorlist!', config['log'])
    else:
        os.system('mv ' + mirror + '.pacback ' + mirror)
        paf.write_to_log(fname, 'Backup Mirrorlist Was Restored Successfully',
                         config['log'])
        print('Refreshing Pacman Database...')
        os.system('/usr/bin/pacman -Sy > /dev/null')
        paf.write_to_log(fname,
                         'Updated Pacman Database After Restoring Mirrorlist',
                         config['log'])
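
The Arch Linux Archive encodes dates as YYYY/MM/DD, which is the format the Server line above expects. A hypothetical call and the mirrorlist it would write:

archive_date(config, '2020/01/15')
# /etc/pacman.d/mirrorlist then contains:
#   ## Set By Pacback
#   Server=https://archive.archlinux.org/repos/2020/01/15/$repo/os/$arch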
Example #3
def hlock_start(config):
    '''
    This starts a hook lock, overwriting any previous lock.
    This should be triggered at the end of a successful `--hook` run.
    '''
    fname = 'session.hlock_start(' + str(config['hook_cooldown']) + ')'
    stime = 'Created: ' + dt.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
    paf.export_iterable(config['hlock'], [stime])
    paf.write_to_log(fname, 'Created Hook Lock With ' + str(config['hook_cooldown']) + ' Second Cooldown', config['log'])
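
The lock file holds a single 'Created: ...' line in the strftime format used above. A sketch of how a later run might enforce the cooldown (this check is an assumption for illustration, not part of the project code):

import datetime as dt

# Hypothetical cooldown check against the lock written by hlock_start()
with open(config['hlock']) as f:
    stamp = f.read().strip().replace('Created: ', '')
created = dt.datetime.strptime(stamp, '%Y:%m:%d:%H:%M:%S')
if (dt.datetime.now() - created).total_seconds() < config['hook_cooldown']:
    print('Hook Lock Still Active!')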
Example #4
def gdelt_live(lang):
    '''
    Syncs the latest GDELT 2.0 update files for the given stream
    ('english' or 'translation') into a directory under the system temp dir.
    '''
    last_eng = 'http://data.gdeltproject.org/gdeltv2/lastupdate.txt'
    last_trans = 'http://data.gdeltproject.org/gdeltv2/lastupdate-translation.txt'
    old_fetch = tempfile.gettempdir() + '/gdelt-live/prev-' + lang + '.txt'
    dl_path = tempfile.gettempdir() + '/gdelt-live/' + lang

    # Download the Most Recent File Inventory for the Requested Stream
    if lang == 'english':
        dl = requests.get(last_eng)
    elif lang == 'translation':
        dl = requests.get(last_trans)
    else:
        raise ValueError('Unknown Stream: ' + lang)

    # Report Stream Status and Parse the Inventory Into URLs
    status = lang.upper() + ' Stream Status: ' + str(dl)[1:-1]  # e.g. 'Response [200]'
    print('-' * len(status))
    paf.prBold(status)
    print('-' * len(status))
    # Each Inventory Line Reads '<size> <md5> <url>'
    urls = {''.join(x.split(' ')[2:]) for x in dl.text.split('\n')[:-1]}

    # Compare and Diff
    if os.path.exists(old_fetch):
        old = paf.read_file(old_fetch, 'set')
        new = urls.difference(old)
        rm = old.difference(urls)

        if len(new) == 0:
            paf.prSuccess(lang.upper() + ' Live Files are Already Up-to-Date!')
            return
        else:
            # Remove Files That Dropped Out of the Inventory
            for x in rm:
                # Strip the '.zip' Suffix to Get the Extracted CSV Name
                os.remove(dl_path + '/' + x.split('/')[-1][:-4])

    else:
        # Setup If First Run
        if not os.path.exists(dl_path):
            os.makedirs(dl_path)
        new = urls

    # Download URLs
    for url in new:
        try:
            print('Downloading: ' + url.split('/')[-1])
            resp = requests.get(url)
            print('Decompressing: ' + url.split('/')[-1])
            with zipfile.ZipFile(io.BytesIO(resp.content), 'r') as csvzip:
                csvzip.extractall(dl_path)

        except Exception:
            # Covers Both 404 Responses and Corrupt Archives
            print('Failed: ' + url)

    # Export Final Results
    paf.export_iterable(old_fetch, urls)
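
Each line of lastupdate.txt has the form '<size> <md5> <url>', which is why the parser above keeps everything from the third field onward. A hypothetical invocation:

gdelt_live('english')      # sync the latest 15-minute English update files
gdelt_live('translation')  # sync the translated stream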
Example #5
def fresh_install(lang, uc, config):
    '''
    Bootstraps the on-disk state for a stream: validates the user config,
    creates the download directory, and seeds the 'prev' and '404' lists
    from whatever files are already on disk.
    '''
    if uc[lang + '_path'] == '/path/here':
        paf.prWarning('Your Config File Has Not Been Setup for the ' + lang.upper() + ' Stream!')
        sys.exit('Edit the File ' + config['user_config'] + ' and Re-Run Your Command!')

    if not os.path.exists(uc[lang + '_path']):
        os.makedirs(uc[lang + '_path'])

    paf.prWarning('Scanning File System...')
    files = paf.basenames(paf.find_files(uc[lang + '_path']))
    files = {"http://data.gdeltproject.org/gdeltv2/" + f for f in files}
    paf.export_iterable(config['base'] + '/prev-' + lang + '.txt', files)
    paf.export_iterable(config['base'] + '/404-' + lang + '.txt', [])
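
A sketch of the user-config dict this function reads (key names are taken from the lookups above; the values are hypothetical):

uc = {
    'english_path': '/data/gdelt/english',
    'translation_path': '/path/here',  # still the placeholder: triggers the warning above
    'start_date': 'all',               # consumed by gdelt_diff() in the next example
}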
Example #6
def gdelt_diff(lang, uc, config):
    '''
    Diffs the remote GDELT inventory against the previous run and downloads
    any missing files, tracking failed URLs in a per-stream 404 list.
    '''
    dlp_path = config['base'] + '/prev-' + lang + '.txt'
    fzf_path = config['base'] + '/404-' + lang + '.txt'

    # Download and Filter URLs
    url = config[lang]
    print_stream_status(lang, url)
    paf.prBold('Downloading ' + lang.upper() + ' Stream Inventory File...')
    dln = requests.get(url)
    dlc = {''.join(x.split(' ')[2:]) for x in dln.text.split('\n')[:-1]}

    # Filter URL Based On Start Date
    if uc['start_date'] != 'all':
        d = uc['start_date'].split('/')
        days = {day.replace('-', '') for day in paf.date_to_today(int(d[0]), int(d[1]), int(d[2]))}
        filtered = set()
        for x in dlc:
            if paf.basename(x)[:8] in days:
                filtered.add(x)
        dlc = filtered

    # Run Install If Fresh Run
    if not os.path.exists(dlp_path):
        fresh_install(lang, uc, config)

    # Compare Previous Run
    dlp = paf.read_file(dlp_path)
    diff = dlc.difference(dlp)

    # Download Files Into Place
    if len(diff) > 10000:
        if paf.yn_frame(str(len(diff)) + ' Files Are Missing! Do You Still Want to Continue?'):
            print('This May Take a While! Starting Download...')
        else:
            sys.exit()
    if diff:
        fzf = fetch(diff, uc[lang + '_path'])
        paf.export_iterable(dlp_path, dlc)
        # Merge Previously Failed URLs Back Into the 404 List
        for x in paf.read_file(fzf_path):
            fzf.add(x)
        paf.export_iterable(fzf_path, fzf)
    else:
        paf.prSuccess('All Files Are Up To Date!')
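
The start-date filter above compares the first 8 characters of each basename, i.e. the YYYYMMDD stamp GDELT embeds in its file names. An illustration with a hypothetical URL:

url = 'http://data.gdeltproject.org/gdeltv2/20200115103000.export.CSV.zip'
print(url.split('/')[-1][:8])  # '20200115', checked against the allowed day set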
Example #7
def main(config, info):
    '''
    This is pacback's main method for orchestrating the creation of a
    fallback point. It shouldn't be called directly as create.main()
    but rather by a 'higher' level call that stages the system for the
    actual creation process.
    '''
    fname = 'create.main(' + info['type'] + info['id'] + ')'
    paf.write_to_log(
        fname, 'Building ID:' + info['id'] + ' As ' + info['STYPE'] + ' ' +
        info['TYPE'], config['log'])

    # Light Restore Point
    if info['STYPE'] == 'Light':
        if info['dir_list']:
            session.abort_fail(
                fname,
                'Custom Dirs Are Not Allowed With STYPE: ' + info['STYPE'],
                'Light ' + info['TYPE'] +
                ' DO NOT Support Custom Dirs! Please Use The `-f` Flag',
                config)
    # Full Restore Point
    elif info['STYPE'] == 'Full':
        pkg_search = paf.replace_spaces(utils.pacman_Q(), '-')
        found_pkgs = utils.search_cache(pkg_search, utils.scan_caches(config),
                                        config)
        pkg_size = paf.size_of_files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            paf.write_to_log(fname, 'Not All Packages Were Found!',
                             config['log'])
            pkg_split = utils.trim_pkg_list(found_pkgs)
            print('')
            paf.prBold('======================================')
            paf.prWarning('The Following Packages Were NOT Found!')
            paf.prBold('======================================')
            for pkg in pkg_search - pkg_split:
                paf.prWarning(pkg)
            print('')

            if not info['nc']:
                if paf.yn_frame('Do You Still Want to Continue?') in (False, None):
                    session.abort(fname, 'User Aborted Due to Missing Pkgs',
                                  'Aborting Creation!', config)

        # Make Folders and Hardlink Packages
        paf.mk_dir(info['path'], sudo=False)
        paf.mk_dir(info['pkgcache'], sudo=False)

        for pkg in found_pkgs:
            os.link(pkg, info['pkgcache'] + '/' + paf.basename(pkg))
        paf.write_to_log(fname,
                         'HardLinked ' + str(len(found_pkgs)) + ' Packages',
                         config['log'])

        # Store User-Selected Custom Dirs
        if info['dir_list']:
            paf.write_to_log(
                fname, 'User Selected Version Dependent Folders For Storage',
                config['log'])
            pack_results = custom_dirs.store(config, info)

    # Generate Meta Data File
    current_pkgs = utils.pacman_Q()
    meta = [
        '======= Pacback Info =======', 'Version: ' + config['version'],
        'Label: ' + info['label'],
        'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
        'Time Created: ' + dt.datetime.now().strftime("%H:%M:%S"),
        'Type: ' + info['TYPE'], 'SubType: ' + info['STYPE'],
        'Packages Installed: ' + str(len(current_pkgs))
    ]

    if info['STYPE'] == 'Full':
        meta.append('Packages Cached: ' + str(len(found_pkgs)))
        meta.append('Package Cache Size: ' + paf.convert_size(pkg_size))

    if info['dir_list']:
        meta.append('Dir File Count: ' + str(pack_results['file_count']))
        meta.append('Dir Raw Size: ' + pack_results['raw_size'])
        meta.append('Tar Compressed Size: ' + pack_results['compressed_size'])
        meta.append('Tar Checksum: ' + pack_results['csum'])

        meta.append('')
        meta.append('========= Dir List =========')
        for d in info['dir_list']:
            meta.append(d)

    meta.append('')
    meta.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta.append(pkg)

    # Export Final Meta Data File
    paf.export_iterable(info['meta'], meta)
    paf.write_to_log(fname, 'Generated Meta Data File', config['log'])
    # Checksum Meta Data File
    paf.export_iterable(info['meta_md5'], [paf.checksum_file(info['meta'])[1]])
    paf.write_to_log(fname, 'Generated Meta Data Checksum', config['log'])
    # Finish and Return
    paf.write_to_log(
        fname, 'Main Build Complete of ID:' + info['id'] + ' As ' +
        info['STYPE'] + ' ' + info['TYPE'], config['log'])
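
A sketch of the info dict main() consumes (key names come from the lookups above; every value here is hypothetical):

info = {
    'type': 'rp', 'id': '00',
    'TYPE': 'Restore Point', 'STYPE': 'Full',
    'label': 'Before Kernel Upgrade',
    'nc': False, 'dir_list': [],
    'path': '/var/lib/pacback/restore-points/rp00',
    'pkgcache': '/var/lib/pacback/restore-points/rp00/pkg-cache',
    'meta': '/var/lib/pacback/restore-points/rp00.meta',
    'meta_md5': '/var/lib/pacback/restore-points/rp00.md5',
}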
Example #8
def retry(lang, uc, config):
    '''
    Replays every URL in the per-stream 404 list through fetch() and
    writes back whichever URLs failed again.
    '''
    fzf_path = config['base'] + '/404-' + lang + '.txt'
    fzf = fetch(paf.read_file(fzf_path), uc[lang + '_path'])
    paf.export_iterable(fzf_path, fzf)
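
A hypothetical retry pass over a previously recorded 404 list:

retry('english', uc, config)  # re-attempts each failed URL; failures are written back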