Example #1
import io
import os
import tempfile
import zipfile

import requests

import paf  # helper library shared by these examples


def gdelt_live(lang):
    last_eng = 'http://data.gdeltproject.org/gdeltv2/lastupdate.txt'
    last_trans = 'http://data.gdeltproject.org/gdeltv2/lastupdate-translation.txt'
    old_fetch = tempfile.gettempdir() + '/gdelt-live/prev-' + lang + '.txt'
    dl_path = tempfile.gettempdir() + '/gdelt-live/' + lang

    # Downloading Most Recent File List
    if lang == 'english':
        dl = requests.get(last_eng)
    elif lang == 'translation':
        dl = requests.get(last_trans)
    else:
        # Guard against an undefined `dl` further down
        raise ValueError('Unknown Stream: ' + lang)

    # Get File and Filter URLs
    status = (lang.upper() + ' Stream Status: ' + str(dl)[1:-1])
    print('-' * len(status))
    paf.prBold(status)
    print('-' * len(status))
    urls = {''.join(x.split(' ')[2:]) for x in dl.text.split('\n')[:-1]}

    # Compare and Diff
    if os.path.exists(old_fetch):
        old = paf.read_file(old_fetch, 'set')
        new = set(urls.difference(old))
        rm = set(old.difference(urls))

        if len(new) == 0:
            paf.prSuccess(lang.upper() + ' Live Files are Already Up-to-Date!')
            return
        else:
            # Remove Old Files
            for x in rm:
                os.remove(dl_path + '/' + x.split('/')[-1][:-4])

    else:
        # Setup If First Run
        if not os.path.exists(dl_path):
            os.makedirs(dl_path)
        new = urls

    # Download URLs
    for url in new:
        try:
            print('Downloading: ' + url.split('/')[-1])
            resp = requests.get(url)
            print('Decompressing: ' + url.split('/')[-1])
            with zipfile.ZipFile(io.BytesIO(resp.content), 'r') as csvzip:
                csvzip.extractall(dl_path)

        except Exception:
            # A 404 returns a non-zip body, so ZipFile raises and lands here;
            # network failures are caught the same way
            print('Failed: ' + url)

    # Export Final Results
    paf.export_iterable(old_fetch, urls)
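
The function's core is an inventory diff: fetch the three-column lastupdate.txt (size, md5, url per line), keep the URL column, and download only what was absent last run. A minimal standalone sketch of that step, assuming only the documented file format (the helper name fetch_new_urls is illustrative, not part of the project):

import requests

def fetch_new_urls(inventory_url, previous):
    # Each non-empty line is '<size> <md5> <url>'; keep the URL column
    # and diff against the set recorded on the previous run.
    text = requests.get(inventory_url, timeout=30).text
    current = {line.split(' ')[2] for line in text.splitlines() if line.strip()}
    return current - previous, current

new, current = fetch_new_urls(
    'http://data.gdeltproject.org/gdeltv2/lastupdate.txt', set())
print(str(len(new)) + ' new files')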
Example #2
import os

import paf
import session  # project-local modules (assumed layout)
import utils
# main() is Example #5, defined in the same module


def restore_point(config, num, full_rp, dir_list, no_confirm, label):
    '''
    Assembles all the info for main() and stages the file system
    for the creation of a restore point. It is assumed that user input
    has been cleansed by this point.
    '''
    num = str(num).zfill(2)
    fname = 'create.restore_point(' + num + ')'
    paf.write_to_log(fname, 'Started Restore Point Creation...', config['log'])

    info = {
        'id': num,
        'type': 'rp',
        'TYPE': 'Restore Point',
        'stype': 'f' if full_rp is True else 'l',
        'STYPE': 'Full' if full_rp is True else 'Light',
        'nc': no_confirm,
        'label': str(label),
        'meta': config['rp_paths'] + '/rp' + num + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + num + '.md5',
        'dir_list': dir_list,
        'path': config['rp_paths'] + '/rp' + num,
        'pkgcache': config['rp_paths'] + '/rp' + num + '/pkg-cache',
        'tar': config['rp_paths'] + '/rp' + num + '/rp' + num + '_dirs.tar'
    }

    # Check for Pre-Existing Restore Point
    if os.path.exists(info['meta']) or os.path.exists(info['path']):
        paf.prWarning('Restore Point #' + info['id'] + ' Already Exists!')

        if info['nc'] is False:
            # Abort unless the user gives an explicit yes
            # (yn_frame may return False or None)
            if paf.yn_frame('Do You Want to Overwrite It?') is not True:
                session.abort(fname,
                              'User Aborted Overwrite of RP #' + info['id'],
                              'Aborting Creation!', config)
        utils.remove_id(config, info)

    # Create Restore Point After Checks
    paf.write_to_log(fname, 'All Checks Passed! Handing Off to create.main()',
                     config['log'])
    paf.prBold('Building ' + info['STYPE'] + ' ' + info['TYPE'] + ' ' +
               info['id'] + '...')
    main(config, info)

    # Finish After Successful Creation
    paf.write_to_log(fname, 'Restore Point Creation Complete!', config['log'])
    paf.prBold('Restore Point Creation Complete!')
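
A hypothetical invocation; the config keys mirror only what the function reads above, and the paths and label are illustrative:

config = {
    'log': '/var/log/pacback.log',
    'rp_paths': '/var/lib/pacback/restore-points',
}
restore_point(config, num=1, full_rp=True,
              dir_list=['/etc', '/usr/local/bin'],
              no_confirm=False, label='Pre-Upgrade')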
Example #3
import os
import sys

import requests

import paf
# fetch(), fresh_install(), and print_stream_status() are defined
# alongside this function (see Example #7 for print_stream_status)


def gdelt_diff(lang, uc, config):
    dlp_path = config['base'] + '/prev-' + lang + '.txt'
    fzf_path = config['base'] + '/404-' + lang + '.txt'

    # Download and Filter URLs
    url = config[lang]
    print_stream_status(lang, url)
    paf.prBold('Downloading ' + lang.upper() + ' Stream Inventory File...')
    dln = requests.get(url)
    dlc = {''.join(x.split(' ')[2:]) for x in dln.text.split('\n')[:-1]}

    # Filter URLs Based On Start Date
    if uc['start_date'] != 'all':
        d = uc['start_date'].split('/')
        days = {dt.replace('-', '') for dt in paf.date_to_today(int(d[0]), int(d[1]), int(d[2]))}
        filtered = set()
        for x in dlc:
            if paf.basename(x)[:8] in days:
                filtered.add(x)
        dlc = filtered

    # Run Install If Fresh Run
    if not os.path.exists(dlp_path):
        fresh_install(lang, uc, config)

    # Compare Previous Run
    dlp = paf.read_file(dlp_path)
    diff = set(dlc).difference(dlp)

    # Download Files Into Place
    if len(diff) > 10000:
        if paf.yn_frame(str(len(diff)) + ' Files Are Missing! Do You Still Want to Continue?') is True:
            print('This May Take a While! Starting Download...')
        else:
            sys.exit()
    if len(diff) > 0:
        fzf = fetch(diff, uc[lang + '_path'])
        paf.export_iterable(dlp_path, dlc)
        for x in paf.read_file(fzf_path):
            fzf.add(x)
        paf.export_iterable(fzf_path, fzf)
    else:
        paf.prSuccess('All Files Are Up To Date!')
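
The start-date filter keys on the first eight characters of each basename, since GDELT file names begin with a YYYYMMDDHHMMSS stamp. A stdlib-only sketch of the same idea, with paf.date_to_today replaced by a plain datetime loop (the function name is illustrative):

import datetime as dt
import os

def filter_by_start_date(urls, start):
    # Build the set of YYYYMMDD day stamps from `start` through today,
    # then keep only URLs whose basename falls on one of those days.
    days = set()
    day = start
    while day <= dt.date.today():
        days.add(day.strftime('%Y%m%d'))
        day += dt.timedelta(days=1)
    return {u for u in urls if os.path.basename(u)[:8] in days}

urls = {'http://data.gdeltproject.org/gdeltv2/20240102030000.export.CSV.zip'}
print(filter_by_start_date(urls, dt.date(2024, 1, 1)))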
Example #4
import os
import re

import paf
import utils  # project-local module (assumed layout)
# compare_files(), smart_overwrite(), force_overwrite(), and repack()
# are defined alongside this function


def restore(config, info, dir_list, checksum):
    '''
    This is the main 'api' entrance point for file restoration.
    This function orchestrates the process, handing off work to other funcs.
    '''
    fname = 'custom_dirs.restore()'
    unpack_path = info['tar'][:-4]
    p_len = len(unpack_path)
    paf.write_to_log(fname, 'Started Custom File Restoration...',
                     config['log'])

    # Decompress Tar
    if os.path.exists(info['tar.gz']):
        paf.prWarning('Decompressing Custom Tar....')
        if any(re.findall('pigz', line.lower()) for line in utils.pacman_Q()):
            os.system('/usr/bin/pigz -d ' + info['tar.gz'] + ' -f')
            paf.write_to_log(fname, 'Decompressed Tar With Pigz',
                             config['log'])
        else:
            paf.gz_d(info['tar.gz'])
            paf.write_to_log(fname, 'Decompressed Tar With Python',
                             config['log'])

    # Check Tar Csum And Unpack
    if os.path.exists(info['tar']):
        # Checksum Tar
        print('Checking Integrity of Tar...')
        tar_csum = paf.checksum_file(info['tar'])[1]
        paf.write_to_log(fname, 'Checksummed Tar', config['log'])

        if tar_csum == checksum:
            paf.write_to_log(fname, 'Tar Passed Checksum Integrity Check',
                             config['log'])
            paf.prSuccess('Tar Passed Integrity Check')
        else:
            paf.write_to_log(fname, 'Custom Tar Failed Integrity Check!',
                             config['log'])
            paf.prError('Custom Tar Failed Integrity Check!')
            paf.prBold('Skipping Custom File Restoration!')
            return

        # Clean Then Unpack Tar
        paf.prWarning('Unpacking Files from Tar....')
        paf.rm_dir(unpack_path, sudo=True)
        paf.untar_dir(info['tar'])
        paf.write_to_log(fname, 'Unpacked Custom Files From Tar',
                         config['log'])

    else:
        # Skip If Tar is Missing
        paf.write_to_log(
            fname, 'Meta Data File Specifies A Tar That is Now Missing!',
            config['log'])
        paf.prError('This Restore Point is Missing Its Custom Tar!')
        return

    if paf.yn_frame('Do You Want to Compare Restore Point Files'
                    ' Against Your Current File System?') is True:
        results = compare_files(config, dir_list, unpack_path, p_len)
        # Exit If No Changes Were Made to Files
        changes = (len(results['added']) + len(results['removed'])
                   + len(results['changed']))
        if changes == 0:
            paf.write_to_log(
                fname, 'Checksum Returned 0 Changed, Removed or Added Files',
                config['log'])
            paf.prSuccess('No Changes Have Been Made to Your File System!')
        else:
            smart_overwrite(config, results, unpack_path, p_len)

    else:
        force_overwrite(config, unpack_path, p_len)

    # Cleanup After Runtime
    repack(config, info, unpack_path)
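
paf.checksum_file is not shown here, but the dot-prefixed .md5 files used throughout suggest an MD5 digest. A stdlib sketch of the same integrity check, under that assumption:

import hashlib

def md5_of_file(path, chunk=1 << 20):
    # Stream in 1 MiB blocks so a multi-gigabyte tar never loads into memory.
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            h.update(block)
    return h.hexdigest()

# Mirrors the check above: proceed only if the tar matches the stored sum.
# if md5_of_file(info['tar']) == checksum: ...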
Example #5
import datetime as dt
import os

import paf
import custom_dirs  # project-local modules (assumed layout)
import session
import utils


def main(config, info):
    '''
    This is pacback's main method for orchestrating the creation of a
    fallback point. It shouldn't be called directly (as create.main())
    but rather through a 'higher' level call that stages the system for
    the actual creation process.
    '''
    fname = 'create.main(' + info['type'] + info['id'] + ')'
    paf.write_to_log(
        fname, 'Building ID:' + info['id'] + ' As ' + info['STYPE'] + ' ' +
        info['TYPE'], config['log'])

    # Light Restore Point
    if info['STYPE'] == 'Light':
        if info['dir_list']:
            session.abort_fail(
                fname,
                'Custom Dirs Are Not Allowed With STYPE: ' + info['STYPE'],
                'Light ' + info['TYPE'] +
                ' DO NOT Support Custom Dirs! Please Use The `-f` Flag',
                config)
    # Full Restore Point
    elif info['STYPE'] == 'Full':
        pkg_search = paf.replace_spaces(utils.pacman_Q(), '-')
        found_pkgs = utils.search_cache(pkg_search, utils.scan_caches(config),
                                        config)
        pkg_size = paf.size_of_files(found_pkgs)

        # Ask About Missing Pkgs
        if len(found_pkgs) != len(pkg_search):
            paf.write_to_log(fname, 'Not All Packages Were Found!',
                             config['log'])
            pkg_split = utils.trim_pkg_list(found_pkgs)
            print('')
            paf.prBold('======================================')
            paf.prWarning('The Following Packages Were NOT Found!')
            paf.prBold('======================================')
            for pkg in set(pkg_search - pkg_split):
                paf.prWarning(pkg)
            print('')

            if info['nc'] is False:
                # Abort unless the user gives an explicit yes
                # (yn_frame may return False or None)
                if paf.yn_frame('Do You Still Want to Continue?') is not True:
                    session.abort(fname, 'User Aborted Due to Missing Pkgs',
                                  'Aborting Creation!', config)

        # Make Folders and Hardlink Packages
        paf.mk_dir(info['path'], sudo=False)
        paf.mk_dir(info['pkgcache'], sudo=False)

        for pkg in found_pkgs:
            os.link(pkg, info['pkgcache'] + '/' + paf.basename(pkg))
        paf.write_to_log(fname,
                         'HardLinked ' + str(len(found_pkgs)) + ' Packages',
                         config['log'])

        # Search Custom Dirs
        if info['dir_list']:
            paf.write_to_log(
                fname, 'User Selected Version Dependent Folders For Storage',
                config['log'])
            pack_results = custom_dirs.store(config, info)

    # Generate Meta Data File
    current_pkgs = utils.pacman_Q()
    meta = [
        '======= Pacback Info =======',
        'Version: ' + config['version'],
        'Label: ' + info['label'],
        'Date Created: ' + dt.datetime.now().strftime("%Y/%m/%d"),
        'Time Created: ' + dt.datetime.now().strftime("%H:%M:%S"),
        'Type: ' + info['TYPE'],
        'SubType: ' + info['STYPE'],
        'Packages Installed: ' + str(len(current_pkgs))
    ]

    if info['STYPE'] == 'Full':
        meta.append('Packages Cached: ' + str(len(found_pkgs)))
        meta.append('Package Cache Size: ' + paf.convert_size(pkg_size))

    if info['dir_list']:
        meta.append('Dir File Count: ' + str(pack_results['file_count']))
        meta.append('Dir Raw Size: ' + pack_results['raw_size'])
        meta.append('Tar Compressed Size: ' + pack_results['compressed_size'])
        meta.append('Tar Checksum: ' + pack_results['csum'])

        meta.append('')
        meta.append('========= Dir List =========')
        for d in info['dir_list']:
            meta.append(d)

    meta.append('')
    meta.append('======= Pacman List ========')
    for pkg in current_pkgs:
        meta.append(pkg)

    # Export Final Meta Data File
    paf.export_iterable(info['meta'], meta)
    paf.write_to_log(fname, 'Generated Meta Data File', config['log'])
    # Checksum Meta Data File
    paf.export_iterable(info['meta_md5'], [paf.checksum_file(info['meta'])[1]])
    paf.write_to_log(fname, 'Generated Meta Data Checksum', config['log'])
    # Finish and Return
    paf.write_to_log(
        fname, 'Main Build Complete of ID:' + info['id'] + ' As ' +
        info['STYPE'] + ' ' + info['TYPE'], config['log'])
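
The hardlink step is what keeps Full restore points cheap: each cached package is linked into the restore point rather than copied. A minimal standalone sketch of that pattern (names are illustrative):

import os

def hardlink_into(files, dest_dir):
    # Hard links share an inode with the source, so the 'copy' costs no
    # extra disk space; source and destination must be on the same
    # filesystem or os.link raises OSError.
    os.makedirs(dest_dir, exist_ok=True)
    for src in files:
        os.link(src, os.path.join(dest_dir, os.path.basename(src)))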
Example #6
import os

import paf
import session  # project-local module (assumed layout)
# main() is Example #5, defined in the same module


def snapshot(config, label):
    '''
    Assembles all the info for main() and stages the file system for the creation
    of a new snapshot with id='00'. This is only called by `--hook`.
    '''
    num = '00'
    fname = 'create.snapshot(' + num + ')'
    paf.write_to_log(fname, 'Started Snapshot Creation...', config['log'])
    session.hlock_check(config)

    info = {
        'id': num,
        'type': 'ss',
        'TYPE': 'Snapshot',
        'stype': 'l',
        'STYPE': 'Light',
        'nc': True,
        'label': str(label),
        'meta': config['ss_paths'] + '/ss' + num + '.meta',
        'meta_md5': config['ss_paths'] + '/.ss' + num + '.md5',
        'dir_list': [],
        'path': config['ss_paths'] + '/ss' + num,
        'pkgcache': config['ss_paths'] + '/ss' + num + '/pkg-cache',
        'tar': config['ss_paths'] + '/ss' + num + '/ss' + num + '_dirs.tar'
    }

    # Shift Snapshots Forward So This Becomes Zero
    if os.path.exists(config['ss_paths'] + '/ss00.meta'):
        paf.write_to_log(fname, 'Shifting All Snapshots Forward +1...',
                         config['log'])

        # Remove the Last Snapshot (note the checksum file name is
        # dot-prefixed, matching the 'meta_md5' path built above)
        paf.rm_file(config['ss_paths'] + '/ss' +
                    str(config['max_ss']).zfill(2) + '.meta',
                    sudo=False)
        paf.rm_file(config['ss_paths'] + '/.ss' +
                    str(config['max_ss']).zfill(2) + '.md5',
                    sudo=False)

        # Moves Each Snapshot Forward +1 and Cleans on Exceptions
        for n in range((config['max_ss'] - 1), -1, -1):
            meta_path_old = config['ss_paths'] + '/ss' + str(n).zfill(2) + '.meta'
            meta_path_new = config['ss_paths'] + '/ss' + str(n + 1).zfill(2) + '.meta'
            hash_path_old = config['ss_paths'] + '/.ss' + str(n).zfill(2) + '.md5'
            hash_path_new = config['ss_paths'] + '/.ss' + str(n + 1).zfill(2) + '.md5'
            meta_found = os.path.exists(meta_path_old)
            csum_found = os.path.exists(hash_path_old)

            if meta_found and csum_found:
                os.rename(meta_path_old, meta_path_new)
                os.rename(hash_path_old, hash_path_new)

            elif meta_found and not csum_found:
                paf.write_to_log(
                    fname, 'Snapshot ' + str(n).zfill(2) +
                    ' is Missing its Checksum File!', config['log'])
                paf.rm_file(meta_path_old, sudo=False)
                paf.write_to_log(fname,
                                 'Removed Snapshot ID:' + str(n).zfill(2),
                                 config['log'])

            elif not meta_found and csum_found:
                paf.write_to_log(fname,
                                 hash_path_old + ' is an Orphaned Checksum',
                                 config['log'])
                paf.rm_file(hash_path_old, sudo=False)
                paf.write_to_log(fname, 'Removed Orphaned Checksum',
                                 config['log'])

            # else: neither file exists in this slot, so nothing to shift

        paf.write_to_log(fname, 'Finished Shifting Snapshots Forward',
                         config['log'])

    else:
        paf.write_to_log(
            fname,
            'Snapshot ID:00 Was Not Found, Shift Forward is Unnecessary.',
            config['log'])

    # Creates Snapshot After Pre-Transaction Work and Checks
    paf.write_to_log(fname, 'All Checks Passed! Ready For Snapshot Creation',
                     config['log'])
    paf.prBold('Creating Snapshot...')
    main(config, info)

    # Prevents Back-to-Back Snapshots (Especially During AUR Upgrades)
    session.hlock_start(config)
    paf.write_to_log(fname, 'Snapshot Creation Complete!', config['log'])
    paf.prBold('Snapshot Creation Complete!')
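
Stripped of the orphan handling, the shift-forward pass is a logrotate-style rotation: drop the oldest slot, then move every file up one so slot 00 is free. A compact sketch of the same idea (the path template is illustrative):

import os

def rotate(fmt, max_n):
    # fmt is a path template such as '/var/lib/pacback/ss/ss{:02d}.meta'
    if os.path.exists(fmt.format(max_n)):
        os.remove(fmt.format(max_n))
    for n in range(max_n - 1, -1, -1):
        if os.path.exists(fmt.format(n)):
            os.rename(fmt.format(n), fmt.format(n + 1))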
Example #7
import requests

import paf


def print_stream_status(lang, url):
    resp = requests.get(url, stream=True)
    # str(resp) renders as '<Response [200]>'; [1:-1] trims the angle brackets
    status = lang.upper() + ' Stream Status: ' + str(resp)[1:-1]
    print('-' * len(status))
    paf.prBold(status)
    print('-' * len(status))
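
A hypothetical call, reusing the inventory URL from Example #1; on success the banner reads roughly:

print_stream_status('english',
                    'http://data.gdeltproject.org/gdeltv2/lastupdate.txt')
# -------------------------------------
# ENGLISH Stream Status: Response [200]
# -------------------------------------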
Example #8
import datetime as dt
import os

import paf
import meta   # project-local modules (assumed layout)
import utils


def clean_cache(config, nc):
    '''
    This provides automated cache cleaning using pacman, paccache, and pacback.
    '''
    fname = 'utils.clean_cache()'
    paf.prBold('Starting Advanced Cache Cleaning...')
    paf.write_to_log(fname, 'Starting Advanced Cache Cleaning...',
                     config['log'])
    print('')

    if nc is True or paf.yn_frame(
            'Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('/usr/bin/pacman -R $(/usr/bin/pacman -Qtdq)')
        paf.write_to_log(fname, 'Removed Orphaned Packages', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Old Versions of Installed Packages?'
    ) is True:
        os.system('/usr/bin/paccache -rk ' + str(config['keep_versions']))
        paf.write_to_log(fname, 'Removed Old Package Versions', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Cached Orphans?') is True:
        os.system('/usr/bin/paccache -ruk0')
        paf.write_to_log(fname, 'Removed Cached Orphans', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Check For Old Pacback Restore Points?') is True:
        paf.write_to_log(fname, 'Starting Search For Old Restore Points...',
                         config['log'])
        meta_paths = sorted(f for f in paf.find_files(config['rp_paths'])
                            if f.endswith(".meta"))

        today_dt = dt.date.today()

        for m in meta_paths:
            num = m[-7:-5]  # two-digit id embedded in 'rpNN.meta'
            rp_info = {
                'id': num,
                'type': 'rp',
                'TYPE': 'Restore Point',
                'meta': m,
                'meta_md5': config['rp_paths'] + '/.rp' + num + '.md5',
                'path': config['rp_paths'] + '/rp' + num,
                'pkgcache': config['rp_paths'] + '/rp' + num + '/pkg-cache'
            }

            # Format Dates for Compare
            m_dict = meta.read(config, m)
            o_split = (m_dict['date'].split('/'))
            old_dt = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))

            # Check How Old Restore Point Is
            days = (today_dt - old_dt).days
            if days > config['old_rp']:
                paf.prWarning('Failed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) +
                              ' Days Old!')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old!', config['log'])
                if paf.yn_frame('Do You Want to Remove This ' +
                                rp_info['TYPE'] + '?') is True:
                    utils.remove_id(config, rp_info)
                    paf.prSuccess('Restore Point Removed!')
                else:
                    paf.write_to_log(
                        fname, 'User Declined Removal of ' + rp_info['TYPE'] +
                        ' ' + rp_info['id'], config['log'])

            else:
                paf.prSuccess('Passed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) + ' Days Old')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old', config['log'])

    paf.write_to_log(fname, 'Finished Advanced Cache Cleaning', config['log'])
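
The age test reduces to plain date arithmetic. A minimal sketch of the comparison driving the prompts above (the function name is illustrative):

import datetime as dt

def days_old(date_str):
    # Age in days of a 'YYYY/MM/DD' string, the format stored in .meta files.
    y, m, d = (int(p) for p in date_str.split('/'))
    return (dt.date.today() - dt.date(y, m, d)).days

print(days_old('2024/01/01'))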