Example #1
from flask import render_template

# `Indexing` and the module-level `lookyloo` (a Lookyloo instance) are provided
# by the Lookyloo web application; their exact import paths are not shown here.

def ressources():
    i = Indexing()
    ressources = []
    # For every indexed body hash, gather its frequency, the number of domains
    # it was seen on, known-content context, and one capture/node where it appears.
    for h, freq in i.ressources:
        domain_freq = i.ressources_number_domains(h)
        context = lookyloo.context.find_known_content(h)
        capture_uuid, url_uuid, hostnode_uuid = i.get_hash_uuids(h)
        ressources.append((h, freq, domain_freq, context.get(h), capture_uuid, url_uuid, hostnode_uuid))
    return render_template('ressources.html', ressources=ressources)
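
A view function like this only becomes reachable once it is registered on the Flask application object, typically via an `@app.route` decorator or `add_url_rule`. A minimal sketch of the latter, assuming a module-level `app`; the URL path is an assumption and may differ from Lookyloo's real URL map:

from flask import Flask

app = Flask(__name__)

# Hypothetical registration; the actual path used by Lookyloo may differ.
app.add_url_rule('/ressources', view_func=ressources)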
Example #2
import argparse

# `Lookyloo` and `Indexing` come from the lookyloo package; their exact import
# paths are omitted here.

def main():
    parser = argparse.ArgumentParser(description='Rebuild the redis cache.')
    parser.add_argument(
        '--rebuild_pickles',
        default=False,
        action='store_true',
        help='Delete and rebuild the pickles. Count 20s/pickle, it can take a very long time.'
    )
    args = parser.parse_args()

    lookyloo = Lookyloo()
    if args.rebuild_pickles:
        lookyloo.rebuild_all()
    else:
        lookyloo.rebuild_cache()

    indexing = Indexing()
    indexing.clear_indexes()

    # This call will rebuild all the caches as needed.
    lookyloo.sorted_capture_cache()
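
A sketch of the usual entry point for a script like this; the file name in the invocation is illustrative, not Lookyloo's actual script name:

if __name__ == '__main__':
    main()

# Hypothetical invocation:
#   python rebuild_caches.py                     # rebuild the Redis cache only
#   python rebuild_caches.py --rebuild_pickles   # also delete and rebuild every pickle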
Example #3
import argparse

# `Lookyloo` and `Indexing` come from the lookyloo package; their exact import
# paths are omitted here.

def main():
    parser = argparse.ArgumentParser(description='Rebuild the redis cache.')
    parser.add_argument(
        '--rebuild_pickles',
        default=False,
        action='store_true',
        help='Delete and rebuild the pickles. Count 20s/pickle, it can take a very long time.'
    )
    args = parser.parse_args()

    lookyloo = Lookyloo()
    if args.rebuild_pickles:
        lookyloo.rebuild_all()
    else:
        lookyloo.rebuild_cache()

    indexing = Indexing()
    indexing.clear_indexes()
    for capture_uuid in lookyloo.capture_uuids:
        index = True
        try:
            tree = lookyloo.get_crawled_tree(capture_uuid)
        except Exception as e:
            # Skip captures whose tree cannot be loaded.
            print(capture_uuid, e)
            continue

        # On a public instance, captures flagged 'no_index' are kept out of the indexes.
        if lookyloo.is_public_instance:
            cache = lookyloo.capture_cache(capture_uuid)
            if cache.get('no_index') is not None:
                index = False

        # NOTE: these two methods do nothing if we just generated the pickle
        if index:
            indexing.index_cookies_capture(tree)
            indexing.index_body_hashes_capture(tree)
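
This example extends Example #2: after clearing the indexes it walks every known capture, loads its crawled tree, and re-indexes cookies and body hashes, skipping captures whose tree cannot be loaded and, on a public instance, captures flagged as no_index.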
Example #4
from flask import render_template

# `Indexing` comes from the lookyloo package (import path omitted).
def cookies_lookup():
    i = Indexing()
    # For each indexed cookie name: (name, frequency, number of domains it was seen on).
    cookies_names = [(name, freq, i.cookies_names_number_domains(name)) for name, freq in i.cookies_names]
    return render_template('cookies.html', cookies_names=cookies_names)
Example #5
from flask import render_template

# `Indexing` comes from the lookyloo package (import path omitted).
def categories():
    i = Indexing()
    print(i.categories)  # prints the indexed categories to the console, likely leftover debugging
    return render_template('categories.html', categories=i.categories)
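
Examples #1, #4 and #5 all follow the same pattern: instantiate `Indexing`, read one of its indexes, and hand the result to `render_template`. The Jinja templates they reference ('ressources.html', 'cookies.html', 'categories.html') are assumed to live in the web application's templates directory.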