Example #1
    def test_spinner_getters_setters(self):
        """Test spinner getters and setters.
        """
        spinner = Halo()
        self.assertEqual(spinner.text, '')
        self.assertEqual(spinner.color, 'cyan')
        self.assertIsNone(spinner.spinner_id)

        spinner.spinner = 'dots12'
        spinner.text = 'bar'
        spinner.color = 'red'

        self.assertEqual(spinner.text, 'bar')
        self.assertEqual(spinner.color, 'red')

        if is_supported():
            self.assertEqual(spinner.spinner, Spinners['dots12'].value)
        else:
            self.assertEqual(spinner.spinner, default_spinner)

        spinner.spinner = 'dots11'
        if is_supported():
            self.assertEqual(spinner.spinner, Spinners['dots11'].value)
        else:
            self.assertEqual(spinner.spinner, default_spinner)

        spinner.spinner = 'foo_bar'
        self.assertEqual(spinner.spinner, default_spinner)

        # Color is None
        spinner.color = None
        spinner.start()
        spinner.stop()
        self.assertIsNone(spinner.color)
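The test above relies on Halo exposing text, color, and spinner as plain
properties that can be reassigned at any time, with unknown spinner names
silently replaced by the library default. A minimal sketch of that behavior
outside the test harness; the texts are placeholders:

from halo import Halo

spinner = Halo(text='loading', color='cyan')
spinner.spinner = 'dots12'       # a known name from the spinners package
spinner.spinner = 'foo_bar'      # unknown name: Halo falls back to its default
spinner.text = 'still loading'   # text and color can be swapped mid-run
spinner.color = 'red'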
Example #2
import argparse
import time
from pathlib import Path
from typing import List

import requests
from halo import Halo
from tqdm import tqdm


def download_files(folder_title: str, links: List[str], args: argparse.Namespace):
    """Download every file in ``links`` once the source page has been parsed."""

    root_path = Path.cwd()
    links_len = len(links)

    if args.name:
        dir_path = root_path / args.name
    else:
        dir_path = root_path / folder_title

    # Create folder
    spinner = Halo(text="Creating folder", spinner="dots")
    spinner.start()
    dir_path.mkdir(exist_ok=True)  # reuse the folder if it already exists
    spinner.stop_and_persist(symbol="✅".encode("utf-8"), text="Folder Created")

    print(f"Total files: {links_len}")
    print(f"Download Path: {dir_path}")

    # Start download
    for index, url in enumerate(links):
        # Disable compression so Content-Length matches the bytes we stream.
        r = requests.get(url, stream=True, headers={"Accept-Encoding": None})
        # Default to 0 so a missing Content-Length header does not crash int().
        total_size = int(r.headers.get("Content-Length", 0))

        spinner.text = f"Downloading {index + 1}/{links_len} file"
        spinner.spinner = "arrow3"
        spinner.start()
        time.sleep(1)
        spinner.stop()

        file = url.split("/")[-1]
        file = (file[:50] + "...") if len(file) > 50 else file
        with open(dir_path / file, "wb") as f:
            with tqdm(
                total=total_size,
                desc=f"{file:<53}",
                unit="B",
                unit_scale=True,
                bar_format="{l_bar}{bar:20}{r_bar}{bar:-10b}",
            ) as pbar:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))

    spinner.spinner = "monkey"
    spinner.start()
    time.sleep(2)
    spinner.stop_and_persist(symbol="🔥".encode("utf-8"), text="All files downloaded.")
Example #3
    def test_spinner_getter(self):
        """The spinner getter should always resolve to a valid spinner."""
        instance = Halo()
        if is_supported():
            default_spinner_value = "dots"
        else:
            default_spinner_value = "line"

        instance.spinner = default_spinner_value
        self.assertEqual(default_spinner, instance.spinner)

        instance.spinner = "This_spinner_do_not_exist"
        self.assertEqual(default_spinner, instance.spinner)

        instance.spinner = -123
        self.assertEqual(default_spinner, instance.spinner)
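As the test shows, anything the setter cannot resolve (an unknown name, an
integer) falls back to default_spinner. The valid non-string form is a custom
animation dict with 'interval' and 'frames' keys, sketched here with made-up
frames:

from halo import Halo

# A custom animation: milliseconds between frames, plus the frame strings.
custom = {'interval': 100, 'frames': ['-', '+', '*', '+']}
spinner = Halo(text='custom frames', spinner=custom)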
Example #4
import argparse

import spacy
from halo import Halo
from tqdm import tqdm

# Project-local helpers are assumed importable from the surrounding package:
# file_type, get_scraper, format_site, crawl_strings, add_pages, find_ents,
# add_protocols, print_title, print_dict, and the shared all_vulns dict.


def cli():
    parser = argparse.ArgumentParser(
        description='A web scraping tool to help pentesters crawl websites '
                    'for vulnerable personal info')
    parser.add_argument('url',
                        metavar='N',
                        type=str,
                        nargs='+',
                        help='url(s) to crawl for vulnerabilities')
    parser.add_argument('-f',
                        '--files',
                        default=False,
                        action="store_true",
                        help='Download / crawl compatible file types.')
    parser.add_argument(
        '-l',
        dest='limit',
        type=int,
        nargs='?',
        help='Limits the amount of URLs to spider/crawl for each site')
    parser.add_argument('-o',
                        dest='output',
                        type=str,
                        nargs='?',
                        help='Writes output to file path specified.')
    parser.add_argument(
        '-x',
        dest='spider',
        default=True,
        action="store_false",
        help='Only crawl URLs provided. Do not spider using found URLs.')
    parser.add_argument('--no-relatives',
                        dest='relatives',
                        default=True,
                        action="store_false",
                        help='Disables confidence / entity processing.')

    parser.add_argument(
        '--debug',
        dest='debug',
        default=False,
        action="store_true",
        help='Debugging is actually all about finding the bug. About understanding '
             'why the bug was there to begin with. About knowing that its existence '
             'was no accident.')
    args = parser.parse_args()

    emails = {}
    phones = {}

    spinner = Halo(text='Get hyped.', spinner='dots')
    spinner.start()
    i = 1

    nlp = None
    if args.relatives:
        spinner.text += ' Loading NLP library. '
        nlp = spacy.load('en_core_web_md')

    for base_url in args.url:
        cleaned_urls = [base_url]
        pbar = tqdm(cleaned_urls, position=1)
        for url in pbar:
            if base_url in url and file_type(url) is not None:
                n = len(cleaned_urls)
                pbar.total = n
                if args.limit:
                    n = args.limit

                spinner.text = '{}/{} {:15} {}'.format(i, n, 'Scraping...', url)
                spinner.spinner = 'simpleDotsScrolling'
                # build a beautiful soup web scraper
                scraper = get_scraper(url)
                if scraper:
                    # pull strings, urls, and body text from the current url
                    found_strings, found_urls = format_site(scraper)
                    # run website page through nlp, build and return "entities" of people / orgs
                    # build entity class in vuln dict if entity not already present

                    new_emails, new_urls, new_phones, new_ips = crawl_strings(
                        found_urls + found_strings)
                    add_pages(new_emails, url, all_vulns)
                    add_pages(new_phones, url, all_vulns)

                    if args.relatives and len(new_emails) + len(new_phones) > 1:
                        spinner.text = '{}/{} {:15} {}'.format(i, n, 'Tokenizing...', url)
                        spinner.spinner = 'simpleDotsScrolling'

                        tokens = nlp(' '.join(found_strings))

                        spinner.text = '{}/{} {:15} {}'.format(i, n, 'Comparing...', url)
                        spinner.spinner = 'squish'
                        vulns = find_ents(tokens)
                        for vuln in vulns:
                            if vuln.kind == 'email':
                                emails[vuln.name]['relatives'] = (
                                    emails[vuln.name]['relatives'].union(vuln.relatives))

                    if args.spider:
                        cleaned_found_urls = add_protocols(base_url, found_urls)
                        for found_url in cleaned_found_urls:
                            if found_url not in cleaned_urls and base_url in found_url:
                                cleaned_urls.append(found_url)
                i += 1
                if args.limit and i > args.limit:
                    break

    output = print_title('Emails') + print_dict(emails)

    if args.output:
        with open(args.output, 'w') as f:
            f.write(output)
        spinner.succeed('All done! Output written to {}'.format(args.output))
    else:
        spinner.succeed('All done! Here you go <3')
        print(output)
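The CLI finishes with spinner.succeed(...); Halo also provides fail, warn, and
info, which stop the spinner and persist a preset symbol. A minimal sketch of
the family, with placeholder messages:

from halo import Halo

spinner = Halo(text='crawling', spinner='dots')
spinner.start()
# ... do the work ...
spinner.succeed('All done!')        # persists with a green check mark
# spinner.fail('Crawl failed')      # red cross
# spinner.warn('Partial results')   # yellow warning sign
# spinner.info('Nothing to crawl')  # blue info symbol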
Example #5
# -*- coding: utf-8 -*-
"""Example for doge spinner ;)
"""
from __future__ import unicode_literals
import os
import sys
import time

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from halo import Halo

spinner = Halo(text='Such Spins', spinner='dots')

try:
    spinner.start()
    time.sleep(2)
    spinner.text = 'Much Colors'
    spinner.color = 'magenta'
    time.sleep(2)
    spinner.text = 'Very emojis'
    spinner.spinner = 'hearts'
    time.sleep(2)
    spinner.stop_and_persist(symbol='🦄 '.encode('utf-8'), text='Wow!')
except (KeyboardInterrupt, SystemExit):
    spinner.stop()
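The try/except above only guards the manual start()/stop() pair; Halo can also
be used as a context manager, which stops the spinner automatically even when
the block raises. A minimal sketch:

import time
from halo import Halo

with Halo(text='Such Spins', spinner='dots'):
    time.sleep(2)  # the spinner stops when the block exits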
Example #6
File: doge_spin.py  Project: xxyyzz/halo
# -*- coding: utf-8 -*-
"""Examples for halo.
"""
from __future__ import unicode_literals, absolute_import, print_function
import os
import sys
import time

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from halo import Halo

spinner = Halo(text='Such Spins', spinner='dots')

try:
    spinner.start()
    time.sleep(2)
    spinner.text = 'Much Colors'
    spinner.color = 'magenta'
    time.sleep(2)
    spinner.text = 'Very emojis'
    spinner.spinner = 'hearts'
    time.sleep(2)
    spinner.stop_and_persist(symbol='🦄 '.encode('utf-8'), text='Wow!')
except (KeyboardInterrupt, SystemExit):
    spinner.stop()
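Beyond the constructor forms shown in these two doge demos, Halo also works as
a decorator, spinning for the duration of the wrapped call. A minimal sketch:

import time
from halo import Halo

@Halo(text='Doing heavy lifting', spinner='dots')
def heavy_lifting():
    time.sleep(2)

heavy_lifting()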