Exemplo n.º 1
0
def batch_scan(list_file):
    '''
    Scan every host listed in *list_file* concurrently and record positive
    results to a timestamped output file.

    :param list_file: path to a text file containing one host per line
    '''
    # display progress in a background daemon thread
    import threading
    outfile = '{}.txt'.format(FingerprintScanner.NOW_TIME)
    # BUG FIX: the original passed target=wc.progress(outfile), which CALLED
    # wc.progress immediately and handed its return value to Thread; pass the
    # callable and its argument separately instead.
    status = threading.Thread(target=wc.progress, args=(outfile,))
    status.daemon = True  # setDaemon() is deprecated since Python 3.10
    status.start()

    # parallel exec
    from concurrent import futures
    with futures.ThreadPoolExecutor(max_workers=100) as executor:
        future_targets = {}
        # BUG FIX: the original never closed the list file; use a context
        # manager so the handle is released deterministically.
        with open(list_file) as list_open:
            for line in list_open:
                host = line.strip()
                if not host:
                    continue  # skip blank lines in the host list
                scanner = FingerprintScanner(host)
                future_targets[executor.submit(scanner.weblogic_scan)] = host
        for future in futures.as_completed(future_targets):
            job = future_targets[future]
            try:
                ret_val = future.result()  # return value of app scanner method
                if ret_val:
                    vwrite.write_to_file(job, outfile)
            except (EOFError, KeyboardInterrupt, SystemExit):
                pass
            except Exception:
                # BUG FIX: this was `else:`, which ran debug_except() after
                # every SUCCESSFUL scan; it was clearly meant to report only
                # unexpected failures from future.result().
                console.debug_except()
Exemplo n.º 2
0
def get_and_parse(url, page):
    '''
    Fetch one page of Baidu search results and append each hit URL to
    result.txt.

    :param url: base search URL; the page index is concatenated onto it
    :param page: page index appended to *url* before the request
    '''
    import ast  # safe literal parsing for the data-log attribute

    try:
        headers = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.1) \
                AppleWebKit/537.36 (KHTML, like Gecko) " +
            "Chrome/41.0.2228.0 Safari/537.36"
        }
        url += str(page)
        rget = requests.get(url, headers=headers)
        soup = BeautifulSoup(rget.text, "html.parser")
        div = soup.find_all(tpl='www_normal')

        for line in div:
            result = line.get('data-log', '')
            # SECURITY FIX: data-log comes from a remote, attacker-influenced
            # page; eval() would execute arbitrary code.  literal_eval parses
            # Python literals (the dict this attribute holds) and nothing else.
            try:
                res = ast.literal_eval(result)
            except (ValueError, SyntaxError):
                continue  # malformed/empty attribute -- skip this hit
            if isinstance(res, dict) and 'mu' in res:
                vwrite.write_to_file(res['mu'], 'result.txt')

    except requests.RequestException as exc:
        console.print_warning(f"[-] Request error: {exc}")

    except BaseException:
        console.debug_except()
Exemplo n.º 3
0
def get_and_parse(url, page):
    '''
    Fetch one page of Baidu search results and append each hit URL to
    result.txt.  Best-effort: any per-request failure is swallowed so a
    batch run keeps going.

    :param url: base search URL; the page index is concatenated onto it
    :param page: page index appended to *url* before the request
    '''
    import ast  # safe literal parsing for the data-log attribute

    try:
        headers = {
            "User-Agent":
            "Mozilla/5.0 (Windows NT 6.1) \
                AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"
        }
        url += str(page)
        rget = requests.get(url, headers=headers)
        soup = BeautifulSoup(rget.text, "html.parser")
        div = soup.find_all(tpl='www_normal')
        for line in div:
            result = line.get('data-log', '')
            # SECURITY FIX: eval() on content scraped from a remote page
            # executes arbitrary code; literal_eval parses literals only.
            res = ast.literal_eval(result)
            vwrite.write_to_file(res['mu'], 'result.txt')
    except Exception:
        # BUG FIX: was `except BaseException: pass`, which also swallowed
        # KeyboardInterrupt and SystemExit; stay best-effort for ordinary
        # errors but let interpreter-exit signals propagate.
        pass