Example 1
    def run(self):
        # start the SaveThread that writes results from save_queue
        self.save_thread = SaveThread(self.save_queue)
        self.save_thread.start()

        #start whois threads
        try:
            for l in open(config.PROXY_LIST,'r'):
                if config.NUM_PROXIES == 0 or len(self.threads) < config.NUM_PROXIES:
                    l = l.strip()
                    if l and not l.startswith('#'):  # skip blank lines and comments
                        url = urlparse.urlparse(l)
                        proxy_type = None
                        if url.scheme == "http":
                            proxy_type = whoisThread.socks.PROXY_TYPE_HTTP
                        elif url.scheme == "socks":
                            proxy_type = whoisThread.socks.PROXY_TYPE_SOCKS4
                        else:
                            print "Unknown Proxy Type"
                        if proxy_type:
                            proxy = whoisThread.Proxy(url.hostname, url.port, proxy_type)
                            t = whoisThread.WhoisThread(proxy, self.input_queue, self.save_queue)
                            t.start()
                            self.threads.append(t)
        except IOError:
            print "Unable to open proxy file: " + config.PROXY_LIST
            return
        if config.DEBUG:
            print str(whoisThread.getProxyThreadCount()) + " threads started"

        #now start EnqueueThread
        self.input_thread = EnqueueThread(self.input_queue)
        self.input_thread.start()

        #wait for threads to settle
        time.sleep(0.2)

        self.ready = True

        #now wait for all the work to be done
        while self.input_thread.isAlive():
            time.sleep(0.5)

        if config.DEBUG:
            print "Done loading domains to queue"

        while self.input_queue.qsize() > whoisThread.getProxyThreadCount():
            time.sleep(config.WHOIS_SERVER_JUMP_DELAY)

        # when the remaining queries are all waiting for an open proxy, reduce the delay
        # TODO: this does not always prevent getting stuck on the last few
        config.WHOIS_SERVER_JUMP_DELAY = config.WHOIS_SERVER_SLEEP_DELAY
        config.WHOIS_SERVER_SLEEP_DELAY = 1

        self.input_queue.join()

        if config.DEBUG:
            print "Saving results"
        self.save_queue.join()
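
The loop above expects config.PROXY_LIST to point at a text file with one proxy URL per line, where '#' lines are comments and the URL scheme selects the proxy type. A minimal sketch of producing such a file (the filename and addresses below are placeholders, not from the original project):

    # illustrative only: one proxy URL per line, '#' lines are skipped by the loop above
    with open('proxies.txt', 'w') as f:
        f.write('# comment lines are ignored\n')
        f.write('http://203.0.113.10:8080\n')    # parsed as PROXY_TYPE_HTTP
        f.write('socks://203.0.113.11:1080\n')   # parsed as PROXY_TYPE_SOCKS4
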
Example 2
def run():
    '''main entrypoint once config has been set by main'''
    manager = ManagerThread()
    manager.daemon = True #required for ctrl-c exit
    config.START_TIME = time.time()
    manager.start()

    if config.DEBUG:
        print "Waiting for threads to settle"
    while not manager.ready:
        time.sleep(0.2)

    if config.PRINT_STATUS:
        print_status_line()
        print_status_data(manager)

    time.sleep(0.5)

    try:
        while whoisThread.getProxyThreadCount() > 0 and manager.isAlive():
            if config.PRINT_STATUS:
                print_status_data(manager)
            time.sleep(config.STATUS_UPDATE_DELAY)
        if (whoisThread.getProxyThreadCount() == 0):
            print "No valid Proxy threads running!!"
    except KeyboardInterrupt:
        q_size = manager.input_queue.qsize()
        if q_size <= (config.MAX_QUEUE_SIZE - 1):
            skipped = manager.input_thread.getNumSkipped()
            loaded = manager.input_thread.getDomainCount()
            total = skipped + loaded - config.MAX_QUEUE_SIZE
            print "\nExamined at least %d domains" % (total)
        config.PRINT_STATUS = False
    finally:
        # ensure the tar file is closed
        manager.save_thread.closeTar()
        if config.PRINT_STATUS:
            print_status_data(manager)
            sys.stdout.write("\n")
        if config.SAVE_LOGS:
            whoisThread.printExceptionCounts()
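
A minimal sketch of driving this entrypoint, assuming config is the module whose attributes (DEBUG, PRINT_STATUS, STATUS_UPDATE_DELAY, and so on) are normally filled in by main before run() is called; the values below are illustrative, not the project's defaults:

    if __name__ == '__main__':
        # illustrative only: the real tool populates config from command-line options
        config.DEBUG = True
        config.PRINT_STATUS = True
        config.STATUS_UPDATE_DELAY = 1.0
        run()
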
Example 3
def print_status_data(manager):
    '''updates the statusline data'''
    global last_lookups
    running_seconds = (time.time() - config.START_TIME)

    domains = manager.input_thread.getDomainCount()
    good_saved = manager.save_thread.getNumGood()
    fail_saved = manager.save_thread.getNumFails()
    total_saved = manager.save_thread.getNumSaved()
    skipped = manager.input_thread.getNumSkipped()
    active_threads = whoisThread.getActiveThreadCount()
    total_threads = whoisThread.getProxyThreadCount()
    running_time = str(datetime.timedelta(seconds=int(running_seconds)))
    q_size = manager.input_queue.qsize()
    progress = 100 * manager.input_thread.getProgress()

    rlookups = good_saved
    if not config.DPS:
        rlookups = whoisThread.getLookupCount()

    last_lps = (rlookups - last_lookups) / config.STATUS_UPDATE_DELAY
    total_lps = rlookups / running_seconds
    lps = (last_lps * 0.8) + (total_lps * 0.2)
    last_lookups = rlookups

    allDomains = (domains + skipped) - q_size

    failp = 0.0
    if total_saved != 0:
        failp = 100.0 * (float(fail_saved) / float(total_saved))

    # terminal size (only the width is used here)
    (width, height) = getTerminalSize()
    # blank out the current status line before redrawing it
    sys.stdout.write('\r' + (' ' * width))

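    # columns: progress %, domains examined so far, domains loaded, failure %,
    # good results saved, active / total proxy threads, smoothed lookups per second, running time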
    data = "\r%3.0f%% %9d  %9d  %5.1f%%  %9d  %6d / %-6d  %6.1f  %s" % (
        progress, allDomains, domains, failp, good_saved, active_threads,
        total_threads, lps, running_time)

    sys.stdout.write(data)

    if q_size < (config.MAX_QUEUE_SIZE / 10):
        sys.stdout.write("  WARNING: input queue is %d " % q_size)

    sq_size = manager.save_queue.qsize()
    if sq_size > (config.MAX_QUEUE_SIZE / 5):
        sys.stdout.write("  WARNING: save queue is %d " % sq_size)

    sys.stdout.flush()
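
The lookups-per-second figure above blends the rate since the last status update (weight 0.8) with the overall average rate (weight 0.2). A small worked example with made-up numbers:

    last_lookups, rlookups = 120, 150    # counts at the previous and current update
    STATUS_UPDATE_DELAY = 5.0            # seconds between updates (illustrative value)
    running_seconds = 300.0
    last_lps = (rlookups - last_lookups) / STATUS_UPDATE_DELAY    # 6.0 lookups/sec recently
    total_lps = rlookups / running_seconds                        # 0.5 lookups/sec overall
    lps = (last_lps * 0.8) + (total_lps * 0.2)                    # 4.9 shown on the status line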