Example #1
def scanner(ips):
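    # Queue one scan job per IP address, then start the pool and block until every job has run.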
    pool = SimplePool.ThreadPool()
    for ip in ips:
        job = SimplePool.ThreadJob(scan_ip, ip)
        pool.add_job(job)
    pool.start()
    pool.finish()
    return
Example #2
def pinger(ips):
    pool = SimplePool.ThreadPool()
    cprint("[+] Starting Ping Scan ", 'green')
    for ip in ips:
        job = SimplePool.ThreadJob(verbose_ping, ip)
        pool.add_job(job)
    pool.start()
    pool.finish()
Example #3
def pinger(ips):
    pool = SimplePool.ThreadPool()
    cprint("[+] Starting Ping Scan ", 'green')
    for ip in ips:
        job = SimplePool.ThreadJob(ping_ip, ip)
        pool.add_job(job)
    pool.start()
    pool.finish()
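    # 'live' (the hosts that answered) is presumably populated elsewhere, e.g. by ping_ip;
    # it is not defined within this snippet.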
    return live
Example #4
def pinger(ips):
    pool = SimplePool.ThreadPool()
    cprint("[+] Starting Ping Scan ", 'green')
    for id, ip in enumerate(ips):
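        # id ^ os.getpid() presumably derives a per-job identifier (e.g. for the ICMP echo packet).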
        job = SimplePool.ThreadJob(ping, [ip, id ^ os.getpid()])
        pool.add_job(job)
    pool.start()
    pool.finish()
    return live
Example #5
    def start_requests(self):
        count = self.sql.get_proxy_count(config.free_ipproxy_table)
        count_free = self.sql.get_proxy_count(config.httpbin_table)

        # ids = self.sql.get_proxy_ids(config.free_ipproxy_table)
        # ids_httpbin = self.sql.get_proxy_ids(config.httpbin_table)

        logging.info('init data...')
        for data in self.sql.db[config.free_ipproxy_table].find(self.query):
            url = random.choice(self.urls)
            cur_time = time.time()

            proxy = Proxy()
            proxy.set_value(ip=data.get('ip'),
                            port=data.get('port'),
                            country=data.get('country'),
                            anonymity=data.get('anonymity'),
                            https=data.get('https'),
                            speed=data.get('speed'),
                            source=data.get('source'),
                            vali_count=data.get('vali_count'),
                            err_count=data.get('err_count'))
            proxy.id = data.get('_id')

            args = (cur_time, data, 'http://%s:%s' % (proxy.ip, proxy.port))

            j = SimplePool.ThreadJob(self.valid, args)

            self.threadpool.add_job(j)

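        # ValidThread presumably consumes validation results while the pool works through the queued jobs.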
        result = ValidThread(self.threadpool)
        result.start()
        self.threadpool.start()
        self.threadpool.finish()
Example #6
def test_intern():
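    # 't' is the module-level SimplePool.ThreadPool and 'printer' the job callable;
    # both are defined at module scope (see Example #10).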
    for i in range(100):
        args = ('formalx', 'formaly')
        kwargs = {'testx': 'keywordx', 'testy': 'keywordy'}
        j = SimplePool.ThreadJob(printer, args, kwargs)
        t.add_job(j)
    t.start()
    print(t.is_active)
    t.finish()
Example #7
    def __init__(self, name=None, **kwargs):

        self.urls = []
        self.headers = None
        self.timeout = 10
        self.success_status = [200]
        self.is_record_web_page = False
        self.query = {}

        self.sql = SqlManager()

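        # Shared worker pool for all queued jobs, sized by the configured thread count.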
        self.threadpool = SimplePool.ThreadPool(config.thread_num)
Example #8
def console_main():
    setproctitle('image-scraper')
    scraper = ImageScraper()
    scraper.get_arguments()
    print("\nImageScraper\n============\nRequesting page....\n")
    try:
        scraper.get_html()
    except PageLoadError as e:
        scraper.page_html = ""
        scraper.page_url = ""
        print("Page failed to load. Status code: {0}".format(e.status_code))
        sys.exit()

    scraper.get_img_list()

    if len(scraper.images) == 0:
        sys.exit("Sorry, no images found.")
    if scraper.no_to_download is None:
        scraper.no_to_download = len(scraper.images)

    print("Found {0} images: ".format(len(scraper.images)))

    try:
        scraper.process_download_path()
    except DirectoryAccessError:
        print("Sorry, the directory can't be accessed.")
        sys.exit()
    except DirectoryCreateError:
        print("Sorry, the directory can't be created.")
        sys.exit()

    if scraper.dump_urls:
        for img_url in scraper.images:
            print(img_url)

    status_flags = {
        'count': 0,
        'percent': 0.0,
        'failed': 0,
        'over_max_filesize': 0
    }
    widgets = [
        'Progress: ',
        Percentage(), ' ',
        Bar(marker=RotatingMarker()), ' ',
        ETA(), ' ',
        FileTransferSpeed()
    ]
    pbar = ProgressBar(widgets=widgets, maxval=100).start()
    pool = SimplePool.ThreadPool()
    status_lock = threading.Lock()
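    # Queue one download job per image URL; the workers presumably update the shared
    # status_flags dict under status_lock.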
    for img_url in scraper.images:
        if status_flags['count'] == scraper.no_to_download:
            break
        download_job = SimplePool.ThreadJob(
            download_worker_fn,
            (scraper, img_url, pbar, status_flags, status_lock))
        pool.add_job(download_job)
        status_flags['count'] += 1

    pool.start()
    pool.finish()

    pbar.finish()
    print("\nDone!\nDownloaded {0} images\nFailed: {1}\n".format(
        status_flags['count'] - status_flags['failed'] -
        status_flags['over_max_filesize'], status_flags['failed']))
    return
Example #9
import SimplePool


def printer(x, y, testx=None, testy=None):
    # print x, y, testx, testy
    # print "Done"
    a = {}
    # print a['b']
    return True


t = SimplePool.ThreadPool()
for i in range(100):
    args = ('formalx', 'formaly')
    kwargs = {'testx': 'keywordx', 'testy': 'keywordy'}
    j = SimplePool.ThreadJob(printer, args, kwargs)
    t.add_job(j)
t.start()
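# is_active presumably reports True while worker threads are still busy
# and False once finish() has joined them.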
print(t.is_active)
t.finish()
print(t.is_active)
Example #10
import SimplePool
from nose.tools import eq_

t = SimplePool.ThreadPool()


def printer(x, y, testx=None, testy=None):
    print(x, y, testx, testy)
    # print "Done"
    a = {}
    # print a['b']
    eq_(x, 'formalx')
    eq_(y, 'formaly')
    eq_(testx, 'keywordx')
    eq_(testy, 'keywordy')
    return True


def test_intern():
    for i in range(100):
        args = ('formalx', 'formaly')
        kwargs = {'testx': 'keywordx', 'testy': 'keywordy'}
        j = SimplePool.ThreadJob(printer, args, kwargs)
        t.add_job(j)
    t.start()
    print(t.is_active)
    t.finish()


test_intern()