Example #1
    def submit(self, fn, *a, **kw):
        context = get_context()

        def fnwrapper(*aa, **akw):
            # run the task inside the captured context (if any) so the
            # worker thread sees the same state as the submitting thread
            if context:
                with context:
                    return fn(*aa, **akw)
            else:
                return fn(*aa, **akw)

        res = super().submit(fnwrapper, *a, **kw)
        _threads_queues.clear()  # hack to stop joining from preventing ctrl-c
        return res
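
For context: in CPython, concurrent.futures.thread registers an atexit hook
(_python_exit) that joins every worker thread recorded in the module-level
_threads_queues mapping, which is what makes a pending pool block Ctrl-C at
interpreter shutdown. A minimal standalone sketch of the same hack; the
time.sleep workload is purely illustrative:

import time
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures.thread import _threads_queues

executor = ThreadPoolExecutor(max_workers=2)
executor.submit(time.sleep, 60)  # stand-in for a long-running task

# Emptying the registry stops the concurrent.futures atexit hook from
# joining the workers.  Note that on Python >= 3.9 threading itself still
# waits for non-daemon pool threads, which is why Example #3 below also
# clears threading._shutdown_locks.
_threads_queues.clear()
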
Example #2
 def _close_threads(self):
     # surface any failure recorded on the aggregate future first
     if self.futures._exception:
         raise self.futures._exception
     # drop the private thread/queue registries so that shutdown and the
     # atexit hook have nothing left to join
     _THREAD_POOL._threads.clear()
     _threads_queues.clear()
     self.futures.result()
Example #3
def save_urls(urls,
              name,
              ext,
              jobs=1,
              fail_confirm=True,
              fail_retry_eta=3600,
              reporthook=multi_hook):

    if not hit_conn_cache(urls[0]):
        clear_conn_cache()  # clear useless caches

    def run(*args, **kwargs):
        # the first positional argument is the callable to invoke
        # (worker.submit or save_url); the rest are passed through
        fn, *args = args
        futures = []
        for no, url in enumerate(urls):
            if status[no] == 1:
                continue
            futures.append(
                fn(*args,
                   url,
                   name,
                   ext,
                   status,
                   part=no,
                   reporthook=reporthook,
                   **kwargs))
            time.sleep(0.1)
        futures.reverse()
        return futures

    count = len(urls)
    status = [0] * count
    cost = 0
    tries = 1
    multi = False
    if count > 1:
        if jobs > 1:
            multi = True
        else:
            tries = 3
    print('Start downloading: ' + name, file=sys.stderr)
    reporthook(['init'])
    while tries:
        if count > 1 and os.path.exists(name + '.' + ext):
            print('Skipped: file has already been downloaded',
                  file=sys.stderr)
            return True
        tries -= 1
        reporthook(['start', not multi, status])
        if count == 1:
            save_url(urls[0], name, ext, status, reporthook=reporthook)
        elif jobs > 1:
            if min(count - sum(status), jobs) > 12:
            logger.warning(
                'number of active download threads is too large to work well!'
            )
            worker = ThreadPoolExecutor(max_workers=jobs)
            # do not call Thread.join() here, so KeyboardInterrupt can be caught in the main thread
            try:
                futures = run(worker.submit, save_url)
                downloading = True
                while downloading:
                    time.sleep(0.1)
                    for future in futures:
                        downloading = not future.done()
                        if downloading:
                            break
            except KeyboardInterrupt:
                from concurrent.futures.thread import _threads_queues
                from threading import _shutdown_locks
                _threads_queues.clear()
                _shutdown_locks.clear()
                print(file=sys.stderr)
                raise
        else:
            run(save_url, tries=1)
        downloaded, size, total, _cost = reporthook(['end'])
        cost += _cost
        print('\nCurrently downloaded %s, cost %s.'
              '\nTotal downloaded %s of %s, cost %s' %
              (human_size(downloaded), human_time(_cost), human_size(size),
               human_size(total), human_time(cost)),
              file=sys.stderr)
        succeed = 0 not in status
        if not succeed:
            if count == 1:
                logger.error('download failed')
            else:
                logger.error('download failed at parts: ' + ', '.join(
                    [str(no) for no, s in enumerate(status) if s == 0]))
            if not tries:
                # retry automatically when speed > 16 KiB/s and ETA < fail_retry_eta
                speed = downloaded / _cost if _cost and downloaded else 1
                eta = (total - size) / speed
                if speed > 16384 and 0 < eta < fail_retry_eta:
                    tries += 1
        if succeed or not tries and (not fail_confirm or input(
                'The ETA is %s, '
                'do you want to continue downloading? [Y] ' %
                human_time(eta)).upper() != 'Y'):
            break
        if not tries:
            tries += 1
        print('Restart downloading: ' + name, file=sys.stderr)
    return succeed
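
The interrupt handler above clears two registries: _threads_queues disarms
the concurrent.futures atexit hook, while threading._shutdown_locks (an
internal whose presence varies across CPython versions) disarms
threading._shutdown's wait on non-daemon threads. Both are private, so a
defensive sketch of the same escape hatch would guard each access:

import threading
from concurrent.futures.thread import _threads_queues

def abandon_pool_threads():
    """Best-effort hack: let the interpreter exit without joining pool threads."""
    _threads_queues.clear()  # concurrent.futures atexit hook skips its joins
    try:
        threading._shutdown_locks.clear()  # threading._shutdown skips its wait
    except AttributeError:
        pass  # registry absent or renamed on this interpreter version
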
Example #4
 def clear_thread_queues():
     _threads_queues.clear()
Example #5
def _abandon_all_tasks():
    """Only used for abandon_all_tasks and exit the main thread,
    to prevent the main thread waiting for unclosed thread while exiting."""
    _threads_queues.clear()
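
Examples #4 and #5 wrap the same one-liner as a named helper. When the goal
is only to stop waiting on work that has not started yet, the public API
added in Python 3.9 covers it without touching private state; a sketch with
an illustrative time.sleep workload:

import time
from concurrent.futures import ThreadPoolExecutor

executor = ThreadPoolExecutor(max_workers=2)
for _ in range(10):
    executor.submit(time.sleep, 5)

# Python 3.9+: drop the queued (not yet running) tasks and return at once.
# Unlike clearing _threads_queues, the two in-flight tasks still finish.
executor.shutdown(wait=False, cancel_futures=True)
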
Example #6
    try:
        for x in ports:
            for y in IPRange(argv[1], argv[2]):
                urls.append("http://" + str(y) + ":" + str(x))
    except AddrFormatError:
        exit(__usage__)

    with ThreadPoolExecutor(max_workers=int(argv[3])) as executor:
        workers = {executor.submit(scanner, x): x for x in urls}
        try:
            for worker in as_completed(workers):
                data = list(reduce(lambda x, y: x + y, worker.result().items()))
                if data[0]:
                    print(color.LIGHTCYAN_EX +
                          __msg__.format(link=workers[worker], model=data[1]))
        except KeyboardInterrupt:
            executor._threads.clear()
            _threads_queues.clear()

    try:
        # open once instead of re-opening (and never closing) per router
        with open("found_routers.txt", "a") as fp:
            for x, y in found_routers.items():
                print("%s: %s" % (y, x), file=fp)
    except (KeyboardInterrupt, RuntimeError):
        pass

    if len(found_routers) == 0:
        print(color.LIGHTRED_EX + "• No routers were found")
    else:
        print(color.LIGHTGREEN_EX + "\n• Routers found: %d" % len(found_routers))
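
Example #6 also has to clear executor._threads, because leaving the with
block calls shutdown(wait=True), which joins every thread recorded there;
_threads_queues only controls the separate atexit hook. A minimal
reproduction of that bail-out, where both attributes are CPython internals
and time.sleep stands in for scanner:

import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from concurrent.futures.thread import _threads_queues

with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(time.sleep, 60) for _ in range(4)]
    try:
        for future in as_completed(futures):
            future.result()
    except KeyboardInterrupt:
        executor._threads.clear()  # __exit__'s shutdown(wait=True) joins these
        _threads_queues.clear()    # the atexit hook joins these
        # on Python >= 3.9, threading._shutdown may still wait unless
        # _shutdown_locks is also cleared (see Example #3)
        raise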