Example #1
def init(name, number=10):
	global cnt
	global visited
	# blog_name = input('Enter blog name: ')
	# thread_num = input('Enter number of threads: ')
	blog_name = name.lower()
	th_num = int(number)
	url = 'http://blog.csdn.net/' + blog_name + '/'
	opener = urllib.request.build_opener(urllib.request.HTTPHandler)
	headers = [
		('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko')
	]
	urllib.request.install_opener(opener)
	opener.addheaders = headers

	queue.put(url)
	visited |= {url}
	cnt = 0

	for i in range(th_num):
		t = CsdnBlogSpider(queue,opener,blog_name)
		t.setDaemon(True)
		t.start()
	queue.join()
	print('--------end!!!-----')
	print('Total crawled: ' + str(cnt))
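The CsdnBlogSpider class is not part of this snippet. For the queue.join() above to ever return, each worker has to call task_done() once per URL it takes; a minimal sketch of such a worker, with everything beyond the constructor signature assumed:

import threading

class CsdnBlogSpider(threading.Thread):
    # hypothetical worker matching the CsdnBlogSpider(queue, opener, blog_name) call above
    def __init__(self, queue, opener, blog_name):
        super().__init__()
        self.queue = queue
        self.opener = opener
        self.blog_name = blog_name

    def run(self):
        while True:
            url = self.queue.get()      # blocks until a URL is queued
            try:
                self.opener.open(url)   # fetch the page (link extraction and counting omitted)
            except Exception:
                pass                    # a real spider would log the failure
            finally:
                self.queue.task_done()  # required, or queue.join() above never returns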
Example #2
 def flush(self):
     """Forces a flush from the internal queue to the server"""
     queue = self.queue
     size = queue.qsize()
     queue.join()
     # Note that this message may not be precise, because of threading.
     self.log.debug('successfully flushed about %s items.', size)
Example #3
 def thbaslat(self,sublar,num_lines):
     lock = threading.Lock()

     for subum in sublar:
         if subum.strip():
             if self.filem!="":
                 queue.put(subum.strip())
             else:
                 queue.put(subum.strip()+"."+self.domain)

     threads = []
     exit = threading.Event()

     for i in range(self.thread):
         t = DnsSorgu(queue,lock,num_lines,exit,self.filem)
         t.setDaemon(True)
         threads.append(t)
         t.start()

     try:
         queue.join()
     except KeyboardInterrupt:
         print("Exit")
         sys.exit(1)
Example #4
def main():
    '''
    print(len(common.rq_index), common.rq_index[-5:])
    for down_rq in common.rq_index[-100:]:
        print(down_rq)
    '''
    '''
        fsdir = common.base_diretory + "\\sina\\" + down_rq + "\\"
        if (not os.path.exists(fsdir)):
            print("not exist...", fsdir)
            os.mkdir(fsdir)
    '''

    print("hahah")
    readdm()
    # dms.append("002079")
    # dms.append("002340")
    # dms.append("000917")
    # dms.append("600388")
    for dm in dms:
        # print "h:",dm
        queue.put(dm)
    print("h:")

    dirs = ["201208", "201209"]

    # spawn a pool of threads, and pass them queue instance
    for i in range(10):
        t = ThreadUrl(queue, dirs)
        t.setDaemon(True)
        t.start()

    # wait on the queue until everything has been processed
    queue.join()
Example #6
    def thread(threadNum, queue):

        def loop():
            while True:
                argv = queue.get()
                succeed = False
                for i in range(5):
                    try:
                        func(*argv)
                        succeed = True
                        break
                    except StopIteration:
                        pass
                if not succeed:
                    print(func.__name__, argv, 'FAILED')
                    print('-'*80)
                queue.task_done()
        
        for i in range(threadNum):
            t = threading.Thread(target = loop)
            t.setDaemon(True)
            t.start()
            #func(*argv)

        queue.join()
Example #8
def main(codes):
    print("start thread")
    ts = time()
    # codes=sl.get_all_toplist()

    # pool = ThreadPool()

    # client_id = os.getenv('IMGUR_CLIENT_ID')
    # if not client_id:
    #     raise Exception("Couldn't find IMGUR_CLIENT_ID environment variable!")
    # download_dir = setup_download_dir()
    # links = [l for l in get_links(client_id) if l.endswith('.jpg')]
    # Create a queue to communicate with the worker threads
    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    if len(codes) >= 8:
        thread_n = 8
    else:
        thread_n = len(codes)
    # Create 8 worker threads
    print("thread is ::::", thread_n)
    print("")
    for x in range(thread_n):
        worker = DownloadWorker(queue)
        # Setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    # Put the tasks into the queue
    for code in codes:
        logger.info('Queueing {}'.format(code))
        queue.put(code)
    # Causes the main thread to wait for the queue to finish processing all the tasks
    queue.join()
    logger.info('Took %s', time() - ts)
Example #9
 def parse_potential_recent_deaths(self, refresh):
     from queue import Queue, Empty
     queue = Queue()
     for name, info in self.chars.items():
         if (info.is_online() or time.time() - info.last_online() < 1200) and info.vocation != 'N':
             queue.put(name)
     task_count = queue.qsize()
     def get_info():
         while True:
             try:
                 name = queue.get(block=False)
                 tasks_left = queue.qsize()
             except Empty:
                 return
             info = tibiacom.char_info(name)
             self.chars[name].deaths = info["deaths"]
             refresh()
             queue.task_done()
             print("pzlock update: %d/%d" % ((task_count - tasks_left), task_count))
     threads = []
     for i in range(10):
         thrd = threading.Thread(target=get_info)
         thrd.start()
         threads.append(thrd)
     queue.join()
     for t in threads:
         t.join()
Example #10
def create_jobs():
    global queue

    for i in range(1, NUMBER_OF_THREADS + 1):
        queue.put(i)

    queue.join()  # block the main thread until every queued item has been processed
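queue.join() only returns after task_done() has been called once for every item put above, so the workers (not shown here) must look roughly like this minimal sketch; handle_job() is a hypothetical stand-in for the real per-item work:

import threading

def worker():
    while True:
        item = queue.get()
        try:
            handle_job(item)       # hypothetical per-item work
        finally:
            queue.task_done()      # one task_done() per get(), or join() blocks forever

for _ in range(NUMBER_OF_THREADS):
    threading.Thread(target=worker, daemon=True).start()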
Example #11
def downloadd(txt1,path,houz):
	
	'''-----------------------------------------------------------------------------------------------------'''
	# read the URLs from the text file
	txt=open(txt1,'r')
	for line in txt.readlines():
		queue.put(line.strip('\n').strip())  # put each URL into the queue
	txt.close()
	'''-----------------------------------------------------------------------------------------------------'''
	
	'''
	# 1. plain single-threaded download
	start=time.time()
	for line in lines:
		try:
			urllib.request.urlretrieve(line.strip('\n').strip(), filename='%s%s%s' % ('./pictures/', uuid.uuid4(), '.jpg'))
		except:
			pass
	print(time.time()-start)
	'''
	
	
	# 2. multithreaded download
	start=time.time()
	for i in range(20):
		t=ThreadUrl(queue,path,houz)
		t.setDaemon(True)  # daemon thread
		t.start()
	queue.join()
	print(time.time()-start)
Example #12
def startworkers(queue, nthreads):
    finished = []
    for i in range(nthreads):
        t = threading.Thread(name="Thread #{0}".format(i + 1), target=worker, args=(queue, finished), daemon=False)
        t.start()
    queue.join()
    return finished
Example #14
def main():
    print("Warming up Robot supervisor ....")
    init()

    threads = []

    bluetooth = Bluetooth(lock, queue)
    motor = Motor(lock, queue)
    blink_20 = Blink(lock, queue, 20, _gpio.HIGH)
    blink_21 = Blink(lock, queue, 21, _gpio.HIGH)

    blink_20.init()
    blink_21.init()
    motor.init(5, 50)
    motor.setup_motor("right", (16, 0, 0))
    motor.setup_motor("left", (0, 0, 0))

    bluetooth.start()
    motor.start()
    blink_20.start()
    blink_21.start()

    queue.join()

    bluetooth.join()
    motor.join()
    blink_20.join()
    blink_21.join()
Example #15
def main():
    for i in range(100):
        t = ThreadUrl(queue,port)
        t.setDaemon(True)
        t.start()
    for n in range(len(a)):
        queue.put(a[n])
    queue.join()
Example #16
def test(l, ts=20):
    ll = [i.rstrip() for i in l]
    for j in range(ts):
        t = MyThread(queue, ll)
        t.setDaemon(True)
        t.start()
    for url in ll:
        queue.put(url)
    queue.join()
    opertaeExcel.write_excel_xlsx("tdk检测结果.xlsx", "tdk", array)
Example #17
def main():
    for i in range(30):
        t = ThreadPro(queue)
        t.setDaemon(True)
        t.start()

    for i in open('order.txt', 'r'):
        queue.put(i)

    queue.join()
Example #18
def main():
	for tr in trains:
		queue.put(tr)
	for i in range(DAEMONS):
		t = Thread(queue, print_lock)
		t.setDaemon(True)
		t.start()


	queue.join()
Example #19
def main():
    queue = Queue()
    producer = Producer('Pro.', queue)
    consumer = Consumer('Con.', queue)
    producer.start()
    consumer.start()
    queue.join()
    producer.join()
    consumer.join()
    print("All threads terminate!")
Example #20
def main():
    p = Producer("Producer task0", queue)  # producer
    p.setDaemon(True)
    p.start()
    startConsumer(30)  # start n consumer threads

    # make sure all tasks have been produced
    p.join()

    # wait until every task has been processed
    queue.join()
Example #21
def main():
    for passwd in password_list:
        pwd = passwd.rstrip()
        queue.put(pwd)
    for i in range(5):
        t = Mythread(queue)  # spawn a worker thread
        t.setDaemon(True)
        t.start()
        time.sleep(0.3)
    # once every queued item has been processed, the block is released
    queue.join()  # the main thread waits here until all queued tasks are done, then continues
Example #22
def test_base_build():
    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    for tdlfile in utils.TEMPLATE_FILES:
        queue.put(tdlfile)
    for i in range(utils.MAX_THREADS):
        t = threading.Thread(target=_build_base_from_queue, args=(queue, ))
        t.daemon = True
        t.start()
    queue.join()
    for tdlfile in utils.TEMPLATE_FILES:
        yield _assert_base_complete, tdlfile
Example #23
def main():
    for i in range(5):
        t = process(queue)
        t.setDaemon(True)
        t.start()
    ports = [
        21, 22, 23, 25, 53, 69, 80, 110, 135, 137, 161, 443, 3306, 3389, 8080,
        2121, 3218, 1521, 1524, 1364, 1433, 8081, 9090
    ]
    for num in ports:
        queue.put(num)
    queue.join()
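The process class is not shown; a worker that drains this port queue plausibly performs a TCP connect check, sketched below. The class name and queue usage come from the call above; the target host and scan logic are assumptions:

import socket
import threading

class process(threading.Thread):
    # hypothetical worker matching the process(queue) call above
    def __init__(self, queue, host='127.0.0.1'):
        super().__init__()
        self.queue = queue
        self.host = host

    def run(self):
        while True:
            port = self.queue.get()
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.settimeout(2)
            if s.connect_ex((self.host, port)) == 0:   # 0 means the port accepted the connection
                print('port %d open' % port)
            s.close()
            self.queue.task_done()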
Example #24
def test_provider_build():
    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    for tdlfile in utils.TEMPLATE_FILES:
        for provider in utils.PROVIDERS:
            queue.put((tdlfile, provider))
    for i in range(utils.MAX_THREADS):
        t = threading.Thread(target=_build_provider_from_queue, args=(queue, ))
        t.daemon = True
        t.start()
    queue.join()
    for tdlfile in utils.TEMPLATE_FILES:
        for provider in utils.PROVIDERS:
            yield _assert_provider_complete, tdlfile, provider
Example #25
def main():
    # spawn a pool of threads and hand them the queue; start 10 workers
    for i in range(10):
        t = ThreadNum(queue)
        t.daemon = True
        t.start()

    # populate the queue with data
    for num in range(10):
        queue.put(num)
        print("input: " + str(num))
    # wait on the queue until everything has been processed
    queue.join()
Example #26
def main():
    # spawn a pool of threads and hand them the queue; start 5 workers
    for i in range(5):
        t = ThreadNum(queue)
        t.setDaemon(True)
        t.start()

    # populate the queue with data
    for num in range(10):
        queue.put(num)

    # wait on the queue until everything has been processed
    queue.join()
Example #27
def update(num_workers, force, feed):
    if feed:
        need_to_update_feeds = BoardFeed.objects.filter(rss=feed)
    else:
        new_feeds = BoardFeed.objects.filter(refreshed_at__isnull=True)
        outdated_feeds = BoardFeed.objects.filter(url__isnull=False)
        if not force:
            outdated_feeds = BoardFeed.objects.filter(
                refreshed_at__lte=datetime.utcnow() - MIN_REFRESH_DELTA)
        need_to_update_feeds = list(new_feeds) + list(outdated_feeds)

    tasks = []
    for feed in need_to_update_feeds:
        tasks.append({
            "id": feed.id,
            "board_id": feed.board_id,
            "name": feed.name,
            "rss": feed.rss,
            "mix": feed.mix,
            "conditions": feed.conditions,
            "is_parsable": feed.is_parsable,
        })

    threads = []
    for i in range(num_workers):
        t = threading.Thread(target=worker)
        t.start()
        threads.append(t)

    # put tasks to the queue
    for item in tasks:
        queue.put(item)

    # wait until tasks are done
    queue.join()

    # update timestamps
    updated_boards = {feed.board_id for feed in need_to_update_feeds}
    Board.objects.filter(id__in=updated_boards).update(
        refreshed_at=datetime.utcnow())

    # remove old data
    Article.objects.filter(created_at__lte=datetime.now() -
                           DELETE_OLD_ARTICLES_DELTA).delete()

    # stop workers
    for i in range(num_workers):
        queue.put(None)

    for t in threads:
        t.join()
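The worker target is not shown. It has to call task_done() for every task so the queue.join() returns, and exit when it sees the None sentinel queued by the shutdown loop; a minimal sketch, with update_feed() standing in for the real refresh logic:

def worker():
    while True:
        task = queue.get()
        if task is None:           # sentinel put by the shutdown loop above
            queue.task_done()
            return
        try:
            update_feed(task)      # hypothetical stand-in for the real refresh
        finally:
            queue.task_done()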
Example #28
def run(a):
    global curmusiclist
    curmusiclist = []
    # put every parser type into the task queue
    for i in Type:
        queue.put(i)
    # start one worker thread per parser type
    for i in range(len(Type)):
        threadName = 'Thread' + str(i)
        Worker(threadName, queue, a)
    # block until all queued tasks are done
    queue.join()
    print(curmusiclist)
    return curmusiclist
Example #29
def main():

    # spawn a pool of threads, and pass them queue instance
    for i in range(5):
        t = ThreadUrl(queue)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for host in hosts:
        queue.put(host)

    # wait on the queue until everything has been processed
    queue.join()
Example #30
def main(tasks):
    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    #populate queue with tasks 
    for task in tasks:
        queue.put(task)
    # create a list of threads and pass the queue as its argument
    for i in range(6):
        mythread = Worker(queue)
        #mythread.setDaemon(True)
        mythread.start()

    # wait for the queue to finish
    queue.join()
    print('all tasks completed')
Example #31
def program():
    # Threads without locking
    print("Threads without locking:")
    thread_1 = ExampleThread(1, "Thread 1", 1)
    thread_2 = ExampleThread(2, "Thread 2", 2)
    thread_1.start()
    thread_2.start()

    # Do something and then block until the threads are finished
    time.sleep(1)
    thread_1.join()
    thread_2.join()

    # Threads with locking
    print("\nThreads with locking:")
    thread_3 = ExampleThreadLock(1, "Thread 3", 1)
    thread_4 = ExampleThreadLock(2, "Thread 4", 2)
    thread_3.start()
    thread_4.start()

    # Do something and then block until the threads are finished
    time.sleep(5)
    thread_3.join()
    thread_4.join()

    # Threads with queueing
    print("Threads with queueing:")
    thread_5 = ExampleThreadQueue(1, "Thread 5", queue)
    thread_6 = ExampleThreadQueue(2, "Thread 6", queue)
    thread_5.start()
    thread_6.start()

    # Add items to the queue
    source = [
        "Pigeondust",
        "Ill Sugi",
        "J'adore Banania",
    ]

    for item in source:
        queue.put(item)

    # Add items that allow the threads to terminate when the work is done
    queue.put(None)
    queue.put(None)

    # Block until all jobs in the queue are finished
    queue.join()
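ExampleThreadQueue is not shown; given that exactly one None is queued per thread, its run() plausibly looks like this minimal sketch (the constructor signature comes from the calls above, the body is assumed):

import threading

class ExampleThreadQueue(threading.Thread):
    def __init__(self, thread_id, name, queue):
        super().__init__()
        self.thread_id = thread_id
        self.name = name
        self.queue = queue

    def run(self):
        while True:
            item = self.queue.get()
            if item is None:              # one sentinel per thread stops the workers
                self.queue.task_done()
                return
            print("%s processing %s" % (self.name, item))
            self.queue.task_done()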
Example #32
def train(config, num_workers):
    excepts = []
    queue = mp.JoinableQueue()
    for gamma in config.gammas:
        for width in config.widths:
            for eta in config.etas:
                queue.put((width, gamma, eta, config.runs))
    # spawn worker processes
    workers = [mp.Process(target=train_worker, args=(queue,))
        for i in range(num_workers)]
    [w.start() for w in workers]

    # Wait for work to complete
    queue.join()
    queue.close()
    [w.join() for w in workers]
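train_worker is not shown, and no stop sentinels are queued, so each process presumably exits on its own once the queue drains, e.g. via get_nowait(). A minimal sketch under that assumption, with run_experiment() standing in for the real training call:

import queue as std_queue  # only for the Empty exception

def train_worker(task_queue):
    while True:
        try:
            width, gamma, eta, runs = task_queue.get_nowait()
        except std_queue.Empty:
            return                       # queue drained: let the process exit
        try:
            run_experiment(width, gamma, eta, runs)   # hypothetical training call
        finally:
            task_queue.task_done()       # JoinableQueue requires task_done() for join()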
Example #33
def fast_download(image_results, path=None, threads=10):
    # print path
    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    total = len(image_results)

    for image_result in image_results:
        queue.put(image_result)

    # spawn a pool of threads, and pass them queue instance
    for i in range(threads):
        t = ThreadUrl(queue, path, total)
        t.setDaemon(True)
        t.start()

    # wait on the queue until everything has been processed
    queue.join()
Example #34
    def thread_pool(self, num_of_threads, tasks, queue, func):
        threads = []
        for thread_num in range(1, num_of_threads + 1):
            thread = threading.Thread(target=func, args=(thread_num, ))
            threads.append(thread)
            thread.start()

        for item in tasks:
            time.sleep(3)
            queue.put(item)

        queue.join()
        for i in range(num_of_threads):
            queue.put(None)
        for thread_end in threads:
            thread_end.join()
Example #36
def OnKeyboardEvent(event):
    key = event.Key

    if key == "F10":
        print("Starting auto clicker")
        # Start consumers
        clicker = Clicker(queue, 0.001)
        clicker.start()
    elif key == "F4":
        print("Stopping auto clicker")
        # Add exit message to queue
        queue.put("exit")
        # Wait for all of the tasks to finish
        queue.join()

    # return True to pass the event to other handlers
    return True
Example #37
def crop_dir(src_dir, dst_dir, is_train=True):
    if os.path.exists(dst_dir):
        shutil.rmtree(dst_dir)
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)

    queue = Queue()  # needs "from queue import Queue"; writing queue = queue.Queue() here would shadow the module and raise UnboundLocalError
    for i in range(1):
        t = CropThread(queue)
        t.setDaemon(True)
        t.start()

    image_list = FFDio.collect_data_set(src_dir)

    for image_name in image_list:
        queue.put((dst_dir, image_name, is_train))
    queue.join()
Example #38
def update(num_workers, force, feed):
    if feed:
        need_to_update_feeds = BoardFeed.objects.filter(rss=feed)
    else:
        never_updated_feeds = BoardFeed.objects.filter(refreshed_at__isnull=True)
        if not force:
            need_to_update_feeds = BoardFeed.objects.filter(
                rss__isnull=False,
                refreshed_at__lte=datetime.utcnow() - MIN_REFRESH_DELTA
            )
        else:
            need_to_update_feeds = BoardFeed.objects.filter(rss__isnull=False)
        need_to_update_feeds = list(never_updated_feeds) + list(need_to_update_feeds)

    tasks = []
    for feed in need_to_update_feeds:
        tasks.append({
            "id": feed.id,
            "board_id": feed.board_id,
            "name": feed.name,
            "rss": feed.rss
        })

    threads = []
    for i in range(num_workers):
        t = threading.Thread(target=worker)
        t.start()
        threads.append(t)

    # put tasks to the queue
    for item in tasks:
        queue.put(item)

    # wait until tasks are done
    queue.join()

    # update timestamps
    updated_boards = {feed.board_id for feed in need_to_update_feeds}
    Board.objects.filter(id__in=updated_boards).update(refreshed_at=datetime.utcnow())

    # stop workers
    for i in range(num_workers):
        queue.put(None)

    for t in threads:
        t.join()
Example #39
class Worker(threading.Thread):  # class and loop headers reconstructed; they were truncated in the source
    def run(self):
        while True:
            im = queue.get()
            if im is None:          # sentinel queued below stops the worker
                queue.task_done()
                return
            f = io.BytesIO()
            im.save(f, test_format, optimize=1)
            data = f.getvalue()
            result.append(len(data))
            im = Image.open(io.BytesIO(data))
            im.load()
            sys.stdout.write(".")
            queue.task_done()

t0 = time.time()

threads = 20
jobs = 100

for i in range(threads):
    w = Worker()
    w.start()

for i in range(jobs):
    queue.put(im)

for i in range(threads):
    queue.put(None)

queue.join()

print()
print(time.time() - t0)
print(len(result), sum(result))
print(result)
Example #40
 def fitness(self):
     # Wait until every organism in this population has had its fitness calculated.
     # We could wait only for this organism's fitness, but that's more complicated...
     queue.join()
     return self.result_dict['fitness']
Example #41
 def flush(self):
     """Forces a flush from the internal queue to the server"""
     queue = self.queue
     size = queue.qsize()
     queue.join()
     self.log.debug('successfully flushed {0} items.'.format(size))
Example #42
def main(argv, environ):
    options, remainder = getopt.getopt(argv[1:], "w:c:h:a:", ["warn=", "crit=", "host=", "address="])
    status = {'OK': 0, 'WARNING': 1, 'CRITICAL': 2, 'UNKNOWN': 3}
    host = None
    addr = None

    if 3 != len(options):
        usage(argv[0])
        sys.exit(status['UNKNOWN'])

    for field, val in options:
        if field in ('-w', '--warn'):
            warn_limit = int(val)
        elif field in ('-c', '--crit'):
            crit_limit = int(val)
        elif field in ('-h', '--host'):
            host = val
        elif field in ('-a', '--address'):
            addr = val
        else:
            usage(argv[0])
            sys.exit(status['UNKNOWN'])

    if host and addr:
        print("ERROR: Cannot use both host and address, choose one.")
        sys.exit(status['UNKNOWN'])

    if host:
        try:
            addr = socket.gethostbyname(host)
        except:
            print("ERROR: Host '%s' not found - maybe try a FQDN?" % host)
            sys.exit(status['UNKNOWN'])
    addr_parts = str.split(addr, '.')
    addr_parts.reverse()
    check_name = '.'.join(addr_parts)
    # We set this to make sure the output is nice. It's not used except for the output after this point.
    host = addr

    ###### Thread stuff:

    # spawn a pool of threads, and pass them queue instance
    for i in range(10):
        t = ThreadRBL(queue)
        t.setDaemon(True)
        t.start()

    # populate queue with data
    for blhost in serverlist:
        queue.put((check_name, blhost))

    # wait on the queue until everything has been processed
    queue.join()

    ###### End Thread stuff

    warn = False
    if len(on_blacklist) >= warn_limit:
        warn = True

    crit = False
    if len(on_blacklist) >= crit_limit:
        crit = True
    if warn == True:
        if crit == True:
            print('CRITICAL: %s on %s spam blacklists|%s' % (host, len(on_blacklist), on_blacklist))
            sys.exit(status['CRITICAL'])
        else:
            print('WARNING: %s on spam blacklist %s' % (host, on_blacklist[0],))
            sys.exit(status['WARNING'])
    else:
        print('OK: %s not on known spam blacklists' % host)
        sys.exit(status['OK'])
Example #43
# spawn worker threads; the enclosing loop header was lost in the source, so the count is assumed
for i in range(NUM_WORKERS):  # NUM_WORKERS is hypothetical
    t = threading.Thread(target=download_worker)
    t.daemon = True
    t.start()

# Add image URLs to queue
page = 1
count = 0
while True:
    pageContent = open_page(page)
    links = IMG_PATH_PATTERN.finditer(pageContent)
    for link in links:
        url = get_url_from_path(link.group('path'))
        filename = IMG_FILE_PATTERN.search(url).group()
        saveFile = os.path.join(SAVE_DIR, filename)

        if OVERWRITE or not os.path.isfile(saveFile):
            queue.put(url)
            count += 1
        else:
            print('[-] Skipped %s (already exists)' % filename)

    # break if no next page
    if has_next_page(pageContent, page):
        page += 1
    else:
        break

queue.join() # block until all urls processed
print('[*] Download finished! (%d files)' % count)
print('[*] Time taken: %s' % pretty_time(time.time() - timeStart))
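download_worker is not shown; a minimal sketch of a worker that drains the URL queue, using urllib.request.urlretrieve as an assumed stand-in for the original download call:

import os
import urllib.request

def download_worker():
    while True:
        url = queue.get()
        filename = IMG_FILE_PATTERN.search(url).group()
        try:
            urllib.request.urlretrieve(url, os.path.join(SAVE_DIR, filename))
            print('[+] Downloaded %s' % filename)
        except Exception as e:
            print('[!] Failed %s: %s' % (url, e))
        finally:
            queue.task_done()    # lets the queue.join() above return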