예제 #1
0
 def scrap(self, url):
     """Extract the registered domain from *url* and queue a scraping job.

     Parameters:
         url: page URL to scrape. A ``None`` or empty URL is rejected
              with an alarm log and no job is queued.

     Side effects:
         Sets ``self.domain`` to the extracted domain and submits
         ``threaded_function`` to a freshly created ThreadPool.
         NOTE(review): a new 120-worker pool is built on every call and
         is never joined here — confirm ThreadPool executes jobs on
         ``add_job`` and cleans itself up.
     """
     # Truthiness covers both the None and the empty-string case,
     # replacing the explicit `is not None and len(...) > 0` check.
     if url:
         url_info = tldextract.extract(url)
         self.domain = url_info.domain
         # print() call form: consistent with the Python 3 style used
         # elsewhere in this file (see the final `print('finished!')`).
         print('[INFO] Scrapper::scrap, domain', self.domain)
         tp = ThreadPool(max_workers=120)
         tp.add_job(threaded_function, [self.domain, url])
     else:
         print('[ALARM] Scrapper:scrap, invalid url')
예제 #2
0
#!/usr/bin/python

import Queue

from ThreadPool import *
from util import *

# Fan one read_result job out per host on a thread pool sized to the
# host list, then block until every job has completed.
host_list = get_hosts()
job_queue = Queue.Queue()
pool = ThreadPool(job_queue, len(host_list))

for host in host_list:
    pool.add_job(read_result, host)

pool.wait_for_complete()
예제 #3
0
                # Remaining encoder base settings (list opened above this
                # visible fragment as enc_base_cfg).
                ('InputSampleBitDepth', input_bit_depth),
                ('SampleBitDepth', sample_bit_depth),
            ]
            # Per-frame-type QP settings: P- and B-frames use progressively
            # higher QP offsets (+1 / +4) relative to the I-frame QP.
            enc_other_cfg = [
                ('QPIFrame', int(QP)),
                ('QPPFrame', int(QP) + 1),
                ('QPBFrame', int(QP) + 4),
                ('stdout', encoder_log),
                # ('err_log', err_log),
                # ('xxEnable', 1)
            ]
            # Join both config groups into a single encoder command line.
            enc_command = ' '.join(
                parse_param(enc_base_cfg) + parse_param(enc_other_cfg))

            # Decoding parameters: presumably decodes bin_stream back to a
            # YUV file for verification.  NOTE(review): the key is
            # 'encoder' although the value names the decoder binary —
            # confirm parse_param treats this key as the executable name.
            dec_cfg = [
                ('encoder', 'TAppDecoder'),
                ('-b', bin_stream),
                ('-o', dec_yuv),
                ('stdout', decoder_log),
            ]
            dec_command = ' '.join(parse_param(dec_cfg))

            # Queue the encode+decode command pair as one pool job.
            my_pool.add_job(job_func, enc_command, dec_command)
            pass

    # Start all queued jobs and block until every one has finished.
    my_pool.start()
    my_pool.wait_complete()

    print('finished!')