Example 1
    def scan(self):
        if self.source:
            threadpool = ThreadPool.ThreadPool(1)
        else:
            threadpool = ThreadPool.ThreadPool(5)

        for rule in self.rules:
            threadpool.addtask(rule.detect, (), (self.results, rule))
            #if rule.detect():
            #    self.results.append(rule)
        threadpool.start()
        threadpool.wait()
        #threadpool.clear()
        #threadpool.stop()
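This snippet assumes a pool where tasks are queued first, start() launches the worker threads, and wait() blocks until they finish. A minimal sketch of that interface (an illustration under assumptions, not the project's actual ThreadPool module):

import queue
import threading

class MiniThreadPool:
    def __init__(self, num_workers):
        self.num_workers = num_workers
        self.tasks = queue.Queue()
        self.workers = []

    def addtask(self, func, args=(), extra=()):
        # Example 1 passes a second tuple such as (results, rule); forwarding
        # both tuples to the callable is an assumption made for this sketch.
        self.tasks.put((func, args, extra))

    def start(self):
        for _ in range(self.num_workers):
            t = threading.Thread(target=self._worker, daemon=True)
            t.start()
            self.workers.append(t)

    def _worker(self):
        # Drain the queue; exit when no tasks remain.
        while True:
            try:
                func, args, extra = self.tasks.get_nowait()
            except queue.Empty:
                return
            func(*(args + extra))

    def wait(self):
        for t in self.workers:
            t.join()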
Example 2
def unzip_zip_file_async(zipfilename, unziptodir, end_cb=None):
    if not os.path.exists(unziptodir):
        os.makedirs(unziptodir)

    def unzip(zipfilename, unziptodir, end_cb=None):
        with zipfile.ZipFile(zipfilename, 'r') as zfobj:
            names = zfobj.namelist()
            for name in names:
                name = name.replace('\\', '/')
                if name.endswith('/'):
                    os.makedirs(os.path.join(unziptodir, name), exist_ok=True)
                else:
                    ext_filename = os.path.join(unziptodir, name)
                    ext_dir = os.path.dirname(ext_filename)
                    os.makedirs(ext_dir, exist_ok=True)
                    data = zfobj.read(name)
                    with open(ext_filename, 'wb') as outfile:
                        outfile.write(data)
        if end_cb is not None:
            end_cb(zipfilename, names)

    t = ThreadPool.ThreadPool().Thread(target=unzip,
                                       args=(zipfilename, unziptodir, end_cb))
    t.start()
    return t
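Because the helper returns the worker thread, a caller that needs the extracted files before proceeding can simply join it. A hypothetical usage sketch (file names are placeholders):

t = unzip_zip_file_async('data.zip', 'out/',
                         end_cb=lambda zf, names: print(zf, len(names)))
t.join()  # block until extraction finishes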
Example 3
def get_weibo_comment(weibo_content_id):
    url = URL_COMMIT_URL.replace('id=4184562986557218',
                                 'id=' + weibo_content_id)
    content = request_url(url, 'get', '')
    try:
        result_json = json.loads(content)
        ok_flag = result_json['ok']
        test_comment_pool = ThreadPool(10)
        if ok_flag == 1:
            comment_data_arr = result_json['data']['data']
            get_comment_by_arr(weibo_content_id, comment_data_arr)
            page_count = result_json['data']['max']
            for page_index in range(2, page_count + 1):
                url = url.replace('&page=' + str(page_index - 1),
                                  '&page=' + str(page_index))
                content = request_url(url, 'get', '')
                try:
                    result_json = json.loads(content)
                    ok_flag = result_json['ok']
                    if ok_flag == 1:
                        comment_data_arr = result_json['data']['data']
                        #get_comment_by_arr(weibo_content_id,comment_data_arr)
                        # Use .run(...), matching the pool API in the other
                        # weibo examples; the original invoked the pool object.
                        test_comment_pool.run(get_comment_by_arr,
                                              (weibo_content_id, comment_data_arr),
                                              callback=None)
                except Exception as e:
                    continue
            test_comment_pool.close()
    except Exception as e:
        pass
Example 4
 def preprocessPercentileRatios(self):
     print "preprocessPercentileRatios start"
     distributionsFile = self.getDatasetSlidingSizesFile()
     if os.path.isfile(distributionsFile):
         # The distributions file has already been processed
         print "The distribution file exists"
         return
     self.initializePropertiesComputeStructures(False)
     
     print "computing the ratios"
     try:
         zpa = zipfile.ZipFile(distributionsFile,"w",zipfile.ZIP_DEFLATED)
         zpa.writestr("dummy.txt","dummy file")
         zpa.close()
         self.ziplock = threading.Lock()
         # Create a pool with five worker threads
         pool = ThreadPool.ThreadPool(5)
         sys.setcheckinterval(1000)  # Python 2 API; Python 3 uses sys.setswitchinterval
         for windowSize in self.slidingWindowSizes:
             if self.useWindowThreading:
                 pool.queueTask(self.preprocessWindowSize, windowSize, None)
             else:
                 self.preprocessWindowSize(windowSize)
         # When all tasks are finished, allow the threads to terminate
         if self.useWindowThreading:
             print "Joining the threads"
             pool.joinAll()             
     except:            
         os.unlink(distributionsFile)
         raise
     print "preprocessPercentileRatios end"
Example 5
 def __init__(self, store_location):
     #logging.debug("OFFLINE IMAGE CACHE STARTUP")
     self._store_location = store_location
     self._threadpool = ThreadPool.ThreadPool(5,
                                              "OfflineImageCache",
                                              daemon=True)
     self._cachers = {}  # dict of urlcacher objects?
Example 6
 def __init__(self,
              crawlername,
              workerThreadNum,
              pollInterval=0.5,
              pollTimeout=None,
              downloadTimeout=5):
     self.threadPool = ThreadPool(workerThreadNum)
     self.crawlername = crawlername
     self.pollTimeout = pollTimeout
     self.crawlerThread = CrawlerThread(self.threadPool, pollTimeout)  # note: pollInterval is unused in this snippet
     self.mp3Downloader = MP3Downloader(downloadTimeout)
Example 7
def process_jobs(num_threads, job_queue):
    """ Process the jobs in parallel """

    # Create the thread pool
    tp = ThreadPool.ThreadPool(num_threads)

    # Process the jobs
    job_queue = tp.process_jobs(job_queue)

    for job in job_queue:
        print(job.thread.thread_id, job.start_time)
Example 8
def siper_weibo_comment(weibo_link_dict):
    comment_pool = ThreadPool(10)
    print 'weibo comment siper start ...'
    for weibo_content_id in weibo_link_dict:
        created_at = weibo_link_dict[weibo_content_id]
        print weibo_content_id, created_at
        comment_pool.run(get_weibo_comment, (weibo_content_id, ),
                         callback=None)
        # Parse the weibo comments
        #get_weibo_comment(weibo_content_id)
    print 'weibo comment siper end ...'
    comment_pool.close()
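The run(func, args, callback)/close() calls belong to a project-local pool. For comparison, a rough sketch of the same fan-out using the standard library's multiprocessing.pool.ThreadPool (get_weibo_comment is the function from Example 3):

from multiprocessing.pool import ThreadPool as StdThreadPool

def siper_weibo_comment_std(weibo_link_dict):
    pool = StdThreadPool(10)
    for weibo_content_id in weibo_link_dict:
        # apply_async mirrors comment_pool.run: schedule the task, keep looping
        pool.apply_async(get_weibo_comment, (weibo_content_id,))
    pool.close()  # no more tasks will be submitted
    pool.join()   # wait for all scheduled tasks to finish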
Example 9
 def __init__(self):
     self.countPosition = 0
     self.threadPool = ThreadPool.ThreadPool(1000)
     self.diffTime = 1114
     self.deviationTime = 200
     self.timer = QBasicTimer()
     self.__noteMap = collections.defaultdict(list)  # <startTime, note[]>
     self.__player = None
     self.__musicPlayerView = None
     self.sheetRecorder = None
     self.scoreRecorder = None
     self.sheet = None
Example 10
    def __init__(self):
        self.fairness_factor = 0.2
        self.fairness_scores = {}
        self.share_scores = {}
        self.dag_planners = {}
        self.alpha = 0.5
        self.available_bandwidth = 1200  # 10Gbps = 1.2 GBps = 1200 MBps
        self.cache_block_size = 1  # 1 MByte
        self.cache = cache.Cache(10000)

        self.n_thp = 10
        self.d3n_conn = thp.ThreadPool(self.n_thp)
Example 11
def siper_data(user_id, start_date, end_date):
    main_pool = ThreadPool(5)
    start_date = datetime.strptime(start_date, '%Y-%m-%d')
    end_date = datetime.strptime(end_date, '%Y-%m-%d')
    # Collect the weibo user's profile
    get_user_info(user_id)
    # Fetch the weibo container ID
    container_id = get_containerid(user_id)
    # Collect weibo link timestamps and IDs
    weibo_link_dict = get_weibo_link(user_id, container_id, start_date,
                                     end_date)
    main_pool.run(siper_weibo_content, (weibo_link_dict, ), callback=None)
    main_pool.run(siper_weibo_comment, (weibo_link_dict, ), callback=None)
Example 12
def siper_weibo_content(weibo_link_dict):
    content_pool = ThreadPool(10)
    print 'weibo content siper start ....'
    # Collect weibo post content
    for weibo_content_id in weibo_link_dict:
        created_at = weibo_link_dict[weibo_content_id]
        content_pool.run(get_weibo_content,
                         (user_id, weibo_content_id, created_at),
                         callback=None)
        # Collect weibo post content (note: user_id comes from module scope)
        #get_weibo_content(user_id,weibo_content_id,created_at)
    print 'weibo content siper end ....'
    content_pool.close()
Example 13
    def __init__(self, sizeOfBuffer, numberOfCores, numberOfThreads,
                 timeQuantum, contextSwitchTime):
        Buffer.Buffer.initBufferCount()
        self.buffer = Buffer.Buffer(sizeOfBuffer)
        self.numberOfCores = numberOfCores
        self.cores = []

        for y in range(numberOfCores):
            self.cores.append(Core.Core(y, 0))

        self.threadPool = ThreadPool.ThreadPool(numberOfThreads)
        self.timeQuantum = timeQuantum
        self.contextSwitchTime = contextSwitchTime
Example 14
def siper_weibo_comment(weibo_link_dict, user_id):
    comment_pool = ThreadPool(MAX_THREAD)
    print 'weibo comment siper start ...'
    for weibo_content_id in weibo_link_dict:
        created_at = weibo_link_dict[weibo_content_id]
        print weibo_content_id, created_at
        #get_weibo_comment_spider(weibo_content_id,user_id)
        comment_pool.run(get_weibo_comment_spider,
                         (weibo_content_id, user_id),
                         callback=None)
        # Parse the weibo comments
        #get_weibo_comment(weibo_content_id)
    print 'weibo comment siper end ...'
    comment_pool.close()
Example 15
 def __init__(self, parent):
     super().__init__(parent)
     self.fixedSprites = list(range(0, 8))  # eight fixed top arrows, indexed by arrow number
     self.arrowSprites = []
     self.levelFeedbackSprites = [None] * 2  # level feedback for two players: [0] -> p1, [1] -> p2
     self.setFocusPolicy(Qt.StrongFocus)
     self.threadPool = ThreadPool(20)
     self.spritePrototypeFactory = SpritePrototypeFactory()
     self.spritePrototypeFactory.prepare(MyGameView.GAME_WIDTH,
                                         MyGameView.GAME_HEIGHT)
     self.createAllTopFixedArrow()
     self.scoreRecordersMap = defaultdict(ScoreRecorder)
Example 16
	def __init__(self, view, home, share_path):
		PTVhtml.PTVhtml.__init__(self, view, home, share_path)
		self._htmlview = None
		self._document_lock = threading.Lock()
		self._image_cache = SimpleImageCache.SimpleImageCache()
		self._css = ""
		self._last_link_time = 0
		
		self._view = view
		
		with open(os.path.join(share_path, "gtkhtml.css")) as f:
			self._css = f.read()
		self._image_pool = ThreadPool.ThreadPool(5, "PlanetView")
		self._dl_total = 0
		self._dl_count = 0
Example 17
    def __init__(self,
                 app,
                 media_dir,
                 progress_callback=None,
                 finished_callback=None):
        self.index = 0
        #should this be lucene compatible?
        if utils.RUNNING_HILDON:
            max_downloads = 1
        else:
            max_downloads = 5
        self._style = BYDATE
        self.pool = ThreadPool.ThreadPool(max_downloads, "MediaManager")
        self.downloads = []
        self.db = app.db
        self.time_appendix = 0
        self.bt_settings = {'min_port': 6881, 'max_port': 6999, 'ul_limit': 0}
        self.id_time = 0
        self.quitting = False
        self._net_connected = True
        self.pause_state = RUNNING
        if finished_callback:
            self.app_callback_finished = finished_callback
        else:
            self.app_callback_finished = self._basic_finished_callback

        if progress_callback:
            self.app_callback_progress = progress_callback
        else:
            self.app_callback_progress = self._basic_progress_callback
        home = self.db.home

        if media_dir[0] == '~':
            media_dir = os.getenv('HOME') + media_dir[1:]

        try:
            os.stat(media_dir)
        except OSError:
            try:
                os.mkdir(media_dir)
            except OSError:
                raise NoDir, "error creating " + media_dir
        self._media_dir = media_dir

        app.connect('online-status-changed', self.__online_status_changed)
        app.connect('new-database', self.__new_database_cb)
Example 18
def getRemoteFileByUrlAsync(url, path, progress_cb=None, end_cb=None):
    file_size = 0
    try:
        file_size = int(urlopen(url).info().get('Content-Length', -1))
    except Exception as e:
        print(e)
        return None

    def getRemoteFunc(url, path, file_size, progress_cb=None, end_cb=None):
        try:
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))

            pbar = tqdm(total=file_size,
                        initial=0,
                        desc=path,
                        unit='B',
                        unit_scale=True)
            r = requests.get(url, stream=True, verify=False)
            curSize = 0.0
            chunksize = 1024 * 1024 * 4
            with open(path, "wb") as f:
                for chunk in r.iter_content(chunk_size=chunksize):
                    f.write(chunk)
                    curSize += len(chunk)
                    if progress_cb is not None:
                        progress_cb(curSize, file_size)
                    # Advance by the bytes actually received; the final
                    # chunk is usually smaller than chunksize.
                    pbar.update(len(chunk))
            pbar.close()
            if end_cb is not None:
                end_cb(path, True)
        except Exception as e:
            print(e)
            if end_cb is not None:
                end_cb(path, False)

    t = ThreadPool.ThreadPool().Thread(target=getRemoteFunc,
                                       args=(url, path, file_size, progress_cb,
                                             end_cb))
    t.start()
    return t
Example 19
def unzip_7z_file_async(zipfilename, unziptodir, end_cb=None):
    if not os.path.exists(unziptodir):
        os.makedirs(unziptodir)

    def unzip(zipfilename, unziptodir, end_cb=None):
        try:
            archive = py7zr.SevenZipFile(zipfilename, mode='r')
            names = archive.getnames()
            archive.extractall(path=unziptodir)
            archive.close()
            if end_cb is not None:
                end_cb(zipfilename, names)
        except Exception as e:
            print(e)

    t = ThreadPool.ThreadPool().Thread(target=unzip,
                                       args=(zipfilename, unziptodir, end_cb))
    t.start()
    return t
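py7zr's SevenZipFile also works as a context manager, which guarantees the archive is closed even when extraction fails; a sketch of the same worker body written that way:

def unzip(zipfilename, unziptodir, end_cb=None):
    try:
        with py7zr.SevenZipFile(zipfilename, mode='r') as archive:
            names = archive.getnames()
            archive.extractall(path=unziptodir)
        if end_cb is not None:
            end_cb(zipfilename, names)
    except Exception as e:
        print(e)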
Example 20
def spider_page(url, fid):
    content = request_url(url, 'get', '')
    soup = BeautifulSoup(content, 'lxml')
    tbody_list = soup.find('table', id='threadlisttableid').find_all('tbody')
    main_pool = ThreadPool(MAX_THREAD)
    for tbody in tbody_list:
        id = tbody.get('id')
        id = id.replace('separatorline', '').replace('stickthread', '') \
               .replace('normalthread', '').replace('_', '')
        if id:
            content_url = 'https://bbs.ichunqiu.com/thread-' + str(id) + '-1-1.html'
            #ichunqiu_sipder(content_url,fid)
            main_pool.run(ichunqiu_sipder, (content_url, fid), callback=None)
    main_pool.close()
Example 21
def get_weibo_comment_spider(content_id, user_id):
    url = 'https://weibo.com/aj/v6/comment/big?id=' + content_id + '&filter=all&page=1'
    try:
        page_count = get_weibo_comment_ext(url, content_id, user_id)
        comment_pool = ThreadPool(MAX_THREAD)
        for i in range(2, page_count + 1):
            url = 'https://weibo.com/aj/v6/comment/big?id=' + content_id + '&filter=all&page=' + str(i)
            try:
                #get_weibo_comment_ext(url,content_id,user_id)
                comment_pool.run(get_weibo_comment_ext,
                                 (url, content_id, user_id),
                                 callback=None)
            except Exception as e1:
                print e1
                traceback.print_exc()
                continue
        comment_pool.close()
    except Exception as e:
        traceback.print_exc()
Example 22
def test():
    import random
    import time
    import datetime

    def do_work(data):
        time.sleep(random.randint(1, 3))
        res = str(datetime.datetime.now()) + " " + str(data)  # space separates timestamp and payload
        return res

    def print_result(request, result):
        print "---Result from request %s : %r" % (request.requestID, result)

    main = ThreadPool(3)
    for i in range(40):
        req = WorkRequest(do_work, args=[i], kwds={}, callback=print_result)
        main.putRequest(req)
        print "work request #%s added." % req.requestID

    print '-' * 20, main.workersize(), '-' * 20

    counter = 0
    while True:
        try:
            time.sleep(0.5)
            main.poll()
            if (counter == 5):
                print "Add 3 more workers threads"
                main.createWorkers(3)
                print '-' * 20, main.workersize(), '-' * 20
            if (counter == 10):
                print "dismiss 2 workers threads"
                main.dismissWorkers(2)
                print '-' * 20, main.workersize(), '-' * 20
            counter += 1
        except NoResultsPending:
            print "no pending results"
            break

    main.stop()
    print "Stop"
Example 23
    def __init__(self, filepath):
        with open(filepath, 'r', encoding='utf-8') as f:  # parse the config file
            self.config = json.loads(f.read())
        self.max_Task = self.config['max_Task']  # maximum number of tasks
        self.thread_count = self.config['threadCount']  # number of threads
        self.M_BLL = Mission_BLL.Mission()  # mission table
        self.SI_BLL = SpiderInfo_BLL.SpiderInfo()  # configuration table
        self.HI_BLL = HoneyInfo_BLL.HoneyInfo()  # database info
        self.EI_BLL = ExceptionInfo_BLL.ExceptionInfo()  # exception info
        self.Exception_Model = Model.exceptionInfo("", "")  # exception model
        self.PRI = ["high", "common", "low"]  # priorities
        self.T_Pool = ThreadPool(self.thread_count, self.max_Task)  # task thread pool

        # Logging setup
        self.loger = logging.getLogger()
        self.loger.setLevel(logging.DEBUG)
        hfile = logging.FileHandler("./Hive/LOG/ServerLogInfo.log")
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        hfile.setFormatter(formatter)
        self.loger.addHandler(hfile)
        self.loger.info("Server initialized!")
Example 24
    def __init__(self,
                 server_address,
                 RequestHandlerClass,
                 bind_and_activate=True,
                 thread_num=10,
                 RequestInterceptorClasses=[]):
        """Constructor.  
            thread_num 线程池默认的线程数量
            
            """
        self.requestInterceptorClasses = RequestInterceptorClasses
        #初始化线程池
        self.threadPool = ThreadPool.ThreadPool(thread_num)
        #设置urllib opener, HTTP server接收到请求时,需要通过urllib 转发出请求,这里全局设置urllib
        cj = http.cookiejar.CookieJar()
        #proxyHandler = urllib.request.ProxyHandler({'http','127.0.0.1:8888'}) 代理
        opener = urllib.request.build_opener(
            urllib.request.HTTPCookieProcessor(cj), KIZHTTPRedirectHandler,
            KIZHttpErrorHandler)
        urllib.request.install_opener(opener)

        super().__init__(server_address, RequestHandlerClass,
                         bind_and_activate)
Example 25
																	cek.close()
																	cekpoint.append(user+pass7)
		except:
			pass

	p = ThreadPool(30)
	p.map(main, id)
	print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mBlackMafia\033[1;95m♡──────────•◈•──────────♡"
	print "  \033[1;93m«---•◈•---Developed By love---•◈•---»" #Dev:love_hacker
	print '\033[1;91mProcess Has Been Completed\033[1;92m....'
	print "\033[1;91mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;91m"+str(len(oks))+"\033[1;97m/\033[1;95m"+str(len(cekpoint))
	print """
Example 26
import tkinter as tk
import tkinter.messagebox
import socket
import time
import threading
import queue
import sys
from Encryption import encryptPasswd, encodeId, decodeId, pad, readMessage, readRoomList, readUserList
from Settings import HOST, PORT, COMMAND_CODE
import ThreadPool

client_thread_pool = ThreadPool.ThreadPool(3, True)
MessageQueue = queue.Queue()
RoomsList = list()
# True once the rooms list has been read
RoomsListFlag = True
UsersList = list()
# True once the users list has been read
UsersListFlag = True
ReturnCode = 0
# True once the return message has been read
ReturnCodeFlag = True
ChatRooms = dict()


def clientReceiveLogic(conn):
    global RoomsListFlag, RoomsList, ReturnCode, ReturnCodeFlag, loginPage
    conn = conn[0]
    while True:
        data = MessageQueue.get()
        Command = int.from_bytes(data[0:2], byteorder='big')
Example 27

#python computeFVs.py videos vid_in vid_out
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("vid_path", help="Directory of the input videos", type=str)
    parser.add_argument("vid_in", help="list of input videos in .txt file", type=str)
    parser.add_argument("output_dir", help="output directory to save FVs (.fisher files)", type=str)
    parser.add_argument("gmm_list", help="File of saved list of GMMs", type=str)

    args = parser.parse_args()

    f = open(args.vid_in, 'r')
    input_videos = f.readlines()
    f.close()
    input_videos = [line.split()[0].split('/')[-1] for line in [video.rstrip() for video in input_videos]]

    ###Just to prevent overwriting already processed vids
    completed_vids = [filename.split('.')[0] for filename in os.listdir(args.output_dir) if filename.endswith('.npz')]
    overlap = [vid for vid in input_videos if vid.split('.')[0] in completed_vids]



    #Multi-threaded FV construction.
    numThreads = 10
    pool = ThreadPool.ThreadPool(numThreads)
    for vid in input_videos:
        if vid not in overlap:
            pool.add_task(processVideo, vid, args.vid_path, args.output_dir, args.gmm_list)
    pool.wait_completion()
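add_task()/wait_completion() is the signature of a widely copied Queue-based pool recipe. A minimal self-contained sketch of such a pool (an assumption; the script's own ThreadPool module may differ in detail):

import queue
import threading

class RecipeThreadPool:
    def __init__(self, num_threads):
        self.tasks = queue.Queue()
        for _ in range(num_threads):
            threading.Thread(target=self._worker, daemon=True).start()

    def _worker(self):
        while True:
            func, args, kwargs = self.tasks.get()
            try:
                func(*args, **kwargs)
            finally:
                self.tasks.task_done()

    def add_task(self, func, *args, **kwargs):
        self.tasks.put((func, args, kwargs))

    def wait_completion(self):
        self.tasks.join()  # blocks until every queued task is marked done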
Example 28
    return asyncDecorator

if __name__ == "__main__":
    from time import sleep
    from ThreadPool import *
    try:
        xrange
    except NameError:
        xrange = range
    class TestClass():
        @Async()
        def testDecorated(self):
            print (345)
    testClass = TestClass()
    testClass.testDecorated()
    @Async(executor=ThreadPool(5))
    def func(a, b):
        print ("func called")
        sleep(1)
        print ("func exit:" + str(a))

    @Async()
    def funcWithoutExecutor(a):
        print (a)

    for x in xrange(1, 10):
        funcWithoutExecutor("noExecutor:" + str(x))


    for x in xrange(1, 15):
        func(x, 2)
Example 29
            author = string_tag.find('a',class_='xi2').string
            break
    for date_div in soup.find_all('div',class_='cl',attrs={'style':'font-size: 12px; color: #888888;'}):
        tmp_str = str(date_div)
        date_arr =  re.findall('\d{4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}',tmp_str)
        if len(date_arr) > 0:
            content_date = date_arr[0]
            break
    content_id = MySQLdb.escape_string(content_id)
    title = MySQLdb.escape_string(title)
    author = MySQLdb.escape_string(author)
    content_date = MySQLdb.escape_string(content_date)
    connection = pool.connection()
    cursor = connection.cursor()
    sql = "INSERT IGNORE INTO ichunqiu_content(id,content_id,title,url,author,content_date,create_date,update_date) VALUES (DEFAULT,'"+content_id+"','"+title+"','"+url+"','"+author+"','"+content_date+"',NOW(),NOW())"
    #print sql
    cursor.execute(sql)
    connection.commit()
    cursor.close()
    connection.close()
    print 'content_id -->',content_id,' ok'

if __name__ == '__main__':
    main_pool = ThreadPool(MAX_THREAD)
    for i in range(33742,34832+1):
        url = 'https://bbs.ichunqiu.com/thread-'+str(i)+'-1-1.html'
        #ichunqiu_sipder(url)
        main_pool.run(ichunqiu_sipder, (url,), callback=None)
        #break
        #print url
    main_pool.close()
Example 30
from interface import implements
from Task import *
from ThreadPool import *
from threading import Lock


class ExampleTask(implements(Task)):
    def __init__(self, num, print_lock):
        self.num = num
        self.print_lock = print_lock

    def executeTask(self):
        self.print_lock.acquire()
        print("Executing task # " + str(self.num))
        self.print_lock.release()


if __name__ == "__main__":
    pool = ThreadPool(5)
    pool.initialize()
    print_lock = Lock()

    for i in range(50):
        task = ExampleTask(i, print_lock)
        pool.submit_task(task)

    pool.stop_threads()
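For context, one possible ThreadPool satisfying this example's initialize()/submit_task()/stop_threads() contract (a sketch, not the repository's actual class). stop_threads() enqueues one sentinel per worker so each thread drains the queue and then exits:

import queue
import threading

class SketchThreadPool:
    _SENTINEL = object()

    def __init__(self, num_threads):
        self.num_threads = num_threads
        self.tasks = queue.Queue()
        self.threads = []

    def initialize(self):
        for _ in range(self.num_threads):
            t = threading.Thread(target=self._worker)
            t.start()
            self.threads.append(t)

    def _worker(self):
        while True:
            task = self.tasks.get()
            if task is self._SENTINEL:
                return
            task.executeTask()  # Task interface from the example above

    def submit_task(self, task):
        self.tasks.put(task)

    def stop_threads(self):
        for _ in self.threads:
            self.tasks.put(self._SENTINEL)
        for t in self.threads:
            t.join()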