def test_server_list(cls, target_list, shared_settings):
    """
    Tests connectivity with each server of the target_list and returns
    the list of online servers.
    """
    # Fan the connection checks out over a pool of worker threads.
    pool = ThreadPool()
    for target_str in target_list:
        pool.add_job((cls._test_server, (target_str, shared_settings)))
    pool.start(min(len(target_list), cls.MAX_THREADS))

    # First yield every target that answered.
    for (_job, reachable_target) in pool.get_result():
        yield reachable_target

    # A single None separates the reachable targets from the failures.
    yield None

    # Then yield the exception recorded for each unreachable target.
    for (_job, error) in pool.get_error():
        yield error

    pool.join()
    return
def test_connectivity(self, timeout):
    """
    Tests connectivity with each server of the target_list and returns
    the list of online servers.
    """
    # Dispatch one connection attempt per configured target.
    pool = ThreadPool()
    for target_str in self._target_list:
        pool.add_job((self._test_server, (target_str, timeout)))

    worker_count = min(len(self._target_list), self.MAX_THREADS)
    pool.start(worker_count)

    # Record and yield every target that could be reached.
    for (_job, reachable) in pool.get_result():
        self._targets_OK.append(reachable)
        yield reachable

    # Keep the failures (exceptions) around for later reporting.
    for (_job, error) in pool.get_error():
        self._targets_ERR.append(error)

    pool.join()
    return
def test_connectivity(self, timeout):
    """
    Tests connectivity with each server of the target_list and returns
    the list of online servers.
    """
    targets = self._target_list
    # One connection-check job per configured target.
    thread_pool = ThreadPool()
    for target_str in targets:
        thread_pool.add_job((self._test_server, (target_str, timeout)))
    thread_pool.start(min(len(targets), self.MAX_THREADS))

    # Successful probes: remember them and hand them to the caller.
    for _job, live_target in thread_pool.get_result():
        self._targets_OK.append(live_target)
        yield live_target

    # Failed probes: stash the exceptions for later inspection.
    for _job, err in thread_pool.get_error():
        self._targets_ERR.append(err)

    thread_pool.join()
    return
def test():
    """Smoke-test the thread pool: queue 15 jobs and time their completion."""
    print('start testing')
    pool = ThreadPool()
    for job_id in range(15):
        # Brief pause between submissions so workers pick jobs up gradually.
        time.sleep(0.1)
        pool.add_job(test_job, job_id)
    started = time.time()
    pool.wait_for_complete()
    print('s:' + str(time.time() - started))
    print('end testing')
def GetMatchPage(season, league, matches):
    """Fetch the match pages of the given league/season via a thread pool."""
    pool = ThreadPool()
    print('Start updating ' + season + ' ' + league + ' matches...')
    league_dir = Global.Dir_Root + season + '\\' + league
    # Queue one download job per match, in a stable (sorted) order.
    for match_id in sorted(matches.keys()):
        match_url = matches.get(match_id)
        pool.add_job(GetPageText, league_dir, match_id, match_url, len(matches))
    pool.wait_for_complete()
    # Leading newline keeps the summary readable after per-match progress output.
    if matches:
        print('\n' + season + ' ' + league + ' updating complete!\n')
    else:
        print(season + ' ' + league + ' updating complete!\n')
class Worker(QThread):
    """Qt thread that feeds image-compression jobs to a thread pool and
    reports each finished image back to the UI (or to the console)."""

    # Fired once per finished image so the GUI can refresh.
    update_ui_signal = pyqtSignal()

    def __init__(self, parent=None):
        QThread.__init__(self, parent)
        # Pool workers push each finished image onto this queue.
        self.toDisplay = Queue()
        self.threadpool = ThreadPool(max_workers=cpu_count())

    def __del__(self):
        self.threadpool.shutdown()

    def compress_file(self, images, showapp, verbose, imagelist):
        """Start the worker thread."""
        for image in images:
            #FIXME:http://code.google.com/p/pythonthreadpool/issues/detail?id=5
            time.sleep(0.05)
            self.threadpool.add_job(image.compress, None,
                                    return_callback=self.toDisplay.put)
        self.showapp = showapp
        self.verbose = verbose
        self.imagelist = imagelist
        self.start()

    def run(self):
        """Compress the given file, get data from it and call update_table."""
        pool = self.threadpool

        def still_working():
            # ThreadPool exposes no public idle check, so peek at its
            # name-mangled internals: active workers left, or queued jobs.
            return not (pool._ThreadPool__active_worker_count == 0
                        and pool._ThreadPool__jobs.empty())

        while self.showapp or still_working():
            image = self.toDisplay.get()
            self.update_ui_signal.emit()
            if self.showapp or not self.verbose:
                continue
            # we work via the commandline
            if image.retcode == 0:
                row = ImageRow(image)
                print("File: " + row['fullpath'] + ", Old Size: "
                      + row['oldfilesizestr'] + ", New Size: "
                      + row['newfilesizestr'] + ", Ratio: " + row['ratiostr'])
            else:
                print("[error] {} could not be compressed".format(
                    image.fullpath), file=sys.stderr)
class Worker(QThread):
    """Qt thread that feeds image-compression jobs to a thread pool and
    reports each finished image back to the UI (or to the console)."""

    # Emitted once per finished image so the GUI can refresh its table.
    update_ui_signal = pyqtSignal()

    def __init__(self, parent=None):
        QThread.__init__(self, parent)
        # Pool workers hand finished images back through this queue.
        self.toDisplay = Queue()
        self.threadpool = ThreadPool(max_workers=cpu_count())

    def __del__(self):
        self.threadpool.shutdown()

    def compress_file(self, images, showapp, verbose, imagelist):
        """Start the worker thread."""
        for image in images:
            #FIXME:http://code.google.com/p/pythonthreadpool/issues/detail?id=5
            time.sleep(0.05)
            self.threadpool.add_job(image.compress, None,
                                    return_callback=self.toDisplay.put)
        self.showapp = showapp
        self.verbose = verbose
        self.imagelist = imagelist
        self.start()

    def run(self):
        """Compress the given file, get data from it and call update_table."""
        tp = self.threadpool
        # Keep draining results while the GUI is up, or until the pool has
        # no active workers and no queued jobs.  ThreadPool exposes no public
        # idle check, so this reaches into its name-mangled internals.
        while self.showapp or not (tp._ThreadPool__active_worker_count == 0
                                   and tp._ThreadPool__jobs.empty()):
            # Blocks until a worker delivers the next finished image.
            image = self.toDisplay.get()
            self.update_ui_signal.emit()
            if not self.showapp and self.verbose:  # we work via the commandline
                if image.retcode == 0:
                    ir = ImageRow(image)
                    print("File: " + ir['fullpath'] + ", Old Size: "
                          + ir['oldfilesizestr'] + ", New Size: "
                          + ir['newfilesizestr'] + ", Ratio: " + ir['ratiostr'])
                else:
                    print("[error] {} could not be compressed".format(image.fullpath),
                          file=sys.stderr)
def simulate_flow(self, minute):
    """Replay `minute` minutes of recorded traffic through the network.

    For each recorded minute, loads the pickled flow table and the
    per-second flow schedule, then dispatches every flow as a UDP
    transfer on a fresh thread pool.

    Fixes over the previous version: the inner 60-second loop no longer
    shadows the outer minute index `i`, the redundant `f.close()` calls
    inside the `with` blocks are gone (the context manager already
    closes the file), and the dead `{}` pre-initializers were removed.
    """
    for minute_idx in xrange(minute):
        # Load this minute's flows and their per-second ordering.
        with open(flow_record(minute_idx), "rb") as f:
            flows = pickle.load(f)
        with open(flow_seq_record(minute_idx), "rb") as f:
            flow_seq = pickle.load(f)

        # Simulate one minute of traffic on a thread pool.
        thread_pool = ThreadPool(thread_num=ThreadParameter.max_num)
        for second in xrange(60):
            for idx in flow_seq[second]:
                flow = flows[idx]
                thread_pool.add_job(func=self.net.udp_flow, time_seq=second,
                                    src=flow.src, dst=flow.dst, size=flow.size)
        thread_pool.start()
    return
from Util import diskWriter def deskWriteJob(arglist, kwargs): filewt = diskWriter() info = arglist[0] print "in the thread is:" + str(info) filename = (info["timestamp"].split(" ")[0]) + "-" + info["dev_name"] filepath = "data/" + info["dev_name"] + "/" targetpath = os.path.dirname(filepath) if not os.path.isdir(targetpath): os.makedirs(targetpath) filename = filepath + filename filewt.write(filename, str(info) + '\n') print 'test ser start' udp_server = UDPser('127.0.0.1', 9876) METAclient = () tdpool = ThreadPool(10) while True: data, addr = udp_server.readmeg() parsertest = Parser() info = parsertest.parserMeg(data) print "info json = " print info if info["dev_name"] not in METAclient: METAclient.__add__(tuple(info["dev_name"])) tdpool.add_job(deskWriteJob, info, info) print "out the thread is:" + str(info)
import os from Util import diskWriter def deskWriteJob( arglist, kwargs): filewt = diskWriter() info = arglist[0] print "in the thread is:" + str(info) filename = (info["timestamp"].split(" ")[0]) + "-"+ info["dev_name"] filepath = "data/"+ info["dev_name"] + "/" targetpath = os.path.dirname(filepath) if not os.path.isdir(targetpath): os.makedirs(targetpath) filename = filepath + filename filewt.write( filename, str(info)+'\n') print 'test ser start' udp_server = UDPser( '127.0.0.1', 9876) METAclient =() tdpool = ThreadPool(10) while True: data,addr = udp_server.readmeg() parsertest = Parser() info = parsertest.parserMeg(data) print "info json = " print info if info["dev_name"] not in METAclient: METAclient.__add__( tuple(info["dev_name"])) tdpool.add_job( deskWriteJob, info, info) print "out the thread is:" + str(info)