def execute(self, frees, threads): """ Begins multithreaded execution Parameters ---------- frees: list(Freeway) list of freeways to begin multithreaded execution threads: number of threads """ from java.util.concurrent import Executors, ExecutorCompletionService pool = Executors.newFixedThreadPool(threads) ecs = ExecutorCompletionService(pool) for f in frees: ecs.submit(f) submitted = len(frees) while submitted > 0: result = ecs.take().get() print str(result) submitted -= 1
def __del__(self):
    # Drop the held result when this wrapper is garbage-collected.
    del self.result

from java.util.concurrent import Executors, ExecutorCompletionService
import java.lang.Runtime as JavaRunTime

rt = JavaRunTime.getRuntime()
# over 2 because most intel machines report double the number of real cores,
# returned by this function call, -1 because we want to leave something for WAT/ResSim/the OS.
MAX_THREADS = max(1, rt.availableProcessors()/2 - 1)
# NOTE(review): the bare "..." placeholders below are not valid Python — this
# chunk is a documentation-style sketch, not runnable code.
...
pool = Executors.newFixedThreadPool(numThreads)
ecs = ExecutorCompletionService(pool)
jobs = list()

# Sketch of the per-task compute function; the "..." in the parameter list
# stands for additional parameters elided from this example.
def runKnnAndStorageAreas(tableLabel, modelFPart, ...):
    # function to do compute
    # call kNN compute functions and post-processors for interpolations
    ...
    return listOfTimeSeriesContainers

# Wrap one job per CSV task row; jobs are presumably submitted to ecs later
# (beyond this chunk) — TODO confirm.
for task in listOfTasksCsvFile:
    ...  # get the parameters from task
    ...
    jobs.append(ThreadWrapper(runKnnAndStorageAreas, tableLabel, modelFPart, ..., jobName=tableLabel))
    # (inside a try: opened before this chunk) read the raw configuration text
    json_string = data_file.read()
except EnvironmentError, err:
    # Could not open/read the configuration file.
    print str(err)
    usage()
    sys.exit(3)

try:
    config = json.loads(json_string.decode('utf-8'))
except:
    # NOTE(review): bare except keeps any parse failure from crashing with a
    # traceback; the type of the failure is reported manually below.
    print "JSON from file '" + configurationFile + "' is malformed."
    e = sys.exc_info()[0]
    print str(e)
    sys.exit(4)

# One pool thread per configured input root.
pool = Executors.newFixedThreadPool(len(config["input"]))
ecs = ExecutorCompletionService(pool)

def scheduler(roots):
    # Generator over the configured input roots; yields them unchanged.
    for inputConfig in roots:
        yield inputConfig

def getClassByName(module, className):
    # Resolve a dotted class name, walking attribute-by-attribute.
    # With no module, strip an optional "services." prefix and start the
    # lookup from the __services__ registry.
    if not module:
        if className.startswith("services."):
            className = className.split("services.")[1]
        l = className.split(".")
        m = __services__[l[0]]
        return getClassByName(m, ".".join(l[1:]))
    elif "." in className:
        # (body continues beyond this chunk)
def client(ip, port, message):
    """Connect to (ip, port) over TCP, send *message*, and read one reply.

    The reply (up to 1024 bytes) is read but discarded; this client exists
    only to exercise the threaded server.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((ip, port))
        sock.send(message)
        response = sock.recv(1024)
        # print threading.currentThread().getName(), response
    finally:
        # BUG FIX: close the socket even if connect/send/recv raises,
        # so a failed request cannot leak a file descriptor.
        sock.close()

if __name__ == "__main__":
    # ephemeral ports should work on every Java system now
    HOST, PORT = "localhost", 0
    server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
    ip, port = server.server_address
    # Start a daemon thread with the server -- that thread will then start one
    # more thread for each request
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.setDaemon(True)
    server_thread.start()
    # create a client pool to run all client requests
    pool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() + 1)
    ecs = ExecutorCompletionService(pool)
    for i in range(4000):
        # empirically, this will exhaust heap when run with 16m heap
        # BUG FIX: bind i as a default argument. The original
        # "lambda: client(ip, port, ... % i)" captured i late, so a task
        # running after the loop advanced would send the wrong message.
        ecs.submit(lambda i=i: client(ip, port, "Hello World %i" % i))
        ecs.take()  # wait until we have a thread available in the pool
    pool.shutdown()
# Default display size as (height, width) — TODO confirm ordering.
DEFAULT_SIZE = (480, 640)

# Coordinate-system constants, re-exported at module level for convenience.
CoordinateSystems = enum('AREA', 'LATLON', 'IMAGE')
AREA = CoordinateSystems.AREA
LATLON = CoordinateSystems.LATLON
IMAGE = CoordinateSystems.IMAGE

# Anchor-placement constants, likewise re-exported.
Places = enum(ULEFT='Upper Left', CENTER='Center')
ULEFT = Places.ULEFT
CENTER = Places.CENTER

# Shared fixed-size pool for ADDE requests; at most 5 run concurrently.
MAX_CONCURRENT = 5
pool = Executors.newFixedThreadPool(MAX_CONCURRENT)
ecs = ExecutorCompletionService(pool)

def _satBandUrl(**kwargs):
    # Build the ADDE SATBAND text-request URL by %-interpolating kwargs.
    # needs at least server, port, debug, user, and proj
    # follow AddeImageChooser.appendMiscKeyValues in determining which extra keys to add
    satbandUrlFormat = "adde://%(server)s/text?&FILE=SATBAND&COMPRESS=gzip&PORT=%(port)s&DEBUG=%(debug)s&VERSION=1&USER=%(user)s&PROJ=%(proj)s"
    return satbandUrlFormat % kwargs

# NOTE: remember that Callable means that the "task" returns some kind of
# result from CallableObj.get()!
# RunnableObj.get() just returns null.
class _SatBandReq(Callable):
    # Java Callable wrapping one SATBAND fetch; the call() implementation
    # continues beyond this chunk.
    def __init__(self, url):
        # url: fully-formed ADDE request URL (see _satBandUrl).
        self.url = url
import hashlib

# Cap on simultaneous downloads.
MAX_CONCURRENT = 3

SITES = [
    "http://www.cnn.com/",
    "http://www.nytimes.com/",
    "http://www.washingtonpost.com/",
    "http://www.dailycamera.com/",
    "http://www.timescall.com/",
    # generate a random web site name that is very, very unlikely to exist
    "http://" + hashlib.md5("unlikely-web-site-" + os.urandom(4)).hexdigest() + ".com",
]

pool = Executors.newFixedThreadPool(MAX_CONCURRENT)
ecs = ExecutorCompletionService(pool)

# this function could spider the links from these roots;
# for now just schedule these roots directly
def scheduler(roots):
    # Trivial generator: yields each root unchanged.
    for site in roots:
        yield site

# submit tasks indefinitely
# (Downloader is a Callable defined elsewhere in this file — TODO confirm.)
for site in scheduler(SITES):
    ecs.submit(Downloader(site))

# work with results as soon as they become available
# (the result-draining loop continues beyond this chunk)
submitted = len(SITES)
    # (inside a try: opened before this chunk) read the raw configuration text
    json_string = data_file.read()
except EnvironmentError, err:
    # Could not open/read the configuration file.
    print str(err)
    usage()
    sys.exit(3)

try:
    config = json.loads(json_string.decode('utf-8'))
except:
    # NOTE(review): bare except keeps any parse failure from crashing with a
    # traceback; the type of the failure is reported manually below.
    print "JSON from file '" + configurationFile + "' is malformed."
    e = sys.exc_info()[0]
    print str(e)
    sys.exit(4)

# One pool thread per configured input root.
pool = Executors.newFixedThreadPool(len(config["input"]))
ecs = ExecutorCompletionService(pool)

def scheduler(roots):
    # Generator over the configured input roots; yields them unchanged.
    for inputConfig in roots:
        yield inputConfig

def getClassByName(module, className):
    # Resolve a dotted class name, walking attribute-by-attribute.
    # With no module, strip an optional "services." prefix and start the
    # lookup from the __services__ registry.
    if not module:
        if className.startswith("services."):
            className = className.split("services.")[1]
        l = className.split(".")
        m = __services__[l[0]]
        return getClassByName(m, ".".join(l[1:]))
    elif "." in className:
        # Descend one attribute level; recursion continues beyond this chunk.
        l = className.split(".")
        m = getattr(module, l[0])
import hashlib

# Cap on simultaneous downloads.
MAX_CONCURRENT = 3

SITES = [
    "http://www.cnn.com/",
    "http://www.nytimes.com/",
    "http://www.washingtonpost.com/",
    "http://www.dailycamera.com/",
    "http://www.timescall.com/",
    # generate a random web site name that is very, very unlikely to exist
    "http://" + hashlib.md5(
        "unlikely-web-site-" + os.urandom(4)).hexdigest() + ".com",
]

pool = Executors.newFixedThreadPool(MAX_CONCURRENT)
ecs = ExecutorCompletionService(pool)

# this function could spider the links from these roots;
# for now just schedule these roots directly
def scheduler(roots):
    # Trivial generator: yields each root unchanged.
    for site in roots:
        yield site

# submit tasks indefinitely
# (Downloader is a Callable defined elsewhere in this file — TODO confirm.)
for site in scheduler(SITES):
    ecs.submit(Downloader(site))

# work with results as soon as they become available
submitted = len(SITES)
while submitted > 0:
    # take() blocks until a download finishes; get() returns its result.
    # (loop body continues beyond this chunk)
    result = ecs.take().get()
def __init__(self, reporter=None, max_concurrency=1000):
    """Create the worker manager and its backing thread pool.

    Parameters
    ----------
    reporter : optional
        callback/object used to report progress; stored as-is
        (semantics defined by callers — not visible here).
    max_concurrency : int
        size of the fixed Java thread pool (default 1000).
    """
    # IDIOM FIX: the first parameter was named "_self"; renamed to the
    # PEP 8-conventional "self" (internal only — callers are unaffected).
    self.max_concurrency = max_concurrency
    self.reporter = reporter
    self.workers = []  # tasks/futures tracked by this manager
    self.pool = Executors.newFixedThreadPool(self.max_concurrency)
    self.ecs = ExecutorCompletionService(self.pool)