def __init__(
    self,
    name,
    worker_threads,
    thread_class,
    connection,
    request_queue=None,
    done_queue=None,
    sentinel=None,
    socket=None,
    local=None,
    master=None,
):
    """Set up a worker subprocess.

    Initialises base-service state and the Process machinery, then stores
    the thread-pool configuration and the communication channels handed
    over by the parent process.
    """
    BaseService.__init__(self, name)
    multiprocessing.Process.__init__(self, name=name)
    # thread-pool configuration for this worker
    self.worker_threads = worker_threads
    self.thread_class = thread_class
    # channels shared with the parent process
    self.connection = connection
    self.request_queue = request_queue
    self.done_queue = done_queue
    self.sentinel = sentinel
    self.socket = socket
    self.local = local
    self.master = master
    # NOTE(review): this attribute shadows multiprocessing.Process.is_alive(),
    # so proc.is_alive() on this object raises TypeError — confirm intended.
    self.is_alive = True
def __init__(self, name, address, worker_threads, threadClass, requestHandler):
    """Initialise a threaded service bound to *address*.

    Builds the bounded client-request queue, the recycling queue for idle
    requestHandler objects, and an initially empty thread pool.
    """
    BaseService.__init__(self, name)
    self.addr = address
    self.worker_threads = worker_threads
    self.requestHandler = requestHandler
    self.threadClass = threadClass
    # bounded queue of pending client requests: five slots per worker thread
    self.requestQueue = Queue.Queue(worker_threads * 5)
    # unbounded queue of inactive requestHandler objects, i.e. those already served
    self.rhQueue = Queue.Queue(0)
    # worker threads are created later; start with an empty pool
    self.threadPool = []
def __init__(self, name, address, worker_processes, worker_threads, thread_class):
    """Initialise a multiprocess-aware dispatcher service.

    Resolves *worker_processes* ('auto' -> CPU count, numeric string -> int)
    and decides between multiprocess and single-process operation.  In
    multiprocess mode on platforms where sockets cannot be rebuilt from file
    descriptors (no ``socket.fromfd``), shared queues carry requests between
    processes; in single-process mode a plain thread queue is used.

    Fix over the original: when the ``multiprocessing`` module is
    unavailable, *worker_processes* is now normalised to 0 instead of
    possibly remaining the string ``'auto'``.
    """
    # initialize base service
    BaseService.__init__(self, name)
    self.addr = address
    self.worker_threads = worker_threads
    self.thread_class = thread_class
    self.is_multiprocess = False
    if multiprocessing:
        if worker_processes == 'auto':
            cpus = multiprocessing.cpu_count()
            # a single-CPU machine gains nothing from extra processes
            worker_processes = 0 if cpus == 1 else cpus
        else:
            worker_processes = int(worker_processes)
        self.is_multiprocess = worker_processes > 0
    else:
        # multiprocessing unavailable: force single-process mode and keep
        # worker_processes an int (original could leave 'auto' here)
        worker_processes = 0
    self.worker_processes = worker_processes
    self.pipes = []             # used for sending management tasks
                                # to subprocesses
    self.task_dispatchers = []  # used for getting completed requests
                                # from subprocesses only when sockets are
                                # not pickleable
    self.sentinel = None
    self._socket = None
    request_queue = None
    done_queue = None
    if self.is_multiprocess:
        if not hasattr(socket, 'fromfd'):
            # sockets cannot be recreated from fds here, so requests must
            # travel through shared queues; sentinel marks end-of-stream
            request_queue = get_shared_queue(2048)
            done_queue = get_shared_queue(2048)
            self.sentinel = (-1, b'EOF')
    else:
        # single-process mode: a small in-process queue is enough
        request_queue = queue.Queue(worker_threads * 2)
    Dispatcher.__init__(self, request_queue, done_queue)
    # create worker tuple
    self.worker_pool = []
def __init__(self, name, address, worker_processes, worker_threads, thread_class):
    """Initialise a multiprocess-aware dispatcher service.

    Resolves *worker_processes* ('auto' -> CPU count, numeric string -> int)
    and decides between multiprocess and single-process operation.  In
    multiprocess mode on platforms where sockets cannot be rebuilt from file
    descriptors (no ``socket.fromfd``), shared queues carry requests between
    processes; in single-process mode a plain thread queue is used.

    Fix over the original: when the ``multiprocessing`` module is
    unavailable, *worker_processes* is now normalised to 0 instead of
    possibly remaining the string ``'auto'``.
    """
    # initialize base service
    BaseService.__init__(self, name)
    self.addr = address
    self.worker_threads = worker_threads
    self.thread_class = thread_class
    self.is_multiprocess = False
    if multiprocessing:
        if worker_processes == 'auto':
            cpus = multiprocessing.cpu_count()
            # a single-CPU machine gains nothing from extra processes
            worker_processes = 0 if cpus == 1 else cpus
        else:
            worker_processes = int(worker_processes)
        self.is_multiprocess = worker_processes > 0
    else:
        # multiprocessing unavailable: force single-process mode and keep
        # worker_processes an int (original could leave 'auto' here)
        worker_processes = 0
    self.worker_processes = worker_processes
    self.pipes = []             # used for sending management tasks
                                # to subprocesses
    self.task_dispatchers = []  # used for getting completed requests
                                # from subprocesses only when sockets are
                                # not pickleable
    self.sentinel = None
    self._socket = None
    request_queue = None
    done_queue = None
    if self.is_multiprocess:
        if not hasattr(socket, 'fromfd'):
            # sockets cannot be recreated from fds here, so requests must
            # travel through shared queues; sentinel marks end-of-stream
            request_queue = get_shared_queue(2048)
            done_queue = get_shared_queue(2048)
            self.sentinel = (-1, b'EOF')
    else:
        # single-process mode: a small in-process queue is enough
        request_queue = queue.Queue(worker_threads * 2)
    Dispatcher.__init__(self, request_queue, done_queue)
    # create worker tuple
    self.worker_pool = []
def __init__(self, name, worker_threads, thread_class, connection,
             request_queue=None, done_queue=None, sentinel=None,
             socket=None, local=None, master=None):
    """Construct a worker subprocess, recording its configuration and the
    parent-process communication channels."""
    BaseService.__init__(self, name)
    multiprocessing.Process.__init__(self, name=name)
    # how many threads this worker runs, and which class implements them
    self.worker_threads = worker_threads
    self.thread_class = thread_class
    # links back to the parent process
    self.connection = connection
    self.request_queue = request_queue
    self.done_queue = done_queue
    self.sentinel = sentinel
    self.socket = socket
    self.master = master
    self.local = local
    # NOTE(review): shadows multiprocessing.Process.is_alive() (a method),
    # making proc.is_alive() uncallable on this instance — confirm intended.
    self.is_alive = True
def __init__(self, name, interval):
    """Initialise a periodic service, storing its run *interval*
    (units not visible here — presumably seconds; verify against callers)."""
    BaseService.__init__(self, name)
    self.interval = interval