class ProcessManager(LogsMixin, LoopMixin):
    """Start and manage workers."""

    process_name = 'worker'
    name_template = '[thriftworker-{0}]' \
                    ' -c {1.CONCURRENCY}' \
                    ' -k {1.WORKER_TYPE}'
    gevent_monkey = 'from gevent.monkey import patch_all; patch_all();'
    script = 'from thriftpool.bin.thriftworker import main; main();'

    def __init__(self, app, listeners, controller):
        self.clients = Clients()
        self.serializers = StreamSerializer()
        self.app = app
        self.listeners = listeners
        self.controller = controller
        self._bootstrapped = set()
        super(ProcessManager, self).__init__()

    @property
    def manager(self):
        return self.app.gaffer_manager

    @cached_property
    def _is_started(self):
        return self.app.env.RealEvent()

    @cached_property
    def _is_stopped(self):
        return self.app.env.RealEvent()

    @cached_property
    def _stdout(self):
        return RedirectStream(self.loop, sys.stdout)

    @cached_property
    def _stderr(self):
        return RedirectStream(self.loop, sys.stderr)

    def _bootstrap_process(self, proxy, process):
        # Change name of process.
        name = self.name_template.format(process.id, self.app.config)
        proxy.change_title(name)
        # Register acceptors in remote process.
        proxy.register_acceptors({i: listener.name for i, listener
                                  in iteritems(self.listeners.enumerated)})
        for listener in self.listeners:
            if listener.started:
                proxy.start_acceptor(listener.name)
        # Notify about process initialization.
        self._bootstrapped.add(process.id)
        self._info('Worker %d initialized.', process.id)
        state = self.manager.get_process(self.process_name)
        if len(self._bootstrapped) >= state.numprocesses:
            self._info('Workers initialization done.')
            self._is_started.set()

    def _do_handshake(self, process):
        # Pass application to created process.
        stream = process.streams['handshake']
        stream.write(self.serializers.encode_with_length(self.app))

        def handshake_done(evtype, info):
            stream.unsubscribe(handshake_done)
            # Process exited and we do the same.
            if not process.active:
                return
            self.clients.register(process, self._bootstrap_process)

        # Wait for worker answer.
        stream.subscribe(handshake_done)

    def _redirect_io(self, process):
        """Redirect stdout & stderr."""
        monitor_io = (lambda evtype, msg:
                      (evtype == 'err' and self._stderr or self._stdout)
                      .write(msg['data']))
        process.monitor_io('.', monitor_io)

    def _on_event(self, evtype, msg):
        """Handle process events."""
        if msg['name'] != self.process_name:
            # Not our process.
            return
        if evtype == 'exit':
            # Log exit event.
            log = msg['term_signal'] and self._critical or self._info
            log('Worker %d exited with term signal %d and exit status %d.',
                msg['pid'], msg['term_signal'], msg['exit_status'])
        elif evtype == 'spawn':
            # Log spawn event.
            self._info('Worker %d spawned with pid %d.',
                       msg['pid'], msg['os_pid'])
        if evtype == 'spawn' and self.controller.is_running:
            # New process spawned, handle event.
            process = self.manager.get_process(msg['pid'])
            self._redirect_io(process)
            self._do_handshake(process)
        elif evtype == 'exit':
            # Process exited, handle event.
            self._bootstrapped.remove(msg['pid'])
            self.clients.unregister(msg['pid'])

    def _create_proc_kwargs(self):
        """Create arguments for worker."""
        config = self.app.config
        worker_type = config.WORKER_TYPE
        if worker_type == 'gevent':
            startup_line = '{0} {1}'.format(self.gevent_monkey, self.script)
        elif worker_type == 'sync':
            startup_line = self.script
        else:
            raise NotImplementedError()
        return dict(cmd=sys.executable,
                    args=['-c', '{0}'.format(startup_line)],
                    redirect_output=['out', 'err'],
                    custom_streams=['handshake', 'incoming', 'outgoing'],
                    custom_channels=self.listeners.channels,
                    env=dict(os.environ, IS_WORKER='1'),
                    numprocesses=config.WORKERS,
                    redirect_input=True,
                    graceful_timeout=STOP_TIMEOUT / 2)

    def _create_apps(self):
        """Create applications for gaffer."""
        apps = []
        options = dict(clients=self.clients)
        handlers = [
            (r'/timers', ClientsHandler, options),
            (r'/timers/([0-9^/]+)', TimerHandler, options),
            (r'/counters', ClientsHandler, options),
            (r'/counters/([0-9^/]+)', CounterHandler, options),
            (r'/stack', ClientsHandler, options),
            (r'/stack/([0-9^/]+)', StackHandler, options),
        ]
        endpoints = self.app.config.TORNADO_ENDPOINTS
        if endpoints:
            apps.append(HttpHandler(handlers=handlers,
                                    log_function=self.app.log.log_tornado_request,
                                    endpoints=[HttpEndpoint(uri=uri)
                                               for uri in endpoints]))
        return apps

    @in_loop
    def _setup(self):
        manager = self.manager
        manager.subscribe('.', self._on_event)
        manager.add_process(self.process_name, **self._create_proc_kwargs())
        manager.start(apps=self._create_apps())

    def start(self):
        self._setup()
        self._is_started.wait(START_TIMEOUT)
        if not self._is_started.is_set():
            self._error('Timeout when starting processes.')
            self._teardown()
            raise SystemTerminate()

    @in_loop
    def _teardown(self):
        self.clients.clear()
        stop_callback = lambda *args: self._is_stopped.set()
        self.manager.stop(stop_callback)

    def stop(self):
        self._teardown()
        self._is_stopped.wait(STOP_TIMEOUT)
        if not self._is_stopped.is_set():
            self._error('Timeout when terminating processes.')
            raise SystemTerminate()
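
# The handshake above writes ``self.serializers.encode_with_length(self.app)``
# to the worker's 'handshake' stream.  ``StreamSerializer`` is internal to
# thriftpool; as a rough, self-contained sketch of the *assumed* framing
# (a pickled payload prefixed with its byte length), the helper names below
# are illustrative only and not the project's actual implementation:
import pickle
import struct


def encode_with_length(obj):
    """Serialize *obj* and prefix it with a 4-byte big-endian length."""
    payload = pickle.dumps(obj)
    return struct.pack('>I', len(payload)) + payload


def decode_with_length(data):
    """Inverse of :func:`encode_with_length`; returns (obj, remaining bytes)."""
    (length,) = struct.unpack('>I', data[:4])
    payload, rest = data[4:4 + length], data[4 + length:]
    return pickle.loads(payload), rest


# Round-trip check for the sketch above.
_frame = encode_with_length({'config': {'WORKERS': 2}})
_obj, _rest = decode_with_length(_frame)
assert _obj == {'config': {'WORKERS': 2}} and _rest == b''
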
class ProcessManager(LogsMixin, LoopMixin):
    """Start and manage workers."""

    process_name = "worker"
    name_template = "[thriftworker-{0}]" " -c {1.CONCURRENCY}" " -k {1.WORKER_TYPE}"
    gevent_monkey = "from gevent.monkey import patch_all; patch_all();"
    script = "from thriftpool.bin.thriftworker import main; main();"

    def __init__(self, app, listeners, controller):
        self.clients = Clients()
        self.serializers = StreamSerializer()
        self.app = app
        self.listeners = listeners
        self.controller = controller
        # store process id and start time
        self._bootstrapped = {}
        super(ProcessManager, self).__init__()

    def __iter__(self):
        return iter(self._bootstrapped)

    def get_start_time(self, process_id):
        """When process was registered?"""
        return self._bootstrapped.get(process_id)

    @property
    def manager(self):
        return self.app.gaffer_manager

    @cached_property
    def _is_started(self):
        return self.app.env.RealEvent()

    @cached_property
    def _is_stopped(self):
        return self.app.env.RealEvent()

    @cached_property
    def _stdout(self):
        return RedirectStream(self.loop, sys.stdout)

    @cached_property
    def _stderr(self):
        return RedirectStream(self.loop, sys.stderr)

    @property
    def initialized(self):
        """All workers started or not?"""
        state = self.manager.get_process(self.process_name)
        return len(self._bootstrapped) >= state.numprocesses

    def _bootstrap_process(self, proxy, process):
        # Change name of process.
        name = self.name_template.format(process.id, self.app.config)
        proxy.change_title(name)
        # Register acceptors in remote process.
        proxy.register_acceptors({i: listener.name for i, listener
                                  in iteritems(self.listeners.enumerated)})
        for listener in self.listeners:
            if listener.started:
                proxy.start_acceptor(listener.name)
        # Notify about process initialization.
        self._bootstrapped[process.id] = self.loop.now()
        self._info("Worker %d initialized.", process.id)
        if self.initialized:
            self._info("Workers initialization done.")
            self._is_started.set()

    def _do_handshake(self, process):
        # Pass application to created process.
        stream = process.streams["handshake"]
        stream.write(self.serializers.encode_with_length(self.app))

        def handshake_done(evtype, info):
            stream.unsubscribe(handshake_done)
            # Process exited and we do the same.
            if not process.active:
                return
            self.clients.register(process, self._bootstrap_process)

        # Wait for worker answer.
        stream.subscribe(handshake_done)

    def _redirect_io(self, process):
        """Redirect stdout & stderr."""
        monitor_io = lambda evtype, msg: (
            evtype == "err" and self._stderr or self._stdout
        ).write(msg["data"])
        process.monitor_io(".", monitor_io)

    def _on_event(self, evtype, msg):
        """Handle process events."""
        if msg["name"] != self.process_name:
            # Not our process.
            return
        if evtype == "exit":
            # Log exit event.
            log = msg["term_signal"] and self._critical or self._info
            log(
                "Worker %d exited with term signal %d and exit status %d.",
                msg["pid"],
                msg["term_signal"],
                msg["exit_status"],
            )
        elif evtype == "spawn":
            # Log spawn event.
            self._info("Worker %d spawned with pid %d.", msg["pid"], msg["os_pid"])
        if evtype == "spawn" and self.controller.is_running:
            # New process spawned, handle event.
            process = self.manager.get_process(msg["pid"])
            self._redirect_io(process)
            self._do_handshake(process)
        elif evtype == "exit":
            # Process exited, handle event.
            self._bootstrapped.pop(msg["pid"])
            self.clients.unregister(msg["pid"])

    def _create_proc_kwargs(self):
        """Create arguments for worker."""
        config = self.app.config
        worker_type = config.WORKER_TYPE
        if worker_type == "gevent":
            startup_line = "{0} {1}".format(self.gevent_monkey, self.script)
        elif worker_type == "sync":
            startup_line = self.script
        else:
            raise NotImplementedError()
        return dict(
            cmd=sys.executable,
            args=["-c", "{0}".format(startup_line)],
            redirect_output=["out", "err"],
            custom_streams=["handshake", "incoming", "outgoing"],
            custom_channels=self.listeners.channels,
            env=dict(os.environ, IS_WORKER="1"),
            numprocesses=config.WORKERS,
            redirect_input=True,
            graceful_timeout=STOP_TIMEOUT / 2,
        )

    def _create_apps(self):
        """Create applications for gaffer."""
        apps = []
        options = dict(clients=self.clients)
        handlers = [
            (r"/timers", ClientsHandler, options),
            (r"/timers/([0-9^/]+)", TimerHandler, options),
            (r"/counters", ClientsHandler, options),
            (r"/counters/([0-9^/]+)", CounterHandler, options),
            (r"/stack", ClientsHandler, options),
            (r"/stack/([0-9^/]+)", StackHandler, options),
        ]
        endpoints = self.app.config.TORNADO_ENDPOINTS
        if endpoints:
            apps.append(
                HttpHandler(
                    handlers=handlers,
                    log_function=self.app.log.log_tornado_request,
                    endpoints=[HttpEndpoint(uri=uri) for uri in endpoints],
                )
            )
        return apps

    @in_loop
    def _setup(self):
        manager = self.manager
        manager.subscribe(".", self._on_event)
        manager.add_process(self.process_name, **self._create_proc_kwargs())
        manager.start(apps=self._create_apps())

    def start(self):
        self._setup()
        self._is_started.wait(START_TIMEOUT)
        if not self._is_started.is_set():
            self._error("Timeout when starting processes.")
            self._teardown()
            raise SystemTerminate()

    @in_loop
    def _teardown(self):
        self.clients.clear()
        stop_callback = lambda *args: self._is_stopped.set()
        self.manager.stop(stop_callback)

    def stop(self):
        self._teardown()
        self._is_stopped.wait(STOP_TIMEOUT)
        if not self._is_stopped.is_set():
            self._error("Timeout when terminating processes.")
            raise SystemTerminate()