def _delay(self):
    """Suspend this coroutine for the current back-off interval.

    Creates a one-shot deferred that the event loop fires after
    ``self._cur_delay`` seconds, yields on it, then grows the delay
    exponentially (factor ``self.delay_exp``) up to ``self.max_delay``.
    """
    pause = Deferred()
    self.logger.debug('Delaying for {:.2f} s'.format(self._cur_delay))
    ioloop.IOLoop.current().add_timeout(
        timedelta(seconds=self._cur_delay),
        lambda: pause.trigger(None))
    yield pause
    self.logger.debug('Resuming from delay...')
    # Exponential back-off, capped at max_delay.
    grown = self._cur_delay * self.delay_exp
    self._cur_delay = grown if grown < self.max_delay else self.max_delay
def __init__(self, sock, ioLoop=None):
    """Wrap an existing socket for non-blocking use on the event loop.

    :param sock: an already-created socket object; it is switched to
        non-blocking mode and marked close-on-exec.
    :param ioLoop: event loop to register with; defaults to
        ``Loop.instance()``.
    """
    self.address = None
    self.sock = sock
    self.sock.setblocking(False)
    # BUG FIX: the original tested `sock.type == socket.SOL_TCP`.
    # `sock.type` holds the socket *kind* (SOCK_STREAM == 1) while
    # SOL_TCP is a protocol level (== 6), so the test was never true and
    # TCP_NODELAY was never applied. Check for an actual TCP stream
    # socket instead (family guard keeps AF_UNIX stream sockets out,
    # where TCP_NODELAY would raise).
    if (self.sock.family in (socket.AF_INET, socket.AF_INET6)
            and self.sock.type == socket.SOCK_STREAM):
        self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    # Don't leak the descriptor into forked child processes.
    fcntl.fcntl(self.sock.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC)
    self._ioLoop = ioLoop or Loop.instance()
    self._state = self.NOT_CONNECTED
    self._onConnectedDeferred = Deferred()
    self._connectionTimeoutTuple = None
def wrapper(*args, **kwargs):
    # Issue one RPC call over the writable stream and return a Chain
    # wrapping the deferred result. `methodId` and `self` come from the
    # enclosing scope (this is a closure built per remote method).
    future = Deferred()
    # NOTE(review): `timeout` is popped out of **kwargs by name, so a
    # caller-supplied keyword argument `timeout` is interpreted by this
    # wrapper rather than forwarded — presumably intentional; confirm.
    timeout = kwargs.get('timeout', None)
    if timeout is not None:
        # Fail the future with TimeoutError once the deadline passes.
        timeoutId = self._ioLoop.add_timeout(
            time() + timeout,
            lambda: future.error(TimeoutError(timeout)))
        def timeoutRemover(func):
            # Decorate future.close so that closing the future also
            # cancels the pending timeout (avoids a late spurious error).
            def wrapper(*args, **kwargs):
                self._ioLoop.remove_timeout(timeoutId)
                return func(*args, **kwargs)
            return wrapper
        future.close = timeoutRemover(future.close)
    # Allocate a fresh session id, send the framed request, and register
    # the future so the read side can route the response back to it.
    self._session += 1
    self._writableStream.write([methodId, self._session, args])
    self._subscribers[self._session] = future
    return Chain([lambda: future], ioLoop=self._ioLoop)
def async_subprocess(command, callbacks=None, cwd=None, io_loop=None):
    """Run subprocess asynchronously and get bound `Deferred` object.

    This function runs separate subprocess `command` and attaches to the
    standard output stream (`stdout`) and error stream (`stderr`), providing
    ability to read them asynchronously.

    This can be useful when running multiple subprocesses simultaneously,
    e.g. in web server.

    You can attach up to two callbacks as list for `callbacks` parameter,
    first of them will be callback for `stdout`, second - for `stderr`.
    For example::

        engine.subprocess(['echo 123'], callbacks=[sys.stdout.write, None])

    means `sys.stdout` function as callback for `stdout` and there will be no
    callback for `stderr`.

    Returned `Deferred` object will trigger immediately after subprocess is
    finished, transferring error code as parameter. If process exits with
    error code different from 0 an `IOError` exception will be thrown.

    A subprocess pipe exception will be raised if subprocess can not be
    started.

    .. note:: You can `yield` this function in `engine.asynchronous` context.

    :param command: command for subprocess to start, same as
        `subprocess.Popen` first argument.
    :param callbacks: list of two callbacks for `stdout` and `stderr`
        respectively. If you don't want to attach any callback, you can pass
        `None` as function. Defaults to no callbacks at all.
    :param cwd: current working directory for subprocess.
    :param io_loop: tornado event loop, current by default.
    """
    io_loop = io_loop or IOLoop.current()
    # BUG FIX: the default callbacks=None used to reach zip() unchanged and
    # raise TypeError for every caller that omitted the argument.
    if callbacks is None:
        callbacks = [None, None]
    PIPE = subprocess.PIPE
    process = subprocess.Popen(command, shell=True,
                               stdin=PIPE, stdout=PIPE, stderr=PIPE,
                               close_fds=True, cwd=cwd)
    fhs = [process.stdout, process.stderr]
    deferred = Deferred()
    # BUG FIX: both the stdout and stderr handlers can observe the exit and
    # each used to fire the deferred, double-triggering it. `fired` guards
    # the deferred so it fires exactly once; each handler still unregisters
    # its own fd (the other fd becomes readable at EOF and cleans itself up).
    fired = []

    def create_handler(fh, callback):
        def handle(fd, events):
            assert events == io_loop.READ
            data = fh.readline()
            if callback is not None and data:
                callback(data)
            if process.poll() is not None:
                io_loop.remove_handler(fd)
                if not fired:
                    fired.append(True)
                    if process.returncode == 0:
                        deferred.trigger(process.returncode)
                    else:
                        deferred.error(IOError(process.returncode))
        return handle

    for fh, callback in zip(fhs, callbacks):
        io_loop.add_handler(fh.fileno(), create_handler(fh, callback),
                            io_loop.READ)
    return deferred