Example #1
    async def ensure_process(self):
        """
        Start the process
        """
        # We don't want multiple requests trying to start the process at the same time
        # FIXME: Make sure this times out properly?
        # Invariant: when the lock isn't held, 'proc' is either present in state and
        # running, or not in state at all.
        async with self.state['proc_lock']:
            if 'proc' not in self.state:
                # FIXME: Prevent races here
                # FIXME: Handle graceful exits of spawned processes here
                cmd = self.get_cmd()
                server_env = os.environ.copy()

                # Set up extra environment variables for process
                server_env.update(self.get_env())

                timeout = self.get_timeout()

                proc = SupervisedProcess(self.name,
                                         *cmd,
                                         env=server_env,
                                         ready_func=self._http_ready_func,
                                         ready_timeout=timeout,
                                         log=self.log)
                self.state['proc'] = proc

                try:
                    await proc.start()

                    is_ready = await proc.ready()

                    if not is_ready:
                        await proc.kill()
                        raise web.HTTPError(500, 'could not start {} in time'.format(self.name))
                except:
                    # Make sure we remove proc from state in any error condition
                    del self.state['proc']
                    raise
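Example #1 passes self._http_ready_func as the readiness probe, but that method is not shown. A minimal sketch of such a probe, modelled on the _ready_func in Example #3 below; the port number and URL here are illustrative assumptions, not part of the original class:

import aiohttp

async def http_ready_func(proc, port=8888):
    # Hypothetical probe: report ready once the child answers HTTP on localhost:port
    url = 'http://localhost:{}'.format(port)
    async with aiohttp.ClientSession() as session:
        try:
            async with session.get(url) as resp:
                # Any HTTP response at all means the server socket is up
                return resp.status is not None
        except aiohttp.ClientConnectionError:
            return False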
Example #2
async def main():
    count = int(sys.argv[1])
    pids = []
    for i in range(count):
        proc = SupervisedProcess('signalprinter-{}'.format(i),
                                 *[sys.executable, signal_printer, '1'])

        await proc.start()
        pids.append(proc.pid)

    print(' '.join([str(pid) for pid in pids]), flush=True)
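Example #2 prints the PIDs and leaves the children running. A minimal sketch of how the same loop might be driven and then torn down, assuming SupervisedProcess and the signal_printer script path are available as in the original test; the cleanup step is an addition for illustration:

import asyncio
import sys

async def run_and_cleanup():
    count = int(sys.argv[1])
    procs = []
    for i in range(count):
        proc = SupervisedProcess('signalprinter-{}'.format(i),
                                 *[sys.executable, signal_printer, '1'])
        await proc.start()
        procs.append(proc)

    print(' '.join(str(proc.pid) for proc in procs), flush=True)

    # Tear down the children once we are done with them
    for proc in procs:
        await proc.kill()

if __name__ == '__main__':
    asyncio.run(run_and_cleanup())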
Example #3
async def test_ready():
    """
    Test the web app's readiness
    """
    httpserver_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'child_scripts',
        'simplehttpserver.py'
    )

    port = '9005'
    # We tell our server to wait this many seconds before it starts serving
    ready_time = 3.0

    async def _ready_func(p):
        url = 'http://localhost:{}'.format(port)
        async with aiohttp.ClientSession() as session:
            try:
                async with session.get(url) as resp:
                    logging.debug('Got code %s back from %s', resp.status, url)
                    return resp.status == 200
            except aiohttp.ClientConnectionError:
                logging.debug('Connection to %s refused', url)
                return False

    proc = SupervisedProcess(
        'socketserver',
        sys.executable, httpserver_file, str(ready_time),
        ready_func=_ready_func,
        env={'PORT': port}
    )

    try:
        await proc.start()
        start_time = time.time()
        assert (await proc.ready())
        assert time.time() - start_time > ready_time
    finally:
        # Clean up our process after ourselves
        await proc.kill()
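The child script simplehttpserver.py is not included here. A hypothetical sketch of what it might do, inferred from how the test drives it (take the startup delay as a command-line argument, read PORT from the environment, sleep, then serve):

import os
import sys
import time
from http.server import HTTPServer, SimpleHTTPRequestHandler

# Hypothetical child script: delay startup, then serve HTTP on $PORT
delay = float(sys.argv[1]) if len(sys.argv) > 1 else 0.0
port = int(os.environ.get('PORT', '8000'))

time.sleep(delay)
HTTPServer(('127.0.0.1', port), SimpleHTTPRequestHandler).serve_forever()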
Example #4
    def connection_requested(self, dest_host, dest_port, orig_host, orig_port):
        # Only allow localhost connections
        if dest_host != '127.0.0.1':
            raise asyncssh.ChannelOpenError(
                asyncssh.OPEN_ADMINISTRATIVELY_PROHIBITED,
                "Only localhost connections allowed")

        username = self.conn.get_extra_info('username')
        user_pod = UserPod(username, self.namespace)

        cache_key = f'{user_pod.pod_name}:{dest_port}'

        if cache_key in self.forwarding_processes:
            proc = self.forwarding_processes[cache_key]

            port = proc.port
        else:
            port = random_port()
            command = [
                'kubectl', 'port-forward', user_pod.pod_name,
                f'{port}:{dest_port}'
            ]

            async def _socket_ready(proc):
                try:
                    sock = socket.create_connection(('127.0.0.1', port))
                    sock.close()
                    return True
                except OSError:
                    # socket.create_connection raises OSError subclasses
                    # (e.g. ConnectionRefusedError) until the forward is up
                    return False

            proc = SupervisedProcess('kubectl',
                                     *command,
                                     always_restart=True,
                                     ready_func=_socket_ready)
            self.forwarding_processes[cache_key] = proc
            proc.port = port

        async def transfer_data(reader, writer):
            # Make sure our pod is running
            async for status in user_pod.ensure_running():
                if status == PodState.RUNNING:
                    break
            # Make sure our kubectl port-forward is running
            await proc.start()
            await proc.ready()

            # Connect to the local end of the kubectl port-forward
            (upstream_reader, upstream_writer) = await asyncio.open_connection(
                '127.0.0.1', port)

            # FIXME: This should be as fully bidirectional as possible, with minimal buffering / timeouts
            while not reader.at_eof():
                try:
                    data = await asyncio.wait_for(reader.read(8092),
                                                  timeout=0.1)
                except asyncio.TimeoutError:
                    data = None
                if data:
                    upstream_writer.write(data)
                    await upstream_writer.drain()

                try:
                    in_data = await asyncio.wait_for(
                        upstream_reader.read(8092), timeout=0.1)
                except asyncio.TimeoutError:
                    in_data = None
                if in_data:
                    writer.write(in_data)
                    await writer.drain()
                if upstream_reader.at_eof():
                    break
            writer.close()

        return transfer_data
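The FIXME in transfer_data points out that the copy loop polls each direction with a 0.1 s timeout. A minimal sketch of a fully bidirectional alternative that pumps each direction in its own task; this illustrates the standard asyncio pattern and is not code from the original:

import asyncio

async def pump(reader, writer):
    # Copy bytes from reader to writer until EOF, then close the writer
    try:
        while not reader.at_eof():
            data = await reader.read(8192)
            if not data:
                break
            writer.write(data)
            await writer.drain()
    finally:
        writer.close()

async def transfer_bidirectional(reader, writer, upstream_reader, upstream_writer):
    # Run both directions concurrently instead of alternating with timeouts
    await asyncio.gather(
        pump(reader, upstream_writer),
        pump(upstream_reader, writer),
    )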
Example #5
    async def ensure_process(self):
        """
        Start the process
        """
        # We don't want multiple requests trying to start the process at the same time
        # FIXME: Make sure this times out properly?
        # Invariant: when the lock isn't held, 'proc' is either present in state and
        # running, or not in state at all.
        async with self.state['proc_lock']:
            if 'proc' not in self.state:
                # FIXME: Prevent races here
                # FIXME: Handle graceful exits of spawned processes here
                cmd = self.get_cmd()
                server_env = os.environ.copy()

                # Set up extra environment variables for process
                server_env.update(self.get_env())

                timeout = self.get_timeout()

                self.log.info(cmd)

                proc = SupervisedProcess(self.name,
                                         *cmd,
                                         env=server_env,
                                         ready_func=self._http_ready_func,
                                         ready_timeout=timeout,
                                         log=self.log,
                                         stderr=subprocess.PIPE,
                                         stdout=subprocess.PIPE)
                self.state['proc'] = proc

                try:
                    await proc.start()

                    is_ready = await proc.ready()

                    if not is_ready:

                        self.stderr_str = None
                        self.stdout_str = None

                        stdout, stderr = await proc.proc.communicate()

                        if stderr:
                            self.stderr_str = str(stderr.decode("utf-8"))
                            self.log.info(
                                'Process {} failed with stderr: {}'.format(
                                    self.name, self.stderr_str))

                        if stdout:
                            self.stdout_str = str(stdout.decode("utf-8"))
                            self.log.info(
                                'Process {} failed with stdout: {}'.format(
                                    self.name, self.stdout_str))

                        await proc.kill()

                        del self.state['proc']
                        return False

                    else:
                        # Make sure we empty the buffers periodically

                        async def pipe_output(proc, pipename, log):
                            while True:
                                if proc.proc:
                                    stream = getattr(proc.proc, pipename, None)
                                    if stream:
                                        try:
                                            line = await stream.readline()
                                            if line:
                                                if pipename == 'stdout':
                                                    log.info(line)
                                                else:
                                                    log.error(line)
                                            else:
                                                break
                                        except ValueError:
                                            log.info(
                                                'Truncated log line from subprocess'
                                            )

                        ensure_future(pipe_output(proc, 'stderr', self.log))
                        ensure_future(pipe_output(proc, 'stdout', self.log))

                except:
                    # Make sure we remove proc from state in any error condition
                    del self.state['proc']
                    raise
            return True
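Example #5 extends Example #1: it pipes the child's stdout/stderr through the logger (ensure_future here is assumed to come from asyncio) and returns False instead of raising when the process never becomes ready, keeping any captured output in self.stderr_str / self.stdout_str. A minimal sketch of how a caller might use that return value, assuming the same web module Example #1 relies on; the handler itself is hypothetical:

async def handle_start_request(proxy):
    # `proxy` is assumed to expose ensure_process(), name, stderr_str and stdout_str
    started = await proxy.ensure_process()
    if not started:
        detail = proxy.stderr_str or proxy.stdout_str or 'no output captured'
        raise web.HTTPError(500, 'could not start {}: {}'.format(proxy.name, detail))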