Example #1
    async def websocket_stream_handler(self, channel, data):
        queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)
        # unique sentinel identifying this client in stream_clients
        client_key = object()
        hash_key = None

        try:
            # register this client's queue under the (channel, hash_name) key
            hash_key = channel, data['hash_name']
            self.stream_clients[hash_key][client_key] = queue

            # write any response
            await websocket.send(self.encode({'data': 'hello'}))

            packet = 0
            async for item_data, item_channel, item_hash in queue:
                msg_data = {
                    'packet': packet,
                    'data': {
                        'data': item_data,
                        'stream': item_channel,
                        'hash_name': item_hash
                    }
                }
                packet += 1

                await websocket.send(self.encode(msg_data))
        except Exception as e:
            # todo: ignore error when socket is closed remotely
            ret_data = {'exception': serialize_exception(e)}
            await websocket.send(self.encode(ret_data))
        finally:
            if hash_key is not None:
                # unregister this client; drop the channel entry once empty
                del self.stream_clients[hash_key][client_key]
                if not self.stream_clients[hash_key]:
                    del self.stream_clients[hash_key]
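
The handler above first sends a hello frame ({'data': 'hello'}) and then one message per queued item, numbered by 'packet'; errors are reported as a final frame with an 'exception' key. The consumer below is only a sketch of reading that framing: it assumes the messages decode as plain JSON (the actual self.encode format may differ), uses a hypothetical URI, and skips whatever request data the real client sends when opening the stream.

import asyncio
import json

import websockets  # third-party client library, used only for illustration


async def consume_stream(uri):
    # uri is hypothetical, e.g. 'ws://localhost:5000/...'; the real endpoint
    # path and handshake depend on how the server registers this handler.
    async with websockets.connect(uri) as ws:
        hello = json.loads(await ws.recv())
        assert hello == {'data': 'hello'}

        while True:
            msg = json.loads(await ws.recv())
            if 'exception' in msg:
                # the handler sent a serialized error instead of a data frame
                raise RuntimeError(str(msg['exception']))
            payload = msg['data']
            print(msg['packet'], payload['stream'], payload['hash_name'],
                  payload['data'])


# asyncio.run(consume_stream('ws://localhost:5000/...'))  # hypothetical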
Example #2
async def socket_stream_handler(executor: ProcessSocketServer,
                                stream: SocketStream, channel, data):
    queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)
    client_key = object()
    hash_key = None

    try:
        hash_key = channel, data['hash_name']
        executor.stream_clients[hash_key][client_key] = queue

        # write any response
        await executor.write_socket(executor.encode({'data': 'hello'}), stream)

        packet = 0
        async for item_data, item_channel, item_hash in queue:
            msg_data = {
                'packet': packet,
                'data': {
                    'data': item_data,
                    'stream': item_channel,
                    'hash_name': item_hash
                }
            }
            packet += 1

            await executor.write_socket(executor.encode(msg_data), stream)
    except Exception as e:
        ret_data = {'exception': serialize_exception(e)}
        await executor.write_socket(executor.encode(ret_data), stream)
    finally:
        if hash_key is not None:
            del executor.stream_clients[hash_key][client_key]
            if not executor.stream_clients[hash_key]:
                del executor.stream_clients[hash_key]
Example #3
    async def websocket_data_stream_handler(self, data):
        queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)

        def add_to_queue(item):
            # callback passed to start_logging_object_data: encode each item
            # and record its size so the queue can enforce MAX_QUEUE_SIZE
            encoded_data = self.encode(item)
            queue.add_item(encoded_data, len(encoded_data))

        try:
            binding, initial = await self.start_logging_object_data(
                data, add_to_queue)

            try:
                # write any response
                await websocket.send(self.encode({'data': 'hello'}))

                msg_data = {
                    'packet': 0,
                    'data': {
                        'data': self.encode(initial)
                    }
                }
                await websocket.send(self.encode(msg_data))

                packet = 1
                async for data_item in queue:
                    msg_data = {'packet': packet, 'data': {'data': data_item}}
                    packet += 1

                    await websocket.send(self.encode(msg_data))
            finally:
                await self.stop_logging_object_data(binding)
        except Exception as e:
            ret_data = {'exception': serialize_exception(e)}
            await websocket.send(self.encode(ret_data))
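
Examples #1 through #3 all pass data through a MaxSizeErrorDeque: producers call add_item(data, size) synchronously and the handler drains it with 'async for'. The class below is a minimal asyncio-based sketch of that surface, written here for illustration; it is not the library's implementation, which may handle overflow and wakeups differently.

import asyncio
from collections import deque


class BoundedAsyncDeque:
    """Sketch of the add_item / async-iteration surface used above."""

    def __init__(self, max_size):
        self.max_size = max_size
        self._items = deque()
        self._total = 0
        self._ready = asyncio.Event()

    def add_item(self, item, size=1):
        # called from synchronous code; refuses items once the byte budget
        # would be exceeded (the real class may report this differently)
        if self._total + size > self.max_size:
            raise MemoryError(f'queue exceeded {self.max_size} bytes')
        self._total += size
        self._items.append((item, size))
        self._ready.set()

    def __aiter__(self):
        return self

    async def __anext__(self):
        # wait until a producer adds something, then hand it to the consumer
        while not self._items:
            self._ready.clear()
            await self._ready.wait()
        item, size = self._items.popleft()
        self._total -= size
        return item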
Example #4
        async def send_events():
            if msg is not None:
                data = self.encode(msg)
                id_data = json.dumps((None, None, None))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
                return

            queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)
            client_key = object()
            hash_key = None

            try:
                hash_key = channel, req_data['hash_name']
                self.stream_clients[hash_key][client_key] = queue

                data = json.dumps('alive')
                message = f"data: {data}\n\n"
                yield message.encode('utf-8')

                packet = 0
                async for item_data, item_channel, item_hash in queue:
                    msg_data = {
                        'data': {
                            'data': item_data,
                            'stream': item_channel,
                            'hash_name': item_hash
                        }
                    }

                    data = self.encode(msg_data)
                    id_data = json.dumps((packet, item_channel, item_hash))
                    message = f"data: {data}\nid: {id_data}\n\n"
                    yield message.encode('utf-8')

                    packet += 1
            except Exception as e:
                msg_data = {'exception': serialize_exception(e)}
                data = self.encode(msg_data)
                id_data = json.dumps((None, None, None))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
            finally:
                if hash_key is not None:
                    del self.stream_clients[hash_key][client_key]
                    if not self.stream_clients[hash_key]:
                        del self.stream_clients[hash_key]
Example #5
        async def send_events():
            if msg is not None:
                data = self.encode(msg)
                id_data = json.dumps((None, ))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
                return

            queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)

            def add_to_queue(item):
                encoded_data = self.encode(item)
                queue.add_item(encoded_data, len(encoded_data))

            try:
                binding, initial = await self.start_logging_object_data(
                    req_data, add_to_queue)

                try:
                    data = json.dumps('alive')
                    message = f"data: {data}\n\n"
                    yield message.encode('utf-8')

                    data = self.encode(
                        {'data': {
                            'data': self.encode(initial)
                        }})
                    id_data = json.dumps((0, ))
                    message = f"data: {data}\nid: {id_data}\n\n"
                    yield message.encode('utf-8')

                    packet = 1
                    async for data_item in queue:
                        data = self.encode({'data': {'data': data_item}})
                        id_data = json.dumps((packet, ))
                        message = f"data: {data}\nid: {id_data}\n\n"
                        yield message.encode('utf-8')

                        packet += 1
                finally:
                    await self.stop_logging_object_data(binding)
            except Exception as e:
                msg_data = {'exception': serialize_exception(e)}
                data = self.encode(msg_data)
                id_data = json.dumps((None, ))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
Example #6
    async def inner(self: 'QuartRestServer'):
        try:
            data = (await request.get_data()).decode('utf8')
            decoded = self.decode(data)

            result = await func(self, decoded)

            encoded = self.encode({'data': result})
        except Exception as e:
            # todo: ignore write_socket in generator
            ret_data = {'exception': serialize_exception(e)}
            encoded = self.encode(ret_data)

        return await make_response(encoded,
                                   {'Content-Type': 'application/json'})
Example #7
        async def send_events():
            resp_data = json.dumps('alive')
            message = f"data: {resp_data}\n\n"
            yield message.encode('utf-8')

            try:
                id_data = json.dumps(False)
                async with aclosing(self.execute_generator(decoded)) as aiter:
                    async for item in aiter:
                        resp_data = self.encode({'data': item})
                        message = f"data: {resp_data}\nid: {id_data}\n\n"
                        yield message.encode('utf-8')

                resp_data = self.encode({})
                id_data = json.dumps(True)
                message = f"data: {resp_data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
            except Exception as e:
                ret_data = {'exception': serialize_exception(e)}
                resp_data = self.encode(ret_data)
                id_data = json.dumps(False)

                message = f"data: {resp_data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
Example #8
async def socket_data_stream_handler(executor: ProcessSocketServer,
                                     stream: SocketStream, data):
    queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)

    def add_to_queue(item):
        encoded_data = executor.encode(item)
        queue.add_item(encoded_data, len(encoded_data))

    try:
        binding, initial = await executor.start_logging_object_data(
            data, add_to_queue)

        try:
            # write any response
            await executor.write_socket(executor.encode({'data': 'hello'}),
                                        stream)

            msg_data = {
                'packet': 0,
                'data': {
                    'data': executor.encode(initial)
                }
            }
            await executor.write_socket(executor.encode(msg_data), stream)

            packet = 1
            async for data_item in queue:
                msg_data = {'packet': packet, 'data': {'data': data_item}}
                packet += 1

                await executor.write_socket(executor.encode(msg_data), stream)
        finally:
            await executor.stop_logging_object_data(binding)
    except Exception as e:
        ret_data = {'exception': serialize_exception(e)}
        await executor.write_socket(executor.encode(ret_data), stream)
Example #9
    async def websocket_handler(self):
        await websocket.send(self.encode({'data': 'hello'}))

        while True:
            try:
                msg = self.decode_json_buffers(await websocket.receive())
                cmd = msg['cmd']
                packet = msg['packet']
                data = msg['data']

                ret_data = {
                    'cmd': cmd,
                    'packet': packet,
                }

                if cmd == 'remote_import':
                    res = await self.remote_import(data)
                elif cmd == 'register_remote_class':
                    res = await self.register_remote_class(data)
                elif cmd == 'ensure_remote_instance':
                    res = await self.ensure_instance(data)
                elif cmd == 'delete_remote_instance':
                    res = await self.delete_instance(data)
                elif cmd == 'execute':
                    res = await self.execute(data)
                elif cmd == 'execute_generator':
                    ret_data['done_execute'] = False
                    async with aclosing(self.execute_generator(data)) as aiter:
                        async for res in aiter:
                            ret_data['data'] = res
                            try:
                                await websocket.send(self.encode(ret_data))
                            except BrokenResourceError:
                                # client closed
                                return

                    res = None
                    ret_data['done_execute'] = True
                elif cmd == 'get_remote_objects':
                    res = await self.get_objects(data)
                elif cmd == 'get_remote_object_config':
                    res = await self.get_object_config(data)
                elif cmd == 'get_remote_object_property_data':
                    res = await self.get_object_data(data)
                elif cmd == 'get_echo_clock':
                    res = await self.get_echo_clock(data)
                elif cmd == 'sleep':
                    res = await self.sleep(data)
                else:
                    raise Exception(f'Unknown command "{cmd}"')

                ret_data['data'] = res
                encoded_ret = self.encode(ret_data)
            except Exception as e:
                # todo: ignore write_socket in generator
                ret_data = {'exception': serialize_exception(e)}
                encoded_ret = self.encode(ret_data)
            # todo: should we be catching base exception and notify and
            #  re-raise? Otherwise client may be waiting forever in a
            #  shielded state

            # todo: exception response when remote closed socket or for REST

            await websocket.send(encoded_ret)
Example #10
    async def sse_channel(self, channel):
        # todo: send alive with timeout in case skipped packets
        # we must read here in the view function, otherwise the request is gone
        msg = None
        try:
            req_data = (await request.get_data()).decode('utf8')
            req_data = self.decode(req_data)
        except Exception as e:
            msg = {'exception': serialize_exception(e)}

        async def send_events():
            if msg is not None:
                data = self.encode(msg)
                id_data = json.dumps((None, None, None))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
                return

            queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)
            client_key = object()
            hash_key = None

            try:
                hash_key = channel, req_data['hash_name']
                self.stream_clients[hash_key][client_key] = queue

                data = json.dumps('alive')
                message = f"data: {data}\n\n"
                yield message.encode('utf-8')

                packet = 0
                async for item_data, item_channel, item_hash in queue:
                    msg_data = {
                        'data': {
                            'data': item_data,
                            'stream': item_channel,
                            'hash_name': item_hash
                        }
                    }

                    data = self.encode(msg_data)
                    id_data = json.dumps((packet, item_channel, item_hash))
                    message = f"data: {data}\nid: {id_data}\n\n"
                    yield message.encode('utf-8')

                    packet += 1
            except Exception as e:
                msg_data = {'exception': serialize_exception(e)}
                data = self.encode(msg_data)
                id_data = json.dumps((None, None, None))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
            finally:
                if hash_key is not None:
                    del self.stream_clients[hash_key][client_key]
                    if not self.stream_clients[hash_key]:
                        del self.stream_clients[hash_key]

        response = await make_response(
            TrioIterableBodyExit(send_events()),
            {
                'Content-Type': 'text/event-stream',
                'Cache-Control': 'no-cache',
                'Transfer-Encoding': 'chunked',
            },
        )
        response.timeout = None
        return response
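
Each server-sent event above is framed as a 'data:' line, an optional 'id:' line, and a blank line. The helper below is an illustrative parser, not part of the library, that turns an async iterator of text lines (such as an HTTP client's streaming line iterator) back into (event_id, data) pairs; the data payload is yielded still encoded, since decoding depends on the self.encode format.

import json


async def iter_sse_events(lines):
    """Yield (event_id, data_text) pairs from an async iterator of text lines.

    A blank line terminates each event, matching the framing emitted by the
    handlers above; events without an id line yield event_id = None.
    """
    data = event_id = None
    async for line in lines:
        if line.startswith('data:'):
            data = line[len('data:'):].strip()
        elif line.startswith('id:'):
            event_id = json.loads(line[len('id:'):].strip())
        elif not line.strip():
            # blank line marks the end of one event
            if data is not None:
                yield event_id, data
            data = event_id = None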
Example #11
    async def sse_data(self):
        # todo: send alive with timeout in case skipped packets
        # todo: make sure in all the apps decoding will raise user
        #  error, not system error like here
        # we must read here in the view function, otherwise the request is gone
        msg = None
        try:
            req_data = (await request.get_data()).decode('utf8')
            req_data = self.decode(req_data)
        except Exception as e:
            msg = {'exception': serialize_exception(e)}

        async def send_events():
            if msg is not None:
                data = self.encode(msg)
                id_data = json.dumps((None, ))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')
                return

            queue = MaxSizeErrorDeque(max_size=MAX_QUEUE_SIZE)

            def add_to_queue(item):
                encoded_data = self.encode(item)
                queue.add_item(encoded_data, len(encoded_data))

            try:
                binding, initial = await self.start_logging_object_data(
                    req_data, add_to_queue)

                try:
                    data = json.dumps('alive')
                    message = f"data: {data}\n\n"
                    yield message.encode('utf-8')

                    data = self.encode(
                        {'data': {
                            'data': self.encode(initial)
                        }})
                    id_data = json.dumps((0, ))
                    message = f"data: {data}\nid: {id_data}\n\n"
                    yield message.encode('utf-8')

                    packet = 1
                    async for data_item in queue:
                        data = self.encode({'data': {'data': data_item}})
                        id_data = json.dumps((packet, ))
                        message = f"data: {data}\nid: {id_data}\n\n"
                        yield message.encode('utf-8')

                        packet += 1
                finally:
                    await self.stop_logging_object_data(binding)
            except Exception as e:
                msg_data = {'exception': serialize_exception(e)}
                data = self.encode(msg_data)
                id_data = json.dumps((None, ))

                message = f"data: {data}\nid: {id_data}\n\n"
                yield message.encode('utf-8')

        response = await make_response(
            TrioIterableBodyExit(send_events()),
            {
                'Content-Type': 'text/event-stream',
                'Cache-Control': 'no-cache',
                'Transfer-Encoding': 'chunked',
            },
        )
        response.timeout = None
        return response
Example #12
async def socket_handler(executor: ProcessSocketServer, stream: SocketStream):
    # write any response
    await executor.write_socket(executor.encode({'data': 'hello'}), stream)

    while True:
        try:
            msg = await executor.read_decode_json_buffers(stream)
            cmd = msg['cmd']
            packet = msg['packet']
            data = msg['data']

            ret_data = {
                'cmd': cmd,
                'packet': packet,
            }

            if cmd == 'remote_import':
                res = await executor.remote_import(data)
            elif cmd == 'register_remote_class':
                res = await executor.register_remote_class(data)
            elif cmd == 'ensure_remote_instance':
                res = await executor.ensure_instance(data)
            elif cmd == 'delete_remote_instance':
                res = await executor.delete_instance(data)
            elif cmd == 'execute':
                res = await executor.execute(data)
            elif cmd == 'execute_generator':
                ret_data['done_execute'] = False
                async with aclosing(executor.execute_generator(data)) as aiter:
                    # if this raises an error it stops the underlying generator
                    async for res in aiter:
                        ret_data['data'] = res
                        try:
                            await executor.write_socket(
                                executor.encode(ret_data), stream)
                        except BrokenResourceError:
                            return

                res = None
                ret_data['done_execute'] = True
            elif cmd == 'get_remote_objects':
                res = await executor.get_objects(data)
            elif cmd == 'get_remote_object_config':
                res = await executor.get_object_config(data)
            elif cmd == 'get_remote_object_property_data':
                res = await executor.get_object_data(data)
            elif cmd == 'get_echo_clock':
                res = await executor.get_echo_clock(data)
            elif cmd == 'sleep':
                res = await executor.sleep(data)
            else:
                raise Exception(f'Unknown command "{cmd}"')

            ret_data['data'] = res
            encoded_ret = executor.encode(ret_data)

        except Exception as e:
            # todo: ignore write_socket in generator
            ret_data = {'exception': serialize_exception(e)}
            encoded_ret = executor.encode(ret_data)

        await executor.write_socket(encoded_ret, stream)
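
Examples #9 and #12 dispatch commands with a long if/elif chain mapping each command name to an executor method. One possible restructuring, sketched here rather than taken from the library, is a lookup table for the non-streaming commands; 'execute_generator' keeps its dedicated streaming branch.

# Hypothetical refactor: map command names to the executor methods invoked
# in the handlers above.
SIMPLE_COMMANDS = {
    'remote_import': 'remote_import',
    'register_remote_class': 'register_remote_class',
    'ensure_remote_instance': 'ensure_instance',
    'delete_remote_instance': 'delete_instance',
    'execute': 'execute',
    'get_remote_objects': 'get_objects',
    'get_remote_object_config': 'get_object_config',
    'get_remote_object_property_data': 'get_object_data',
    'get_echo_clock': 'get_echo_clock',
    'sleep': 'sleep',
}


async def dispatch_simple_command(executor, cmd, data):
    # run one non-streaming command; unknown names raise just as the
    # if/elif chain's else branch does
    try:
        method_name = SIMPLE_COMMANDS[cmd]
    except KeyError:
        raise Exception(f'Unknown command "{cmd}"') from None
    return await getattr(executor, method_name)(data)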