def test_multiple_large_messages(self, generate_messages):
    """Reading is capped at max_bytes even with many large messages queued."""
    stream = SendStream(1)
    stream.populate(generate_messages(count=100, length=200))

    header = struct.pack("?", False) + struct.pack(">I", 200)

    max_bytes = 50
    chunk_size = 10

    chunks = list(stream.read(max_bytes, chunk_size))

    # first chunk is the 5-byte header plus 5 payload bytes;
    # the remaining four chunks are pure payload
    expected = [header + b"\x00" * 5] + [b"\x00" * 10] * 4
    assert chunks == expected
    assert sum(len(chunk) for chunk in chunks) == max_bytes

    # the queue is emptied into the buffer during the read
    assert len(stream.buffer) == 100 * (5 + 200) - max_bytes  # 20450 bytes left
    assert stream.queue.qsize() == 0
def test_mark_as_sent(self):
    """Headers are returned once, then reported as already sent."""
    stream = SendStream(1)
    stream.headers.set(("foo", "bar"))

    # first call yields the headers and marks them as sent
    assert stream.headers_to_send(False) == [(b"foo", b"bar")]
    # subsequent calls report that the headers already went out
    assert stream.headers_to_send(False) is False
def test_empty_queue(self):
    """With nothing queued, headers are deferred unless sent unconditionally."""
    stream = SendStream(1)
    stream.headers.set(("foo", "bar"))
    assert stream.queue.qsize() == 0

    # defer-until-data: nothing is queued, so nothing is sent yet
    assert stream.headers_to_send(True) is False
    # an unconditional send returns the headers
    assert stream.headers_to_send(False) == [(b"foo", b"bar")]
def test_stream_closed(self):
    """Reading from a closed stream yields nothing."""
    stream = SendStream(1)
    stream.close()

    assert list(stream.read(10, 5)) == []  # max_bytes=10, chunk_size=5
def test_no_data(self):
    """Reading an open stream with an empty buffer yields nothing."""
    stream = SendStream(1)

    assert stream.buffer.empty()
    assert list(stream.read(10, 10)) == []  # max_bytes=10, chunk_size=10
def test_less_than_one_chunk_of_data(self):
    """A partial chunk is emitted as-is and drains the buffer."""
    stream = SendStream(1)
    stream.buffer.write(b"abc")

    assert list(stream.read(10, 5)) == [b"abc"]  # max_bytes=10, chunk_size=5
    assert stream.buffer.empty()
def test_defer_until_data(self):
    """Deferred headers are released once a message is queued."""
    stream = SendStream(1)
    stream.headers.set(("foo", "bar"))

    # nothing queued yet: sending is deferred
    assert stream.headers_to_send(True) is False

    stream.queue.put(Mock())
    assert stream.queue.qsize() == 1

    # data is now pending, so the headers are released
    assert stream.headers_to_send(True) == [(b"foo", b"bar")]
def test_more_than_max_bytes_of_data(self):
    """Reading stops at max_bytes; the surplus stays buffered."""
    stream = SendStream(1)
    stream.buffer.write(b"abcdefghijklm")

    # max_bytes=10, chunk_size=5 -> two full chunks
    assert list(stream.read(10, 5)) == [b"abcde", b"fghij"]
    assert stream.buffer.peek() == b"klm"
def test_chunk_greater_than_max_bytes(self):
    """max_bytes caps the read even when smaller than the chunk size."""
    stream = SendStream(1)
    stream.buffer.write(b"abcdefghijklm")

    # max_bytes=5, chunk_size=10 -> a single truncated chunk
    assert list(stream.read(5, 10)) == [b"abcde"]
    assert stream.buffer.peek() == b"fghijklm"
def test_less_than_max_bytes_of_data(self):
    """Everything buffered is drained when it fits within max_bytes."""
    stream = SendStream(1)
    stream.buffer.write(b"abcdefghijklm")

    # max_bytes=20, chunk_size=5 -> two full chunks plus the remainder
    assert list(stream.read(20, 5)) == [b"abcde", b"fghij", b"klm"]
    assert stream.buffer.empty()
def test_stream_closed_with_error(self):
    """Reading a stream that was closed with an error re-raises the error."""
    stream = SendStream(1)
    stream.close(GrpcError("boom", "details", "error string"))

    with pytest.raises(GrpcError):
        next(stream.read(10, 5))  # max_bytes=10, chunk_size=5
def test_error_on_queue(self, generate_messages):
    """Flushing after an error-close raises the error."""
    stream = SendStream(1)
    stream.populate(generate_messages(count=2, length=20))
    stream.close(GrpcError("boom", "details", "error string"))

    with pytest.raises(GrpcError):
        stream.flush_queue_to_buffer()
def test_data_in_buffer_and_messages_in_queue(self, generate_messages):
    """Pre-buffered bytes are sent ahead of newly queued messages."""
    stream = SendStream(1)
    stream.buffer.write(b"\xff\xff\xff\xff\xff")
    stream.populate(generate_messages(count=10, length=10))

    header = struct.pack("?", False) + struct.pack(">I", 10)

    max_bytes = 10
    chunks = list(stream.read(max_bytes, 10))  # chunk_size=10

    # the 5 buffered bytes come first; the first message's 5-byte header
    # fills the remainder of the single chunk
    assert chunks == [b"\xff\xff\xff\xff\xff" + header]
    assert sum(len(chunk) for chunk in chunks) == max_bytes

    # the queue is emptied into the buffer during the read
    assert len(stream.buffer) == 5 + 10 * (5 + 10) - max_bytes  # 145 bytes left
    assert stream.queue.qsize() == 0
def test_stream_closed(self, generate_messages):
    """Flushing again after the stream has closed is a no-op."""
    stream = SendStream(1)
    stream.populate(generate_messages(count=2, length=20))

    header = struct.pack("?", False) + struct.pack(">I", 20)
    flushed = header + b"\x00" * 20 + header + b"\x01" * 20

    stream.flush_queue_to_buffer()
    assert stream.buffer.peek() == flushed

    # stream closed once the queue drained; a second flush changes nothing
    stream.flush_queue_to_buffer()
    assert stream.buffer.peek() == flushed
def test_populate_closed_stream(self):
    """Populating a closed stream does not enqueue the messages."""
    stream = SendStream(1)
    stream.close()
    assert stream.closed

    stream.populate(range(10))
    # the ten messages are not queued; exactly one item remains
    # (presumably a close sentinel — confirm against SendStream.populate)
    assert stream.queue.qsize() == 1
def test_multiple_small_messages(self, generate_messages):
    """Chunks may split message headers and span several small messages."""
    stream = SendStream(1)
    stream.populate(generate_messages(count=100, length=1))

    header = struct.pack("?", False) + struct.pack(">I", 1)

    max_bytes = 20
    chunks = list(stream.read(max_bytes, 10))  # chunk_size=10

    # 5-byte header + 1-byte payload + first 4 bytes of the next header
    first = header + b"\x00" + header[:4]
    # remaining header byte + payload, a whole message, then 2 header bytes
    second = header[4:] + b"\x01" + header + b"\x02" + header[:2]
    assert chunks == [first, second]
    assert sum(len(chunk) for chunk in chunks) == max_bytes

    # the queue is emptied into the buffer during the read
    assert len(stream.buffer) == 100 * (5 + 1) - max_bytes  # 580 bytes left
    assert stream.queue.qsize() == 0
def test_error_on_queue(self, generate_messages):
    """An error placed on the queue is raised when the queue is flushed."""
    stream = SendStream(1)
    # use the same three-argument GrpcError form as the sibling tests
    # (test_stream_closed_with_error, the close-based test_error_on_queue)
    error = GrpcError("boom", "details", "error string")
    messages = itertools.chain(
        generate_messages(count=2, length=20), [error]
    )
    stream.populate(messages)

    with pytest.raises(GrpcError):
        stream.flush_queue_to_buffer()
def send_request(self, request_headers):
    """
    Called by the client to invoke a GRPC method.

    Establishes a `SendStream` for the request payload and a
    `ReceiveStream` for the eventual response; both are returned to the
    client for providing the request payload and iterating over the
    response.

    Invocations are queued and sent on the next iteration of the event
    loop.
    """
    stream_id = next(self.counter)

    outbound = SendStream(stream_id)
    inbound = ReceiveStream(stream_id)

    # register both streams under the new stream id
    self.receive_streams[stream_id] = inbound
    self.send_streams[stream_id] = outbound

    outbound.headers.set(*request_headers)
    self.pending_requests.append(stream_id)

    return outbound, inbound
def request_received(self, event):
    """
    Receive a GRPC request and pass it to the GrpcServer to fire any
    appropriate entrypoint.

    Establish a `ReceiveStream` to receive the request payload and
    `SendStream` for sending the eventual response.
    """
    super().request_received(event)

    stream_id = event.stream_id

    # inbound payload stream and outbound response stream, both keyed
    # by the h2 stream id
    request_stream = ReceiveStream(stream_id)
    response_stream = SendStream(stream_id)
    self.receive_streams[stream_id] = request_stream
    self.send_streams[stream_id] = response_stream

    # headers arrive wire-encoded from the h2 event
    request_stream.headers.set(*event.headers, from_wire=True)

    # pick a compression algorithm from what the client accepts/uses
    compression = select_algorithm(
        request_stream.headers.get("grpc-accept-encoding"),
        request_stream.headers.get("grpc-encoding"),
    )

    try:
        response_stream.headers.set(
            (":status", "200"),
            ("content-type", "application/grpc+proto"),
            ("grpc-accept-encoding", ",".join(SUPPORTED_ENCODINGS)),
            # TODO support server changing compression later
            ("grpc-encoding", compression),
        )
        # optimistic OK status; may be overwritten before trailers are sent
        response_stream.trailers.set(("grpc-status", "0"))
        self.handle_request(request_stream, response_stream)
    except GrpcError as error:
        # report the failure via trailers and terminate the stream
        response_stream.trailers.set((":status", "200"), *error.as_headers())
        self.end_stream(stream_id)
def test_no_headers(self):
    """With no headers set there is nothing to send."""
    stream = SendStream(1)

    assert len(stream.headers) == 0
    assert stream.headers_to_send(False) is False
def test_send_trailers(self):
    """Trailers that were set are returned as byte pairs."""
    stream = SendStream(1)
    stream.trailers.set(("foo", "bar"))

    assert stream.trailers_to_send() == [(b"foo", b"bar")]
def test_no_trailers(self):
    """With no trailers set there is nothing to send."""
    stream = SendStream(1)

    assert len(stream.trailers) == 0
    assert stream.trailers_to_send() is False
def test_empty_queue(self):
    """Flushing an empty queue leaves the buffer empty."""
    stream = SendStream(1)
    assert stream.queue.qsize() == 0

    stream.flush_queue_to_buffer()
    assert stream.buffer.empty()