def get_server_metadata(self, as_json=False, headers=None):
    """Contact the inference server and get its metadata.

    Parameters
    ----------
    as_json : bool
        If True then returns server metadata as a json
        dict, otherwise as a protobuf message. Default
        value is False.
    headers: dict
        Optional dictionary specifying additional HTTP
        headers to include in the request.

    Returns
    -------
    dict or protobuf message
        The JSON dict or ServerMetadataResponse message
        holding the metadata.

    Raises
    ------
    InferenceServerException
        If unable to get server metadata.

    """
    # Forward caller-supplied headers as gRPC metadata; an empty
    # tuple means "no extra metadata" (consistent with the
    # headers-aware variant of this method elsewhere in the project).
    if headers is not None:
        metadata = headers.items()
    else:
        metadata = ()
    try:
        request = grpc_service_v2_pb2.ServerMetadataRequest()
        response = self._client_stub.ServerMetadata(request=request,
                                                    metadata=metadata)
        if as_json:
            # MessageToJson -> json.loads yields a plain dict view
            # of the protobuf response.
            return json.loads(MessageToJson(response))
        else:
            return response
    except grpc.RpcError as rpc_error:
        # Translate the gRPC error into InferenceServerException.
        raise_error_grpc(rpc_error)
def get_server_metadata(self, headers=None, as_json=False):
    """Ask the inference server for its metadata.

    Parameters
    ----------
    headers: dict
        Optional dictionary specifying additional HTTP
        headers to include in the request.
    as_json : bool
        If True then returns server metadata as a json
        dict, otherwise as a protobuf message. Default
        value is False.

    Returns
    -------
    dict or protobuf message
        The JSON dict or ServerMetadataResponse message
        holding the metadata.

    Raises
    ------
    InferenceServerException
        If unable to get server metadata.

    """
    # No headers -> empty gRPC metadata tuple.
    metadata = headers.items() if headers is not None else ()
    try:
        response = self._client_stub.ServerMetadata(
            request=grpc_service_v2_pb2.ServerMetadataRequest(),
            metadata=metadata)
        # Either hand back the raw protobuf or a dict decoded
        # from its JSON rendering, per the caller's preference.
        if not as_json:
            return response
        return json.loads(MessageToJson(response))
    except grpc.RpcError as rpc_error:
        # Convert gRPC-level failures to InferenceServerException.
        raise_error_grpc(rpc_error)
    # Liveness probe — NOTE(review): this completes a try-block opened
    # before this chunk; presumably guarding the first server contact.
    response = grpc_stub.ServerLive(request)
    print("server {}".format(response))
except Exception as ex:
    # Best-effort: report the liveness failure and continue with the
    # remaining status queries.
    print(ex)

# Server readiness.
request = grpc_service_v2_pb2.ServerReadyRequest()
response = grpc_stub.ServerReady(request)
print("server {}".format(response))

# Model readiness — model_name/model_version are defined earlier in
# the script (outside this view).
request = grpc_service_v2_pb2.ModelReadyRequest(name=model_name,
                                                version=model_version)
response = grpc_stub.ModelReady(request)
print("model {}".format(response))

# Metadata
request = grpc_service_v2_pb2.ServerMetadataRequest()
response = grpc_stub.ServerMetadata(request)
print("server metadata:\n{}".format(response))

request = grpc_service_v2_pb2.ModelMetadataRequest(name=model_name,
                                                   version=model_version)
response = grpc_stub.ModelMetadata(request)
print("model metadata:\n{}".format(response))

# Configuration
request = grpc_service_v2_pb2.ModelConfigRequest(name=model_name,
                                                 version=model_version)
response = grpc_stub.ModelConfig(request)
print("model config:\n{}".format(response))

# Infer