def get_model_metadata(body):
    """Return serving metadata for the model described by *body* as a dict.

    *body* is a dict matching GetModelMetadataRequest; field names are
    preserved as proto field names and default-valued fields are included.
    """
    stub = _get_prediction_service_stub()
    metadata_request = ParseDict(body, model_metadata.GetModelMetadataRequest())
    response = stub.GetModelMetadata(metadata_request)
    return MessageToDict(
        response,
        preserving_proto_field_name=True,
        including_default_value_fields=True,
    )
def cache_prediction_metadata(self):
    """Fetch the model's signature_def from TF Serving over gRPC and cache
    the per-input dtype map, the prediction method name, and the stub.
    """
    address = '{}:{}'.format(self.host, self.tf_serving_port)
    grpc_options = [
        ('grpc.max_send_message_length', MAX_GRPC_MESSAGE_SIZE),
        ('grpc.max_receive_message_length', MAX_GRPC_MESSAGE_SIZE),
    ]
    channel = grpc.insecure_channel(address, options=grpc_options)
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = self.model_name
    metadata_request.metadata_field.append('signature_def')
    result = stub.GetModelMetadata(metadata_request, self.request_timeout)

    _logger.info(
        '---------------------------Model Spec---------------------------')
    _logger.info(json_format.MessageToJson(result))
    _logger.info(
        '----------------------------------------------------------------')

    # The metadata value is an Any-wrapped SignatureDefMap; parse its bytes.
    signature_map = get_model_metadata_pb2.SignatureDefMap()
    signature_map.ParseFromString(result.metadata['signature_def'].value)
    serving_default = signature_map.ListFields()[0][1]['serving_default']
    serving_inputs = serving_default.inputs

    self.input_type_map = {
        tensor_name: serving_inputs[tensor_name].dtype
        for tensor_name in serving_inputs.keys()
    }
    self.prediction_type = serving_default.method_name
    self.prediction_service_stub = stub
def get_fake_model_metadata_request(model_name, metadata_field, version=None):
    """Build a GetModelMetadataRequest for *model_name*, asking for
    *metadata_field*, optionally pinned to a specific *version*."""
    req = get_model_metadata_pb2.GetModelMetadataRequest()
    req.model_spec.name = model_name
    if version is not None:
        req.model_spec.version.value = version
    req.metadata_field.append(metadata_field)
    return req
def get_model_version(model_name, stub):
    """Return the version of the served model named *model_name*.

    Args:
        model_name: Name of the model registered with TF Serving.
        stub: PredictionService gRPC stub.

    Returns:
        The model version reported by the server (int64 value).
    """
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    # Bug fix: the model name was hard-coded to 'amazon_review', silently
    # ignoring the model_name argument.
    request.model_spec.name = model_name
    request.metadata_field.append("signature_def")
    response = stub.GetModelMetadata(request, 10)
    # signature of loaded model is available here: response.metadata['signature_def']
    return response.model_spec.version.value
def cache_prediction_metadata(self):
    """Fetch the model's signature_def via the beta gRPC API and cache the
    per-input dtype map and the prediction method name."""
    channel = implementations.insecure_channel(self.host, self.tf_serving_port)
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = self.model_name
    metadata_request.metadata_field.append('signature_def')
    result = stub.GetModelMetadata(metadata_request, self.request_timeout)

    _logger.info(
        '---------------------------Model Spec---------------------------')
    _logger.info(json_format.MessageToJson(result))
    _logger.info(
        '----------------------------------------------------------------')

    # The metadata value is an Any-wrapped SignatureDefMap; parse its bytes.
    signature_map = get_model_metadata_pb2.SignatureDefMap()
    signature_map.ParseFromString(result.metadata['signature_def'].value)
    serving_default = signature_map.ListFields()[0][1]['serving_default']
    serving_inputs = serving_default.inputs

    self.input_type_map = {
        tensor_name: serving_inputs[tensor_name].dtype
        for tensor_name in serving_inputs.keys()
    }
    self.prediction_type = serving_default.method_name
def _load_model_signatures(
    self, model_name: str, model_version: str, signature_key: Optional[str] = None
) -> None:
    """
    Queries the signature defs from TFS.

    Args:
        model_name: Name of the model.
        model_version: Version of the model.
        signature_key: Signature key of the model as passed in with
            predictor:signature_key, predictor:models:paths:signature_key or
            predictor:models:signature_key. When set to None, "predict" is the
            assumed key.

    Raises:
        cortex_internal.lib.exceptions.UserException when the signature def
        can't be validated.
    """
    # create model metadata request
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    request.model_spec.name = model_name
    request.model_spec.version.value = int(model_version)
    request.metadata_field.append("signature_def")

    # Retry the metadata call: it has been observed that it may take a little
    # bit of time until a model becomes accessible with TFS, even though it's
    # already loaded in. Idiom fix: the original tracked a `last_idx` sentinel
    # to detect exhaustion; for/else expresses the same thing directly.
    for _ in range(100):
        try:
            resp = self._pred.GetModelMetadata(request)
            break
        except grpc.RpcError:
            time.sleep(0.3)
    else:
        raise UserException(
            "couldn't find model '{}' of version '{}' to extract the signature def".format(
                model_name, model_version
            )
        )

    # Unpack the Any-wrapped SignatureDefMap into a plain dict.
    sigAny = resp.metadata["signature_def"]
    signature_def_map = get_model_metadata_pb2.SignatureDefMap()
    sigAny.Unpack(signature_def_map)
    sigmap = json_format.MessageToDict(signature_def_map)
    signature_def = sigmap["signatureDef"]

    # extract signature key and input signature
    signature_key, input_signatures = self._extract_signatures(
        signature_def, signature_key, model_name, model_version
    )

    model_id = f"{model_name}-{model_version}"
    self.models[model_id]["signature_def"] = signature_def
    self.models[model_id]["signature_key"] = signature_key
    self.models[model_id]["input_signatures"] = input_signatures
def _get_sig_def(self):
    """Return the model's 'serving_default' signature def as a plain dict."""
    channel = grpc.insecure_channel(self._server)
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = self._model_name
    metadata_request.metadata_field.append("signature_def")
    reply = MessageToDict(stub.GetModelMetadata(metadata_request, 10.0))

    # close the channel so that it won't be reused after fork and fail
    channel.close()
    return reply["metadata"]["signature_def"]["signatureDef"]["serving_default"]
def get_metadata(self, model_name, signature_name, timeout):
    """Fetch model metadata from TFS and print the model spec plus the
    signature def named *signature_name*."""
    metadata_field = 'signature_def'
    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = model_name
    metadata_request.metadata_field.append(metadata_field)

    response = self.stub.GetModelMetadata(metadata_request, timeout)
    print(response.model_spec)

    # Decode the serialized SignatureDefMap carried in the metadata value.
    signature_map = get_model_metadata_pb2.SignatureDefMap()
    signature_map.MergeFromString(response.metadata[metadata_field].value)
    print(signature_map.signature_def[signature_name])
def get_io(self, sub_network):
    """Return (input names, output specs) of *sub_network*'s
    'serving_default' signature.

    Output specs are (name, shape) pairs, ordered by the underlying tensor
    name, where shape is a list of dimension sizes.
    """
    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = sub_network
    metadata_request.metadata_field.append("signature_def")
    reply = self.prediction_service.GetModelMetadata(metadata_request,
                                                     self.timeout)

    signature_map = get_model_metadata_pb2.SignatureDefMap()
    reply.metadata['signature_def'].Unpack(signature_map)
    serving_default = signature_map.signature_def['serving_default']

    input_names = list(serving_default.inputs)
    ordered_outputs = sorted(serving_default.outputs.items(),
                             key=lambda item: item[1].name)
    output_specs = [
        (output_name, [dim.size for dim in meta.tensor_shape.dim])
        for output_name, meta in ordered_outputs
    ]
    return input_names, output_specs
def __init__(self, name, parameters):
    """Initialize from a parameter dict, falling back to environment
    variables for missing S3 settings; set up the S3 client, a per-instance
    temp directory, optional label classes, and an optional gRPC stub to
    TF Serving.

    Args:
        name: Instance name; also the default S3 folder and temp-dir name.
        parameters: Configuration dict; its keys become instance attributes.
    """
    # Every supplied parameter becomes an attribute directly.
    self.__dict__ = parameters
    if 'access_key' not in parameters:
        self.access_key = os.environ['ACCESS_KEY']
    if 'secret_key' not in parameters:
        self.secret_key = os.environ['SECRET_KEY']
    # Note: the S3_URL env var takes precedence over parameters['url'].
    if 'S3_URL' not in os.environ:
        self.url = parameters['url']
    else:
        self.url = os.environ['S3_URL']
    if 'bucket' not in parameters:
        self.bucket = os.environ['BUCKET']
    if 'folder' not in parameters:
        self.folder = name

    self.s3 = boto3.client(
        's3',
        endpoint_url=self.url,
        config=boto3.session.Config(signature_version='s3v4'),
        aws_access_key_id=self.access_key,
        aws_secret_access_key=self.secret_key)

    # Bug fix: this was a bare `except:` that swallowed every exception,
    # including KeyboardInterrupt/SystemExit. Only two failures are expected
    # here: `keep_temp` missing from parameters (AttributeError) and the
    # directory being absent or unremovable (OSError).
    try:
        if not self.keep_temp:
            shutil.rmtree(tempfile.gettempdir() + '/' + name)
    except (AttributeError, OSError):
        print('temp dir ' + tempfile.gettempdir() + '/' + name +
              ' does not exist')
    tmpdir = Path(tempfile.gettempdir() + '/' + name)
    if not tmpdir.is_dir():
        os.mkdir(tempfile.gettempdir() + '/' + name)
    self.tempdir = tempfile.gettempdir() + '/' + name + "/"

    if not hasattr(self, 'predict'):
        self.predict = False
    if self.predict:
        self.label_file = "trained_models/{}/{}/object-detection.pbtxt".format(
            self.model, self.version)
        self.get_classes()

    if 'grpc' not in parameters:
        self.grpc = False
    if self.grpc:
        channel = grpc.insecure_channel('{}:{}'.format(
            self.tfserver, self.tfport))
        self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
            channel)
        request = get_model_metadata_pb2.GetModelMetadataRequest()
        request.model_spec.name = self.model
        request.metadata_field.append("signature_def")
        response = self.stub.GetModelMetadata(request, 10.0)
        print(response.model_spec.version.value)
        print('project [{}] sig [{}]'.format(
            self.name, response.metadata['detection_signature']))
def __get_input_name_and_shape__(self):
    """Query model metadata from TFS and return the first input/output
    tensor names together with their shapes, as a 4-tuple:
    (input name, input shape, output name, output shape)."""
    logging.info(f"start get_input_name")
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    request.model_spec.name = self.model_name
    if self.model_version is not None:
        request.model_spec.version.value = self.model_version
    request.metadata_field.append("signature_def")

    # The reply carries a dictionary describing all model inputs/outputs.
    reply = self.stub.GetModelMetadata(request, 10.0)
    input_metadata, output_metadata = self.__get_input_and_output_meta_data__(
        reply)

    input_name = next(iter(input_metadata))
    output_name = next(iter(output_metadata))
    logging.info(f"get_input_name_and_shape_function success!")
    return (input_name, input_metadata[input_name]['shape'],
            output_name, output_metadata[output_name]['shape'])
def get_signature_map(model_server_stub, model_name):
    """
    Gets tensorflow signature map from the model server stub.

    Args:
        model_server_stub: The grpc stub to call GetModelMetadata.
        model_name: The model name.

    Returns:
        The signature map of the model, or None when the RPC fails.
    """
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    request.model_spec.name = model_name
    request.metadata_field.append("signature_def")
    try:
        response = model_server_stub.GetModelMetadata(
            request, MODEL_SERVER_METADATA_TIMEOUT_SEC)
    except grpc.RpcError as rpc_error:
        logging.exception(
            "GetModelMetadata call to model server failed with code "
            "%s and message %s", rpc_error.code(), rpc_error.details())
        return None
    signature_def_map_proto = get_model_metadata_pb2.SignatureDefMap()
    response.metadata["signature_def"].Unpack(signature_def_map_proto)
    signature_def_map = signature_def_map_proto.signature_def
    if not signature_def_map:
        logging.error("Graph has no signatures.")
    # Delete incomplete signatures without input dtypes. Collect first, then
    # delete, so the map isn't mutated while being iterated.
    invalid_signatures = []
    for signature_name in signature_def_map:
        # Bug fix: dict.itervalues() is Python 2 only and raises
        # AttributeError on Python 3; use .values().
        for tensor in signature_def_map[signature_name].inputs.values():
            if not tensor.dtype:
                # logging.warn is deprecated; logging.warning is the
                # supported spelling.
                logging.warning(
                    "Signature %s has incomplete dtypes, removing from "
                    "usable signatures", signature_name)
                invalid_signatures.append(signature_name)
                break
    for signature_name in invalid_signatures:
        del signature_def_map[signature_name]
    return signature_def_map
def testGetModelMetadata(self):
    """Test PredictionService.GetModelMetadata implementation."""
    model_path = self._GetSavedModelBundlePath()
    model_server_address = TensorflowModelServerTest.RunServer(
        'default', model_path)[1]

    print('Sending GetModelMetadata request...')
    # Connect to the freshly started server.
    channel = grpc.insecure_channel(model_server_address)
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

    # Build and send the metadata request.
    metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
    metadata_request.model_spec.name = 'default'
    metadata_request.metadata_field.append('signature_def')
    response = stub.GetModelMetadata(metadata_request,
                                     RPC_TIMEOUT)  # 5 secs timeout

    # Verify the response carries the right model spec and metadata key.
    self.assertEqual('default', response.model_spec.name)
    self.assertEqual(self._GetModelVersion(model_path),
                     response.model_spec.version.value)
    self.assertEqual(1, len(response.metadata))
    self.assertIn('signature_def', response.metadata)
def get_model_version(
        model_name: str,
        stub: prediction_service_pb2_grpc.PredictionServiceStub) -> int:
    """Returns the version of the model.

    Parameters
    ----------
    model_name : str
        Name of the model registered with TF Serving.
    stub : prediction_service_pb2_grpc.PredictionServiceStub
        Prediction API.

    Returns
    -------
    int
        Version of the model. (Annotation fix: model_spec.version.value is
        an int64 wrapper value, not a str; the old `-> str` annotation
        contradicted the actual return value.)
    """
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    request.model_spec.name = model_name
    request.metadata_field.append("signature_def")
    response = stub.GetModelMetadata(request, 10)
    return response.model_spec.version.value
def create_get_model_metadata_request():
    """Build a GetModelMetadataRequest asking for the signature_def of the
    model named "model"."""
    request = get_model_metadata_pb2.GetModelMetadataRequest()
    request.model_spec.name = "model"
    request.metadata_field.append("signature_def")
    return request
import json
import grpc
import numpy as np
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
from tensorflow_serving.apis import get_model_status_pb2
from tensorflow_serving.apis import get_model_metadata_pb2
from google.protobuf.json_format import MessageToJson

# Connect to a local TF Serving instance and dump the model's metadata.
hostport = "localhost:8500"
channel = grpc.insecure_channel(hostport)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

metadata_request = get_model_metadata_pb2.GetModelMetadataRequest()
metadata_request.model_spec.name = "model"
metadata_request.metadata_field.append("signature_def")

raw_result = stub.GetModelMetadata(metadata_request, 5)  # 5 secs timeout
result = json.loads(MessageToJson(raw_result))
print("Model metadata:")
print(result)
def __init__(self, model_spec=None, metadata_field=None, **kwargs):
    """Wrap a fresh GetModelMetadataRequest proto.

    Forwards model_spec, metadata_field and any extra keyword arguments to
    the base-class constructor alongside the new request message.
    """
    super().__init__(get_model_metadata_pb2.GetModelMetadataRequest(),
                     model_spec=model_spec,
                     metadata_field=metadata_field,
                     **kwargs)
import grpc
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import model_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
from tensorflow_serving.apis import get_model_metadata_pb2

CHANNEL_ADDRESS = r'172.16.104.25:19001'
MODEL_NAME = r'3d_nodule_detector'

# Open a channel to the detector's serving endpoint.
channel = grpc.insecure_channel(CHANNEL_ADDRESS)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

# Request the signature_def metadata for the model.
request = get_model_metadata_pb2.GetModelMetadataRequest(
    model_spec=model_pb2.ModelSpec(name=MODEL_NAME),
    metadata_field=["signature_def"])
response = stub.GetModelMetadata(request)

# The metadata value is a serialized SignatureDefMap.
sigdef_str = response.metadata["signature_def"].value
print("Name:", response.model_spec.name)
print("Version:", response.model_spec.version.value)
print(get_model_metadata_pb2.SignatureDefMap.FromString(sigdef_str))