def Predict(self, request, context):
    """Handle one gRPC Predict call against the loaded TensorFlow model.

    Looks up the requested signature, feeds the request tensors into the
    model's session, converts the fetched outputs back into hs.TensorProto
    messages, and (for stateful models) carries recurrent state across calls.

    Args:
        request: hs.PredictRequest with model_spec.signature_name and inputs.
        context: gRPC ServicerContext used to report INVALID_ARGUMENT.

    Returns:
        hs.PredictResponse with one converted tensor per signature output;
        an empty hs.PredictResponse when the signature is unknown.
    """
    # Per-request id used only to correlate log lines.
    rid = uuid.uuid4()
    self.logger.info("[{}] Received inference request: {}".format(rid, request))
    signature_name = request.model_spec.signature_name
    if signature_name in self.model.signatures:
        sig = self.model.signatures[signature_name]
    else:
        # Unknown signature: report INVALID_ARGUMENT and return an empty response.
        msg = "[{}] {} signature is not present in the model".format(rid, signature_name)
        context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
        context.set_details(msg)
        self.logger.error(msg)
        return hs.PredictResponse()
    self.logger.debug("[{}] Using {} signature".format(rid, sig.name))
    fetch = sig.outputs
    feed = {}
    # Convert each request tensor proto into an ndarray keyed by the
    # graph tensor name the signature maps it to.
    for (k, v) in sig.inputs.items():
        tensor = request.inputs[k]
        feed[v.name] = tf.contrib.util.make_ndarray(tensor)
    if self.model.is_stateful():
        # Also fetch the state tensors and feed the state kept from the
        # previous call, so recurrent models see a continuous sequence.
        fetch = {**fetch, **self.state_fetch}
        feed.update(self.state)
    result = self.model.session.run(fetch, feed_dict=feed)
    self.logger.info("[{}] raw result: {}".format(rid, result))
    converted_results = {}
    for out_key, out_tensor in sig.outputs.items():
        out_value = result[out_key]
        self.logger.info(
            "[{}] Assembling tensor: dtype={} shape={} data={}".format(
                rid, out_tensor.dtype, out_tensor.shape, out_value))
        # Build a TF TensorProto, then re-parse its wire form into the
        # hs flavour of TensorProto (the two are wire-compatible).
        original_tensor = fixed_make_tensor_proto(out_value, dtype=out_tensor.dtype, shape=out_tensor.shape)
        tensor_proto = hs.TensorProto()
        tensor_proto.ParseFromString(original_tensor.SerializeToString())
        self.logger.info("[{}] Answer: {}".format(rid, tensor_proto))
        converted_results[out_key] = tensor_proto
    # Persist the new recurrent state for the next request; zero_states[i]
    # names the feed slot matching state_placeholders[i].
    for i, v in enumerate(self.model.state_placeholders):
        state_name = self.model.zero_states[i]
        self.state[state_name.name] = result[v.name]
    return hs.PredictResponse(outputs=converted_results)
def test_correct_signature(self):
    """Predict over an existing signature ("add") returns the expected sum."""
    runtime = TensorflowRuntime("models/tf_summator")
    runtime.start(port="9090")
    try:
        time.sleep(1)  # let the gRPC server come up
        channel = grpc.insecure_channel('localhost:9090')
        stub = hs.PredictionServiceStub(channel=channel)

        def int8_scalar(value):
            # Re-parse a TF tensor proto as the hs flavour of TensorProto.
            proto = hs.TensorProto()
            proto.ParseFromString(
                tf.contrib.util.make_tensor_proto(
                    value, dtype=tf.int8, shape=[]).SerializeToString())
            return proto

        request = hs.PredictRequest(
            model_spec=hs.ModelSpec(signature_name="add"),
            inputs={"a": int8_scalar(3), "b": int8_scalar(2)})
        result = stub.Predict(request)
        expected = hs.PredictResponse(
            outputs={
                "sum": hs.TensorProto(dtype=hs.DT_INT8,
                                      tensor_shape=hs.TensorShapeProto(),
                                      int_val=[5])
            })
        self.assertEqual(result, expected)
    finally:
        runtime.stop()
def claim(client_profile):
    """Return a claim amount equal to ten times the sum of the profile values."""
    total = sum(client_profile.double_val) * 10
    amount = hs.TensorProto(double_val=[total], dtype=hs.DT_DOUBLE)
    return hs.PredictResponse(outputs={"amount": amount})
def unpack(image_base64):
    """Decode a base64-encoded image, resize it to 331x331 and return it
    as a flattened DT_DOUBLE tensor with an explicit [1, H, W, C] shape."""
    raw_bytes = base64.b64decode(image_base64.string_val[0])
    image = Image.open(io.BytesIO(raw_bytes))
    pixels = tf.keras.preprocessing.image.img_to_array(image.resize((331, 331)))
    # Prepend a batch dimension of 1.
    batched = pixels.reshape((1,) + pixels.shape)
    shape_proto = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=d) for d in batched.shape])
    tensor_proto = hs.TensorProto(dtype=hs.DT_DOUBLE,
                                  double_val=batched.flatten(),
                                  tensor_shape=shape_proto)
    return hs.PredictResponse(outputs={"shaped_image": tensor_proto})
def detect(image_b64):
    """Run the SSD detector on a base64-carried image and return the
    detected classes, confidence scores and bounding boxes."""
    # string_val[0] holds the payload; decoded to text before preprocessing.
    pic_matrix = ssd.preprocess(image_b64.string_val[0].decode("utf-8"))
    rpredictions, rlocalisations, rbbox_img = ssd.run(pic_matrix)
    rclasses, rscores, rbboxes = ssd.postprocess(rpredictions, rlocalisations, rbbox_img)
    # NOTE(review): other handlers in this project wrap outputs in
    # hs.TensorProto before building the response; here the raw
    # postprocess results are returned directly — confirm the serving
    # layer accepts non-proto values in `outputs`.
    return hs.PredictResponse(outputs={
        "classes": rclasses,
        "scores": rscores,
        "bboxes": rbboxes
    })
def predict(**kwargs):
    """Score one request with the monitoring model and return the value
    as a scalar DT_DOUBLE tensor under the "value" key."""
    values = [extract_value(kwargs[name]) for name in features]
    row = np.dstack(np.array(values)).reshape(1, len(features))
    score = monitoring_model.decision_function(row)
    scalar = hs.TensorProto(double_val=[score.item()],
                            dtype=hs.DT_DOUBLE,
                            tensor_shape=hs.TensorShapeProto())
    return hs.PredictResponse(outputs={"value": scalar})
def infer(features):
    """Classify transactions as fraud: DT_DOUBLE tensor in, DT_BOOL tensor out."""
    declared_shape = [d.size for d in features.tensor_shape.dim]
    sample = np.array(features.double_val).reshape(declared_shape)
    prediction = clf.predict(sample)
    out_shape = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=s) for s in prediction.shape])
    verdict = hs.TensorProto(dtype=hs.DT_BOOL,
                             bool_val=prediction,
                             tensor_shape=out_shape)
    return hs.PredictResponse(outputs={'is_fraud': verdict})
def predict(**kwargs):
    """Predict a class label for one feature row; returns a scalar DT_INT64
    tensor under the "classes" key."""
    values = [extract_value(kwargs[name]) for name in features]
    row = np.dstack(np.array(values)).reshape(1, len(features))
    label = clf.predict(row)
    out = hs.TensorProto(int64_val=[label.item()],
                         dtype=hs.DT_INT64,
                         tensor_shape=hs.TensorShapeProto())
    return hs.PredictResponse(outputs={"classes": out})
def infer(msg):
    """Tokenize the incoming string tensor and return the tokens as a
    1-D DT_STRING tensor under "preprocessed_msg"."""
    # TODO: is numpy needed only for this?
    msg_str = np.array(msg.string_val)[0]
    tokens = infer_str(msg_str)
    out_shape = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=len(tokens))])
    packed = hs.TensorProto(
        dtype=hs.DT_STRING,
        string_val=[token.encode('utf-8', 'ignore') for token in tokens],
        tensor_shape=out_shape)
    # Hand the packed tokens back to the pipeline.
    return hs.PredictResponse(outputs={"preprocessed_msg": packed})
def tokenize(text):
    """Turn a raw sentence into a padded token-id sequence.

    URL-looking fragments are collapsed to a literal "<url>" token before
    tokenizing; the result is padded/truncated to `maxlen` ids.

    Fixes over the previous revision:
    * the decode loop used ``str`` as its variable name, shadowing the
      builtin;
    * it decoded *every* element of ``string_val`` only to keep the
      first — now only the element actually used is decoded.

    Returns:
        hs.PredictResponse with an int64 "tokenized" tensor of length 100.
    """
    sentence = text.string_val[0].decode()
    if 'www.' in sentence or 'http:' in sentence or 'https:' in sentence or '.com' in sentence:
        sentence = re.sub(r"([^ ]+(?<=\.[a-z]{3}))", "<url>", sentence)
    tok_sentence = tokenizer.texts_to_sequences([sentence])
    pad_sentence = sequence.pad_sequences(tok_sentence, maxlen=maxlen)[0]
    tok_tensor = hs.TensorProto(
        int64_val=pad_sentence,
        dtype=hs.DT_INT64,
        tensor_shape=hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=100)]))
    return hs.PredictResponse(outputs={'tokenized': tok_tensor})
def normalize(data):
    """Scale the incoming values so the maximum becomes 1.0 and return
    them as a [1, 1, 24] DT_DOUBLE tensor."""
    print(data)
    peak = max(data.double_val)
    scaled = [float(v) / peak for v in data.double_val]
    out_shape = hs.TensorShapeProto(dim=[
        hs.TensorShapeProto.Dim(size=1),
        hs.TensorShapeProto.Dim(size=1),
        hs.TensorShapeProto.Dim(size=24)
    ])
    out = hs.TensorProto(dtype=hs.DT_DOUBLE,
                         double_val=scaled,
                         tensor_shape=out_shape)
    print(out)
    return hs.PredictResponse(outputs={"data": out})
def detect(result):
    """Flag the request as anomalous when any incoming value falls below
    the 0.05 threshold; the input tensor is echoed alongside the flag.

    Fix: the previous for-loop kept scanning the whole tensor after the
    first hit; ``any()`` expresses the same predicate and stops early.
    """
    print(result)
    is_anomaly = any(val < 0.05 for val in result.double_val)
    anomaly = hs.TensorProto(dtype=hs.DT_BOOL, bool_val=[is_anomaly])
    print(result)
    print(anomaly)
    return hs.PredictResponse(outputs={"result": result, "anomaly": anomaly})
def increment(number):  # <- keep in mind the signature
    """Add one to every element of an int32 tensor, preserving its shape."""
    incremented = tf.make_ndarray(number) + 1
    dims = [hs.TensorShapeProto.Dim(size=d.size)
            for d in number.tensor_shape.dim]
    out = hs.TensorProto(
        int_val=incremented.flatten(),
        dtype=hs.DT_INT32,
        tensor_shape=hs.TensorShapeProto(dim=dims))
    return hs.PredictResponse(outputs={"number": out})
def predict(sample):
    """Classify a sample as "normal"/"abnormal" from the model's two outputs.

    Fixes over the previous revision:
    * ``DT_STRING`` is referenced through the ``hs`` module like every
      other handler in this project (a bare ``DT_STRING`` is a NameError
      unless it was separately imported);
    * ``TensorShapeProto.dim`` is a *repeated* field and must receive a
      list of ``Dim`` messages, not a single message.
    """
    output = model(sample)
    if output[0] > output[1]:
        prediction = [b"normal"]
    else:
        prediction = [b"abnormal"]
    response_tensor_shape = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=1)])
    return hs.PredictResponse(
        outputs={
            "result": hs.TensorProto(dtype=hs.DT_STRING,
                                     string_val=prediction,
                                     tensor_shape=response_tensor_shape)
        })
def infer(pclass, sex, age, fare, parch):
    """Predict Titanic survival from the five input tensors; returns a
    DT_INT32 "survived" tensor."""
    frame = pd.DataFrame({
        'Pclass': pclass.int_val,
        'Sex': sex.string_val,
        'Age': age.int_val,
        'Fare': fare.double_val,
        'Parch': parch.int_val
    })
    # Encode sex as 0/1 before feeding the model.
    frame['Sex'] = frame['Sex'].map({'male': 0, 'female': 1}).to_frame()
    score = gbm.predict(frame.values)
    survived = hs.TensorProto(
        dtype=hs.DT_INT32,
        int_val=score,
        tensor_shape=hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=-1)]))
    return hs.PredictResponse(outputs={"survived": survived})
def tokenize(x):
    """Lower-case and NLTK-tokenize every sentence in the input tensor,
    returning the space-joined tokens as a DT_STRING tensor of shape [-1, 1].

    Fixes over the previous revision:
    * the joined tokens were written back into ``np.copy(sentences)``,
      whose fixed-width unicode dtype silently truncates any result
      longer than the original sentence — results are now collected in a
      plain list;
    * ``string_val`` carries bytes, so the tokens are utf-8 encoded as
      the other handlers in this project do.
    """
    sentences = np.array(x.string_val)
    sentences = sentences.reshape([dim.size for dim in x.tensor_shape.dim])
    # Each row is a length-1 vector holding one utf-8 sentence.
    processed = [
        " ".join(nltk.word_tokenize(str(sentence[0], encoding="utf-8").lower()))
        for sentence in sentences
    ]
    tokenized = hs.TensorProto(dtype=hs.DT_STRING,
                               string_val=[s.encode("utf-8") for s in processed],
                               tensor_shape=hs.TensorShapeProto(dim=[
                                   hs.TensorShapeProto.Dim(size=-1),
                                   hs.TensorShapeProto.Dim(size=1)
                               ]))
    return hs.PredictResponse(outputs={"input_data": tokenized})
def infer(**kwargs):
    """Sum the per-timestep reconstruction error of 300 3x3 frames and
    return it as a scalar DT_DOUBLE tensor."""
    frames = np.array(kwargs["x"].double_val).reshape(1, 300, 3, 3)
    total_error = 0
    for step in range(frames.shape[1]):
        with graph.as_default():
            flat = frames[0, step, :, :].reshape(1, 9)
            reconstructed = model.predict(flat)
            total_error += np.mean(np.square(flat - reconstructed), axis=0)
    y_tensor = hs.TensorProto(dtype=hs.DT_DOUBLE,
                              double_val=total_error.flatten().tolist(),
                              tensor_shape=hs.TensorShapeProto())
    # Hand the accumulated error back as the "value" output.
    return hs.PredictResponse(outputs={"value": y_tensor})
def infer(x):
    """Run the Keras model on the incoming DT_DOUBLE tensor and return
    its five-element prediction under "y"."""
    # Rebuild the numpy array from the flat payload plus declared shape.
    sample = np.array(x.double_val).reshape(
        [dim.size for dim in x.tensor_shape.dim])
    with graph.as_default():
        result = model.predict(sample)
        print(result)
    y_shape = hs.TensorShapeProto(dim=[hs.TensorShapeProto.Dim(size=5)])
    y_tensor = hs.TensorProto(dtype=hs.DT_DOUBLE,
                              double_val=result.flatten(),
                              tensor_shape=y_shape)
    return hs.PredictResponse(outputs={"y": y_tensor})
def infer(**kwargs):
    """Score samples by autoencoder reconstruction error (row-wise MSE)
    and return it as a DT_DOUBLE tensor of declared shape [1, 1].

    Fix: ``double_val`` previously received ``np.expand_dims(score, 0)``,
    a 2-D array; a repeated double field only takes a flat sequence of
    floats, so the old form worked only for a single sample (via the
    deprecated single-element ndarray->float conversion) and broke for
    larger batches.  ``score.flatten()`` carries the same values.
    """
    # Use the graph the model was loaded into.
    with graph.as_default():
        features = kwargs['X']
        data = np.array(features.double_val) \
            .reshape([dim.size for dim in features.tensor_shape.dim])
        predicted = autoencoder.predict(data)
        # Mean squared reconstruction error per sample.
        score = np.mean(np.square(predicted - data), axis=1)
        response_shape = hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=item) for item in (1, 1)])
        response_tensor = hs.TensorProto(dtype=hs.DT_DOUBLE,
                                         double_val=score.flatten(),
                                         tensor_shape=response_shape)
        return hs.PredictResponse(outputs={"reconstructed": response_tensor})
def predict(tokenized):
    """Sentiment prediction for a tokenized sentence: the raw confidence
    plus a 0/1 label thresholded at 0.5."""
    global graph
    sentence = np.array(tokenized.int64_val, ndmin=2)
    with graph.as_default():
        prediction = amazon_model.predict([sentence])
    confidence = prediction[0, 0]
    label = 1 if confidence >= 0.5 else 0
    outputs = {
        'confidence': hs.TensorProto(double_val=[confidence],
                                     dtype=hs.DT_DOUBLE,
                                     tensor_shape=hs.TensorShapeProto()),
        'label': hs.TensorProto(int_val=[label],
                                dtype=hs.DT_INT32,
                                tensor_shape=hs.TensorShapeProto()),
    }
    return hs.PredictResponse(outputs=outputs)
def infer(**kwargs):
    """Predict Titanic survival from keyword tensor arguments; returns a
    DT_INT32 "survived" tensor."""
    frame = pd.DataFrame({
        'Pclass': kwargs['pclass'].int_val,
        'Sex': kwargs['sex'].string_val,
        'Age': kwargs['age'].int_val,
        'Fare': kwargs['fare'].double_val,
        'Parch': kwargs['parch'].int_val
    })
    # Encode sex as 0/1 before feeding the model.
    frame['Sex'] = frame['Sex'].map({'male': 0, 'female': 1}).to_frame()
    score = gbm.predict(frame.values)
    survived = hs.TensorProto(
        dtype=hs.DT_INT32,
        int_val=score,
        tensor_shape=hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=-1)]))
    return hs.PredictResponse(outputs={"survived": survived})
def infer(faces):
    """Embed face crops with FaceNet and classify each embedding.

    Args:
        faces: hs.TensorProto of ints carrying stacked face images; its
            tensor_shape gives the [n, h, w, c] layout to restore.

    Returns:
        hs.PredictResponse with a DT_STRING tensor 'y' of one class name
        per input face.
    """
    img_data = np.array(faces.int_val)
    imgs = img_data.reshape([dim.size for dim in faces.tensor_shape.dim])
    # preprocess imgs for facenet
    for i in range(len(imgs)):
        imgs[i] = prewhiten(imgs[i])
    with graph.as_default():
        with tf.Session(graph=graph) as sess:
            # phase_train=False puts batch-norm/dropout in inference mode.
            feed_dict = {images_placeholder: imgs, phase_train_placeholder: False}
            emb_array = sess.run(embeddings, feed_dict=feed_dict)
            # Pick the most probable class per embedding.
            predictions = classifier.predict_proba(emb_array)
            class_indices = np.argmax(predictions, axis=1)
            classes = [class_names[index] for index in class_indices]
            y_shape = hs.TensorShapeProto(dim=[hs.TensorShapeProto.Dim(size=len(classes))])
            # string_val wants bytes, hence the encode().
            # (variable name 'class_prediciton' is a pre-existing typo)
            class_prediciton = hs.TensorProto(
                dtype=hs.DT_STRING,
                string_val=[class_name.encode() for class_name in classes],
                tensor_shape=y_shape)
            return hs.PredictResponse(outputs={'y': class_prediciton})
def gan(client_profile):
    """Run the GAN classifier on one client profile and return the two
    class scores as separate scalar DT_DOUBLE tensors."""
    with graph.as_default():
        sample = np.expand_dims(tf.make_ndarray(client_profile), axis=0)
        scores = model.predict(sample)[0].tolist()
    outputs = {
        "class_one": hs.TensorProto(double_val=[scores[0]], dtype=hs.DT_DOUBLE),
        "class_two": hs.TensorProto(double_val=[scores[1]], dtype=hs.DT_DOUBLE),
    }
    return hs.PredictResponse(outputs=outputs)
def detect(x):
    """Detect faces with dlib and return 160x160 aligned crops as a
    DT_UINT8 tensor under 'faces'."""
    pixels = np.array(x.int_val, dtype=np.uint8)
    img = pixels.reshape([dim.size for dim in x.tensor_shape.dim])
    detections = detector(img, 1)
    # Landmark each detection so the chips can be aligned.
    boxes = dlib.full_object_detections()
    for detection in detections:
        boxes.append(sp(img, detection))
    chips = np.array(
        [dlib.get_face_chip(img, boxes[i], size=160) for i in range(len(boxes))])
    faces_shape = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=item) for item in chips.shape])
    faces_tensor = hs.TensorProto(
        dtype=hs.DT_UINT8,
        int_val=chips.flatten(),
        tensor_shape=faces_shape
    )
    return hs.PredictResponse(outputs={'faces': faces_tensor})
def predict(**kwargs):
    """Run the classifier and return both the class and the raw
    10-way probabilities."""
    extracted = extract_value(kwargs['input'])
    with graph.as_default():
        probas = m.predict(extracted)
    # NOTE(review): argmax over axis=0 reduces across the *batch*
    # dimension if probas is (batch, 10); per-sample classes would need
    # axis=1 — confirm the shape m.predict returns here.
    classes = np.array(probas).argmax(axis=0)
    probas_proto = hs.TensorProto(
        double_val=probas.flatten().tolist(),
        dtype=hs.DT_DOUBLE,
        tensor_shape=hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=-1), hs.TensorShapeProto.Dim(size=10)]))
    classes_proto = hs.TensorProto(
        int64_val=classes.flatten().tolist(),
        dtype=hs.DT_INT64,
        tensor_shape=hs.TensorShapeProto(
            dim=[hs.TensorShapeProto.Dim(size=-1), hs.TensorShapeProto.Dim(size=1)]))
    return hs.PredictResponse(outputs={"classes": classes_proto, "probabilities": probas_proto})
def tensorflow_case(self, tf_version):
    """Integration check of the dockerized TF runtime for one TF version:
    starts the container with the summator model mounted, calls the
    "add" signature over gRPC and asserts 3 + 2 == 5.
    """
    docker_client = docker.from_env()
    container = docker_client.containers.run(
        "hydrosphere/serving-runtime-tensorflow:{}-latest".format(tf_version),
        remove=True,
        detach=True,
        ports={'9090/tcp': 9090},
        volumes={os.path.abspath('models/tf_summator'): {'bind': '/model', 'mode': 'ro'}}
    )
    # Give the container time to boot its gRPC server.
    time.sleep(15)
    try:
        channel = grpc.insecure_channel('localhost:9090')
        client = hs.PredictionServiceStub(channel=channel)
        # Build the two int8 scalar operands by re-parsing TF protos
        # into the hs flavour of TensorProto (wire-compatible).
        a = hs.TensorProto()
        a.ParseFromString(tf.contrib.util.make_tensor_proto(3, dtype=tf.int8).SerializeToString())
        b = hs.TensorProto()
        b.ParseFromString(tf.contrib.util.make_tensor_proto(2, dtype=tf.int8).SerializeToString())
        request = hs.PredictRequest(
            model_spec=hs.ModelSpec(signature_name="add"),
            inputs={
                "a": a,
                "b": b
            }
        )
        result = client.Predict(request)
        expected = hs.PredictResponse(
            outputs={
                "sum": hs.TensorProto(
                    dtype=hs.DT_INT8,
                    tensor_shape=hs.TensorShapeProto(),
                    int_val=[5]
                )
            }
        )
        self.assertEqual(result, expected)
    finally:
        # Dump logs for debugging before tearing the container down.
        print("Container logs:")
        print(container.logs().decode("utf-8"))
        container.stop()
        time.sleep(15)
def predict(shaped_image):
    """Classify a shaped image as normal/abnormal from the model's scaled
    first output and return the label as a DT_STRING tensor.

    Fixes over the previous revision:
    * the shape was built as ``TensorShapeProto(dim=TensorShapeProto(size=1))``;
      ``TensorShapeProto`` has no ``size`` field and ``dim`` is a repeated
      field of ``Dim`` messages, so that call raised at runtime;
    * ``string_val`` carries bytes, so the label is utf-8 encoded as the
      other handlers in this project do.
    """
    result = model(shaped_image)
    inference = result[0][0] * 1000 - 453
    if inference > 0.5:
        prediction = "normal ({})".format(inference)
    else:
        prediction = "abnormal ({})".format(inference)
    prediction_tensor_shape = hs.TensorShapeProto(
        dim=[hs.TensorShapeProto.Dim(size=1)])
    prediction_tensor_proto = hs.TensorProto(
        dtype=hs.DT_STRING,
        string_val=[prediction.encode('utf-8')],
        tensor_shape=prediction_tensor_shape)
    return hs.PredictResponse(outputs={"result": prediction_tensor_proto})
def gan(client_profile):
    """Run the GAN classifier on one client profile and return both class
    scores as scalar DT_DOUBLE tensors.

    Cleanup: removed the debug prints and the large blocks of
    commented-out experiments from the previous revision; the computed
    response is unchanged.
    """
    data = client_profile.double_val
    # model.predict expects a batch, hence the extra [ ] around the row.
    scores = model.predict(np.array([data]))[0].tolist()
    answer_tensor_one = hs.TensorProto(double_val=[scores[0]], dtype=hs.DT_DOUBLE)
    answer_tensor_two = hs.TensorProto(double_val=[scores[1]], dtype=hs.DT_DOUBLE)
    return hs.PredictResponse(outputs={
        "class_one": answer_tensor_one,
        "class_two": answer_tensor_two
    })
def pack_predict(result):
    """Wrap a single numeric result into a scalar DT_DOUBLE PredictResponse."""
    packed = hs.TensorProto(dtype=hs.DT_DOUBLE,
                            double_val=[result],
                            tensor_shape=hs.TensorShapeProto())
    return hs.PredictResponse(outputs={"value": packed})
inputs={ "in": hs.TensorProto( dtype=hs.DT_DOUBLE, double_val=[random.random()], tensor_shape=hs.TensorShapeProto() ), }, ) # 3. Create a PredictResponse message. PredictResponse is used to define the # outputs of the model inference. predict_response_proto = hs.PredictResponse( outputs={ "out": hs.TensorProto( dtype=hs.DT_DOUBLE, double_val=[random.random()], tensor_shape=hs.TensorShapeProto() ), }, ) # 4. Create an ExecutionInformation message. ExecutionInformation contains all # request data and all auxiliary information about request execution, required # to calculate metrics. execution_information_proto = hs.ExecutionInformation( request=predict_request_proto, response=predict_response_proto, metadata=execution_metadata_proto, ) # 5. Use RPC method Analyse of the MonitoringService to calculate metrics