def init_op(self):
    # Preprocessing pipeline: resize, center-crop to 224x224, RGB->BGR,
    # HWC->CHW transpose, scale to [0, 1], then ImageNet mean/std normalization.
    self.seq = Sequential([
        Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
        Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
    ])
    # Map class index -> human-readable ImageNet label.
    self.label_dict = {}
    label_idx = 0
    with open("imagenet.label") as fin:
        for line in fin:
            self.label_dict[label_idx] = line.strip()
            label_idx += 1
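# Sketch of how self.seq and self.label_dict could be used after init_op has
# run, on the same service class. The method name, signature, and the "score"
# fetch key are illustrative assumptions, not taken from the snippet above.
def postprocess_scores(self, fetch_map):
    result = {"label": [], "prob": []}
    for score in fetch_map["score"]:
        score = score.tolist()
        max_score = max(score)
        result["prob"].append(max_score)
        # Look up the human-readable ImageNet label for the argmax class.
        result["label"].append(
            self.label_dict[score.index(max_score)].strip().replace(",", ""))
    return result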
def single_func(idx, resource):
    total_number = 0
    profile_flags = False
    latency_flags = False
    if os.getenv("FLAGS_profile_client"):
        profile_flags = True
    if os.getenv("FLAGS_serving_latency"):
        latency_flags = True
    latency_list = []
    if args.request == "rpc":
        client = Client()
        client.load_client_config(args.model)
        # Round-robin the workers over the available endpoints.
        client.connect([resource["endpoint"][idx % len(resource["endpoint"])]])
        start = time.time()
        for i in range(turns):
            if args.batch_size >= 1:
                l_start = time.time()
                seq = Sequential([
                    File2Image(), Resize(256), CenterCrop(224), RGB2BGR(),
                    Transpose((2, 0, 1)), Div(255),
                    Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
                ])
                image_file = "daisy.jpg"
                img = seq(image_file)
                # Replicate the single preprocessed image to form a batch.
                feed_data = np.array(img)
                feed_data = np.expand_dims(
                    feed_data, 0).repeat(args.batch_size, axis=0)
                result = client.predict(
                    feed={"image": feed_data},
                    fetch=["save_infer_model/scale_0.tmp_0"],
                    batch=True)
                l_end = time.time()
                if latency_flags:
                    latency_list.append(l_end * 1000 - l_start * 1000)
                total_number = total_number + 1
            else:
                print("unsupported batch size {}".format(args.batch_size))
    else:
        raise ValueError("request type {} is not implemented".format(args.request))
    end = time.time()
    if latency_flags:
        return [[end - start], latency_list, [total_number]]
    else:
        return [[end - start]]
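# A minimal driver sketch for single_func. It relies on the benchmark-style
# globals used above (args, turns) and assumes PaddleServing's MultiThreadRunner
# helper is available; the endpoint list here is an illustrative assumption.
from paddle_serving_client.utils import MultiThreadRunner

if __name__ == "__main__":
    endpoint_list = ["127.0.0.1:9393"]  # assumed serving endpoint
    runner = MultiThreadRunner()
    # Launch args.thread workers; each gets its index plus the shared resource
    # dict and returns [[elapsed], latency_list, [count]] as defined above.
    result = runner.run(single_func, args.thread, {"endpoint": endpoint_list})
    print(result)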
def run(args):
    client = Client()
    client.load_client_config(
        os.path.join(args.serving_client_path, "serving_client_conf.prototxt"))
    client.connect([args.serving_ip_port])

    seq = Sequential([
        File2Image(), RGB2BGR(), Div(255),
        Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5], False),
        Transpose((2, 0, 1))
    ])

    img = seq(args.image_path)
    fetch_map = client.predict(
        feed={"x": img}, fetch=["save_infer_model/scale_0.tmp_1"])
    result = fetch_map["save_infer_model/scale_0.tmp_1"]
    color_img = get_pseudo_color_map(result[0])
    color_img.save("./result.png")
    print("The segmentation image is saved in ./result.png")
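# A minimal sketch of wiring run(args) to the command line. The argument names
# mirror the attributes used above; the default values are assumptions.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--serving_client_path", type=str, default="serving_client")
    parser.add_argument("--serving_ip_port", type=str, default="127.0.0.1:9393")
    parser.add_argument("--image_path", type=str, required=True)
    run(parser.parse_args())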
from paddle_serving_client import Client
from paddle_serving_app.reader import Sequential, URL2Image, Resize, \
    CenterCrop, RGB2BGR, Transpose, Div, Normalize
import sys
import time

client = Client()
client.load_client_config(sys.argv[1])
#client.set_http_proto(True)
client.connect(["127.0.0.1:9696"])

label_dict = {}
label_idx = 0
with open("imagenet.label") as fin:
    for line in fin:
        label_dict[label_idx] = line.strip()
        label_idx += 1

seq = Sequential([
    URL2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
    Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
])

start = time.time()
image_file = "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"
for i in range(10):
    img = seq(image_file)
    fetch_map = client.predict(feed={"image": img}, fetch=["score"], batch=False)
    print(fetch_map)
end = time.time()
import sys
import time

from paddle_serving_client import Client
from paddle_serving_app.reader import Sequential, URL2Image, Resize
from paddle_serving_app.reader import CenterCrop, RGB2BGR, Transpose, Div, Normalize

client = Client()
client.load_client_config(sys.argv[1])
client.connect(["127.0.0.1:9696"])

label_dict = {}
label_idx = 0
with open("imagenet.label") as fin:
    for line in fin:
        label_dict[label_idx] = line.strip()
        label_idx += 1

seq = Sequential([
    URL2Image(), Resize(256), CenterCrop(224), RGB2BGR(), Transpose((2, 0, 1)),
    Div(255), Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225], True)
])

start = time.time()
image_file = "https://paddle-serving.bj.bcebos.com/imagenet-example/daisy.jpg"
for i in range(10):
    img = seq(image_file)
    fetch_map = client.predict(
        feed={"image": img}, fetch=["score"], batch=False)
    # Pick the top-scoring class and map it to its ImageNet label.
    prob = max(fetch_map["score"][0])
    label = label_dict[fetch_map["score"][0].tolist().index(prob)]
    label = label.strip().replace(",", "")
    print("prediction: {}, probability: {}".format(label, prob))
end = time.time()
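# Usage sketch (script and config-path names are assumptions; pass the
# serving_client_conf.prototxt exported for your model):
#     python imagenet_rpc_client.py serving_client/serving_client_conf.prototxt
# Report the average round-trip latency over the 10 requests issued above.
print("average latency: {:.2f} ms".format((end - start) * 1000 / 10))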