def net_inference(self, request):
    """Run joint classification + detection inference for a single request."""
    assert isinstance(request, pb.InferenceRequest)
    img = load_imagev2(request.data.body)
    assert img.ndim == 3  # TODO
    img_height, img_width, _ = img.shape

    # Pre-process the image separately for the classification and detection nets.
    img_cls = cls_preProcessImage(img)
    img_det = det_preProcessImage(img)
    forward_req = {'img_cls': img_cls, 'img_det': img_det}

    # Serialize the inputs and forward them to the inference backend.
    msg = pb.ForwardMsg(
        network_input_buf=cPickle.dumps(forward_req, protocol=cPickle.HIGHEST_PROTOCOL),
        reqid=request.reqid)
    msg_out = self.inference_req.inference_msg(msg)
    output = cPickle.loads(msg_out.network_output_buf)
    image_index = json.loads(msg_out.meta['data'].decode('utf8'))['image_index']

    # Post-process, then merge the classification and detection results.
    cls_result = cls_post_eval(output['output_fine'], output['output_coarse'],
                               image_index, self.cls_model)
    det_result = det_post_eval(img_height, img_width, output['output_det'],
                               self.det_label_dict, image_index)
    cls_result = cls_merge_det(cls_result, det_result, self.cls_model, self.det_model)
    cls_result = merge_confidences(cls_result, self.cls_model)
    resp = postProcess(cls_result, det_result, self.det_model)
    return pb.InferenceResponse(code=200, result=json.dumps(resp))
def _load_image(body, width, height, mean_value, std_value):
    """Decode, resize, and normalize an image, returning a CHW float32 array."""
    img = load_imagev2(body)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    img = img.astype(np.float32)
    img = cv2.resize(img, (width, height))
    img -= mean_value
    img /= std_value
    # HWC -> CHW
    img = np.swapaxes(img, 0, 2)
    img = np.swapaxes(img, 1, 2)
    return img
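# Hypothetical usage sketch for _load_image (illustration only, not part of the
# service). The mean/std values below are common ImageNet RGB statistics and the
# 224x224 input size is an assumption; the real values are model-specific.
#
#     mean_value = np.array([123.68, 116.779, 103.939], dtype=np.float32)
#     std_value = np.array([58.395, 57.12, 57.375], dtype=np.float32)
#     with open('example.jpg', 'rb') as f:
#         chw = _load_image(f.read(), width=224, height=224,
#                           mean_value=mean_value, std_value=std_value)
#     # chw has shape (3, 224, 224), ready to be stacked into a batch.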
def net_inference(self, request):
    """Run detection inference and return per-class results above each class threshold."""
    classes_dict = self.labels['class']
    threshold_dict = self.labels['minMt']  # minModelThreshold
    assert isinstance(request, pb.InferenceRequest)
    img = load_imagev2(request.data.body)
    assert img.ndim == 3  # TODO
    nms = py_nms_wrapper(config.TEST.NMS)

    # Reject images with an extreme aspect ratio.
    if img.shape[0] > img.shape[1]:
        long_side, short_side = img.shape[0], img.shape[1]
    else:
        long_side, short_side = img.shape[1], img.shape[0]
    if short_side > 0 and float(long_side) / float(short_side) > 50.0:
        raise ErrorBase(
            400,
            'aspect ratio is too large, long_side:short_side should not be larger than 50.0')

    # Forward the batch to the inference backend.
    batch = generate_batch(img)
    msg = pb.ForwardMsg()
    msg.network_input_buf = cPickle.dumps(batch, protocol=cPickle.HIGHEST_PROTOCOL)
    msg_out = self.inference_req.inference_msg(msg)
    r = cPickle.loads(msg_out.network_output_buf)
    scores = [r['scores']]
    boxes = [r['boxes']]

    # Per-class score thresholding followed by NMS.
    det_ret = []
    for cls_ind in sorted(classes_dict.keys()):
        cls_name = classes_dict.get(cls_ind)
        cls_boxes = boxes[0][:, 4:8] if config.CLASS_AGNOSTIC \
            else boxes[0][:, 4 * cls_ind:4 * (cls_ind + 1)]
        cls_scores = scores[0][:, cls_ind, np.newaxis]
        threshold = float(threshold_dict[cls_ind])
        keep = np.where(cls_scores > threshold)[0]
        dets = np.hstack((cls_boxes, cls_scores)).astype(np.float32)[keep, :]
        keep = nms(dets)
        det_ret.extend(
            _build_result(det, cls_name, cls_ind, self.labels)
            for det in dets[keep, :])
    return pb.InferenceResponse(code=200,
                                result=json.dumps({'detections': det_ret}))