Example #1
 def net_inference_wrap(self, request):
     # Translate errors raised by net_inference into response codes and
     # push the elapsed time to the monitor queue as an eval_time metric.
     start = time.time()
     try:
         response = self.net_inference(request)
     except ErrorCV2ImageRead as err:
         xl.warn("net_inference", extra={'reqid': request.reqid})
         response = pb.InferenceResponse(code=err.code, message=err.message)
     except ErrorBase as err:
         xl.warn("net_inference",
                 exc_info=err,
                 extra={'reqid': request.reqid})
         response = pb.InferenceResponse(code=err.code, message=err.message)
     except schema.SchemaError as err:
         xl.warn("net_inference",
                 exc_info=err,
                 extra={'reqid': request.reqid})
         response = pb.InferenceResponse(
             code=400, message='bad api param: {}'.format(err))
     except Exception as err:  # pylint: disable=broad-except
         xl.exception("net_inference",
                      exc_info=err,
                      extra={'reqid': request.reqid})
         response = pb.InferenceResponse(
             code=599,
             message='app {} net_inference unknown error: {}'.format(
                 self.app_name, err))
     self.monitor_push.send(
         pb.MonitorMetric(kind="eval_time",
                          pid=str(self.pid),
                          code=str(response.code),
                          value=time.time() - start).SerializeToString())
     return response
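
The wrapper in Example #1 depends on the service's pb, xl, and monitor objects. The sketch below is a stripped-down version of the same pattern, assuming a hypothetical ErrorBase exception that carries a code and a message, with the logging module standing in for xl and a plain (code, result) tuple standing in for pb.InferenceResponse:

import logging
import time

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("net_inference")


class ErrorBase(Exception):
    # Hypothetical stand-in for the framework's base error type.
    def __init__(self, code, message):
        super(ErrorBase, self).__init__(message)
        self.code = code
        self.message = message


def inference_wrap(handler, request):
    # Run handler(request), map known errors to (code, message),
    # and log the elapsed time the way the eval_time metric does.
    start = time.time()
    try:
        code, result = 200, handler(request)
    except ErrorBase as err:
        log.warning("net_inference: %s", err)
        code, result = err.code, err.message
    except Exception as err:  # pylint: disable=broad-except
        log.exception("net_inference unknown error")
        code, result = 599, str(err)
    log.info("eval_time code=%s value=%.4fs", code, time.time() - start)
    return code, result


if __name__ == '__main__':
    print(inference_wrap(lambda req: {'hello': req}, 'world'))

    def bad_handler(_req):
        raise ErrorBase(400, 'bad api param')

    print(inference_wrap(bad_handler, None))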
Example #2
    def net_inference(self, request):
        """Run the classification and detection branches on one image and merge the results."""
        assert isinstance(request, pb.InferenceRequest)
        img = load_imagev2(request.data.body)
        assert img.ndim == 3  # TODO
        img_height, img_width, _ = img.shape
        img_cls = cls_preProcessImage(img)
        img_det = det_preProcessImage(img)
        forward_req = {'img_cls': img_cls, 'img_det': img_det}
        msg = pb.ForwardMsg(network_input_buf=cPickle.dumps(
            forward_req, protocol=cPickle.HIGHEST_PROTOCOL),
                            reqid=request.reqid)
        msg_out = self.inference_req.inference_msg(msg)
        output = cPickle.loads(msg_out.network_output_buf)
        image_index = json.loads(
            msg_out.meta['data'].decode('utf8'))['image_index']

        cls_result = cls_post_eval(output['output_fine'],
                                   output['output_coarse'], image_index,
                                   self.cls_model)
        det_result = det_post_eval(img_height, img_width, output['output_det'],
                                   self.det_label_dict, image_index)

        cls_result = cls_merge_det(cls_result, det_result, self.cls_model,
                                   self.det_model)
        cls_result = merge_confidences(cls_result, self.cls_model)

        resp = postProcess(cls_result, det_result, self.det_model)
        return pb.InferenceResponse(code=200, result=json.dumps(resp))
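
The hop through pb.ForwardMsg in Example #2 is essentially a pickled dict of numpy arrays handed to the forward worker, with a pickled dict of outputs handed back. A minimal sketch of that round trip, using pickle (the snippets use cPickle, its Python 2 counterpart); the array shapes here are placeholders, not the models' real input sizes:

import pickle

import numpy as np

# Preprocessed inputs for the classification and detection branches,
# packed into a single payload.
forward_req = {
    'img_cls': np.zeros((1, 3, 224, 224), dtype=np.float32),
    'img_det': np.zeros((1, 3, 600, 600), dtype=np.float32),
}
buf = pickle.dumps(forward_req, protocol=pickle.HIGHEST_PROTOCOL)

# The forward worker unpacks the same dict on the other side.
restored = pickle.loads(buf)
assert restored['img_cls'].shape == (1, 3, 224, 224)
print('payload size: %d bytes' % len(buf))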
Example #3
    def net_inference(self, request):
        # pylint: disable=too-many-locals
        """Classify a single decoded image and return its labeled scores as JSON."""
        assert isinstance(request, pb.InferenceRequest)

        img = _load_image(request.data.body, self.image_width,
                          self.image_height, self.mean_value, self.std_value)
        msg = pb.ForwardMsg(network_input_buf=img.tobytes(),
                            reqid=request.reqid)
        msg_out = self.inference_req.inference_msg(msg)
        output = cPickle.loads(msg_out.network_output_buf)
        return pb.InferenceResponse(code=200,
                                    result=json.dumps(
                                        _build_result(output, self.labels)))
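
_load_image in Example #3 is not shown, but its parameters (width, height, mean, std) suggest the usual decode / resize / normalize / channels-first pipeline. Below is a sketch of such a loader, assuming OpenCV decoding and NCHW layout; both choices are assumptions, not the original helper:

import cv2
import numpy as np


def load_and_normalize(raw_bytes, width, height, mean_value, std_value):
    # Decode the encoded image bytes, resize to the network input size,
    # normalize with the given mean/std, and move channels first (NCHW).
    img = cv2.imdecode(np.frombuffer(raw_bytes, dtype=np.uint8), cv2.IMREAD_COLOR)
    if img is None:
        raise ValueError('failed to decode image')
    img = cv2.resize(img, (width, height)).astype(np.float32)
    img = (img - mean_value) / std_value
    return img.transpose(2, 0, 1)[np.newaxis, ...]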
Example #4
    def net_inference(self, request):
        """Detect objects in one image, apply per-class thresholds and NMS, and return JSON."""
        classes_dict = self.labels['class']
        threshold_dict = self.labels['minMt']  # minModelThreshold
        assert isinstance(request, pb.InferenceRequest)

        img = load_imagev2(request.data.body)
        assert img.ndim == 3  # TODO

        nms = py_nms_wrapper(config.TEST.NMS)

        if img.shape[0] > img.shape[1]:
            long_side, short_side = img.shape[0], img.shape[1]
        else:
            long_side, short_side = img.shape[1], img.shape[0]

        if short_side > 0 and float(long_side) / float(short_side) > 50.0:
            raise ErrorBase(
                400,
                'aspect ratio is too large, long_side:short_side should not be larger than 50.0'
            )

        batch = generate_batch(img)
        msg = pb.ForwardMsg()
        msg.network_input_buf = cPickle.dumps(
            batch, protocol=cPickle.HIGHEST_PROTOCOL)

        msg_out = self.inference_req.inference_msg(msg)
        scores = []
        boxes = []

        r = cPickle.loads(msg_out.network_output_buf)
        scores.append(r['scores'])
        boxes.append(r['boxes'])

        det_ret = []
        for cls_ind in sorted(classes_dict.keys()):
            cls_name = classes_dict.get(cls_ind)
            # Class-agnostic models emit one box per proposal; otherwise take
            # the four coordinates belonging to this class.
            cls_boxes = boxes[0][:, 4:8] if config.CLASS_AGNOSTIC else boxes[
                0][:, 4 * cls_ind:4 * (cls_ind + 1)]
            cls_scores = scores[0][:, cls_ind, np.newaxis]
            threshold = float(threshold_dict[cls_ind])
            keep = np.where(cls_scores > threshold)[0]
            dets = np.hstack(
                (cls_boxes, cls_scores)).astype(np.float32)[keep, :]
            keep = nms(dets)
            det_ret.extend(
                _build_result(det, cls_name, cls_ind, self.labels)
                for det in dets[keep, :])
        return pb.InferenceResponse(code=200,
                                    result=json.dumps({'detections': det_ret}))
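
Example #4's per-class loop relies on py_nms_wrapper from the detection framework. A plain-numpy NMS over [x1, y1, x2, y2, score] rows can stand in for it when exercising the loop outside that framework; the IoU threshold below is a placeholder for config.TEST.NMS:

import numpy as np


def simple_nms(dets, iou_thresh=0.3):
    # Greedy non-maximum suppression: keep the highest-scoring box,
    # drop boxes whose IoU with it exceeds the threshold, repeat.
    x1, y1, x2, y2 = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3]
    scores = dets[:, 4]
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        order = order[np.where(iou <= iou_thresh)[0] + 1]
    return keep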
Example #5
 def net_inference(self, request):  # pylint: disable=no-self-use
     # Trivial handler: ignores the request payload and returns a fixed JSON result.
     assert isinstance(request, pb.InferenceRequest)
     return pb.InferenceResponse(code=200,
                                 result=json.dumps({'hello': 'world'}))