Example #1
    def __init__(self, app_name, cfg):
        super(InferenceServer, self).__init__(app_name)
        self.inference_req = InferenceReq()
        self.classes = aisdk.common.other.make_synset(
            cfg['model_files']['labels.csv'])
        label_file = cfg['model_files']['labels.csv']
        self.labels = parse_label_file(label_file)
        yaml_file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'resnet.yaml')
        update_config(yaml_file)
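Several of these snippets rely on a parse_label_file helper that is not shown. Judging from how its result is used in Example #5 below (self.labels['class'] and self.labels['minMt']), it seems to map CSV columns to per-class dictionaries. The following is only a hedged sketch of such a helper under that assumption, not the actual implementation:

import csv

def parse_label_file(label_file):
    # Hypothetical reader: each CSV row is assumed to hold
    # index, class name, minimum model threshold.
    labels = {'class': {}, 'minMt': {}}
    with open(label_file) as f:
        for row in csv.reader(f):
            idx = int(row[0])
            labels['class'][idx] = row[1]
            labels['minMt'][idx] = float(row[2])
    return labels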
Example #2
    def __init__(self, app_name, cfg):
        super(InferenceServer, self).__init__(app_name)
        self.inference_req = InferenceReq()

        percent_fine = float(cfg['custom_params']['percentage_fine'])

        fine_labels = str(cfg['model_files']["fine_labels.csv"])
        dict_label = parse_label_file(fine_labels)

        det_labelfile = str(cfg['model_files']["det_labels.csv"])
        det_label_dict = parse_label_file(det_labelfile)

        self.cls_model = {
            "percentage_fine": percent_fine,
            "dict_label": dict_label,
        }
        self.det_model = {'label': det_label_dict}
        self.det_label_dict = det_label_dict
Example #3
class InferenceServer(BaseInferenceServer):
    def __init__(self, app_name, cfg):
        super(InferenceServer, self).__init__(app_name)
        self.inference_req = InferenceReq()

        percent_fine = float(cfg['custom_params']['percentage_fine'])
        percent_coarse = float(cfg['custom_params']['percentage_coarse'])

        fine_labels = str(cfg['model_files']["fine_labels.csv"])
        dict_label = parse_label_file(fine_labels)

        coarse_labels = str(cfg['model_files']['coarse_labels.csv'])
        label_map_dict = get_label_map(coarse_labels)

        det_labelfile = str(cfg['model_files']["det_labels.csv"])
        det_label_dict = parse_label_file(det_labelfile)

        self.cls_model = {
            "percentage_fine": percent_fine,
            "percentage_coarse": percent_coarse,
            "dict_label": dict_label,
            "label_map_dict": label_map_dict
        }
        self.det_model = {'label': det_label_dict}
        self.det_label_dict = det_label_dict

    def net_inference(self, request):
        assert isinstance(request, pb.InferenceRequest)
        # Decode the request image and run both pre-processing pipelines.
        img = load_imagev2(request.data.body)
        assert img.ndim == 3  # TODO
        img_height, img_width, _ = img.shape
        img_cls = cls_preProcessImage(img)
        img_det = det_preProcessImage(img)

        # Pickle both inputs into a single forward message for the backend
        # and send it through the inference request channel.
        forward_req = {'img_cls': img_cls, 'img_det': img_det}
        msg = pb.ForwardMsg(network_input_buf=cPickle.dumps(
            forward_req, protocol=cPickle.HIGHEST_PROTOCOL),
                            reqid=request.reqid)
        msg_out = self.inference_req.inference_msg(msg)
        output = cPickle.loads(msg_out.network_output_buf)
        image_index = json.loads(
            msg_out.meta['data'].decode('utf8'))['image_index']

        # Post-process classification and detection outputs separately,
        # then merge detections into the classification result.
        cls_result = cls_post_eval(output['output_fine'],
                                   output['output_coarse'], image_index,
                                   self.cls_model)
        det_result = det_post_eval(img_height, img_width, output['output_det'],
                                   self.det_label_dict, image_index)

        cls_result = cls_merge_det(cls_result, det_result, self.cls_model,
                                   self.det_model)
        cls_result = merge_confidences(cls_result, self.cls_model)

        resp = postProcess(cls_result, det_result, self.det_model)
        return pb.InferenceResponse(code=200, result=json.dumps(resp))
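The only payload that crosses the process boundary in these examples is a pickled dict of pre-processed inputs plus a small JSON metadata blob. Below is a minimal sketch of that round trip, with the forward worker stubbed out as a plain function instead of the real pb.ForwardMsg / InferenceReq machinery; the field names mirror those read in net_inference above, everything else is assumed for illustration:

import json
try:
    import cPickle  # Python 2, as used in the examples
except ImportError:
    import pickle as cPickle  # Python 3 equivalent

def fake_forward_worker(network_input_buf):
    # Stand-in for the forward service: unpickle the inputs, pretend to run
    # the networks, and return pickled outputs plus JSON metadata, mirroring
    # the fields net_inference() reads from msg_out.
    forward_req = cPickle.loads(network_input_buf)
    assert sorted(forward_req) == ['img_cls', 'img_det']
    outputs = {'output_fine': [], 'output_coarse': [], 'output_det': []}
    meta = {'data': json.dumps({'image_index': 0}).encode('utf8')}
    return cPickle.dumps(outputs, protocol=cPickle.HIGHEST_PROTOCOL), meta

network_input_buf = cPickle.dumps({'img_cls': None, 'img_det': None},
                                  protocol=cPickle.HIGHEST_PROTOCOL)
out_buf, meta = fake_forward_worker(network_input_buf)
output = cPickle.loads(out_buf)
image_index = json.loads(meta['data'].decode('utf8'))['image_index']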
Example #4
    def __init__(self, app_name, cfg):
        super(InferenceServer, self).__init__(app_name)
        net = aisdk.common.mxnet_base.net
        conf = net.NetConfig()
        conf.parse(cfg)
        label_file = conf.file_synset
        inference_req = InferenceReq()

        self.net = net
        self.labels = net.load_labels(label_file)
        self.image_width = conf.image_width
        self.image_height = conf.image_height
        self.mean_value = conf.value_mean
        self.std_value = conf.value_std
        self.inference_req = inference_req
Example #5
class InferenceServer(BaseInferenceServer):
    def __init__(self, app_name, cfg):
        super(InferenceServer, self).__init__(app_name)
        self.inference_req = InferenceReq()
        self.classes = aisdk.common.other.make_synset(
            cfg['model_files']['labels.csv'])
        label_file = cfg['model_files']['labels.csv']
        self.labels = parse_label_file(label_file)
        yaml_file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'resnet.yaml')
        update_config(yaml_file)

    def net_inference(self, request):
        classes_dict = self.labels['class']
        threshold_dict = self.labels['minMt']  # minModelThreshold
        assert isinstance(request, pb.InferenceRequest)

        img = load_imagev2(request.data.body)
        assert img.ndim == 3  # TODO

        nms = py_nms_wrapper(config.TEST.NMS)

        # Reject extremely elongated images before running detection.
        if img.shape[0] > img.shape[1]:
            long_side, short_side = img.shape[0], img.shape[1]
        else:
            long_side, short_side = img.shape[1], img.shape[0]

        if short_side > 0 and float(long_side) / float(short_side) > 50.0:
            raise ErrorBase(
                400,
                'aspect ratio is too large; long_side:short_side must not exceed 50.0'
            )

        batch = generate_batch(img)
        msg = pb.ForwardMsg()
        msg.network_input_buf = cPickle.dumps(
            batch, protocol=cPickle.HIGHEST_PROTOCOL)

        msg_out = self.inference_req.inference_msg(msg)
        scores = []
        boxes = []

        r = cPickle.loads(msg_out.network_output_buf)
        scores.append(r['scores'])
        boxes.append(r['boxes'])

        det_ret = []
        for cls_ind in sorted(classes_dict.keys()):
            cls_name = classes_dict.get(cls_ind)
            # Class-agnostic models share one box regression; otherwise pick
            # the 4 coordinates belonging to this class.
            if config.CLASS_AGNOSTIC:
                cls_boxes = boxes[0][:, 4:8]
            else:
                cls_boxes = boxes[0][:, 4 * cls_ind:4 * (cls_ind + 1)]
            cls_scores = scores[0][:, cls_ind, np.newaxis]
            # Drop detections below this class's minimum model threshold,
            # then apply NMS to the survivors.
            threshold = float(threshold_dict[cls_ind])
            keep = np.where(cls_scores > threshold)[0]
            dets = np.hstack(
                (cls_boxes, cls_scores)).astype(np.float32)[keep, :]
            keep = nms(dets)
            det_ret.extend(
                _build_result(det, cls_name, cls_ind, self.labels)
                for det in dets[keep, :])
        return pb.InferenceResponse(code=200,
                                    result=json.dumps({'detections': det_ret}))
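_build_result is not part of the snippet. Based only on how it is called above (one NMS-surviving row of [x1, y1, x2, y2, score] per detection, plus the class name, class index, and label table), a plausible shape for it might look like the following; this is purely an illustration, not the real helper:

def _build_result(det, cls_name, cls_ind, labels):
    # Hypothetical helper: turn one surviving detection row into a
    # JSON-serializable dict for the response payload. The label table is
    # accepted for parity with the call site but unused in this sketch.
    x1, y1, x2, y2, score = [float(v) for v in det[:5]]
    return {
        'index': int(cls_ind),
        'class': cls_name,
        'score': score,
        'pts': [[x1, y1], [x2, y1], [x2, y2], [x1, y2]],
    }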