Example #1
0
 def net_inference(self, msgs):
     """Run the detector on a single pickled image message.

     Expects exactly one message whose input buffer unpickles to a dict
     with 'im_array', 'im_info', 'data_shapes' and 'im_scale'; returns a
     ForwardMsgs with the pickled scores/boxes for that image.
     """
     assert isinstance(msgs, pb.ForwardMsgs)
     start = time.time()
     start_forward = time.time()
     assert len(msgs.msgs) == 1
     unpacked = cPickle.loads(msgs.msgs[0].network_input_buf)
     batch = mx.io.DataBatch(
         data=[[mx.nd.array(unpacked['im_array']),
                mx.nd.array(unpacked['im_info'])]],
         label=[None],
         provide_data=unpacked['data_shapes'],
         provide_label=[None])
     with self.lock:
         # https://github.com/ataraxialab/Deformable-ConvNets/blob/master/rfcn/core/tester.py#L124
         scores, boxes, _ = im_detect(self.predictor, batch,
                                      ['data', 'im_info'],
                                      unpacked['im_scale'], config)
     end_forward = time.time()
     # One output per input message (asserted to be exactly one above).
     msgs_out = [
         pb.ForwardMsg(network_output_buf=cPickle.dumps(
             {
                 'scores': scores[0],
                 'boxes': boxes[0],
             },
             protocol=cPickle.HIGHEST_PROTOCOL)) for _ in msgs.msgs
     ]
     log.info('{} use time {}, forward time {}, batch_size: {}/{}'.format(
         self.app_name,
         time.time() - start, end_forward - start_forward, len(msgs.msgs),
         self.batch_size))
     return pb.ForwardMsgs(msgs=msgs_out)
Example #2
0
    def net_inference(self, msgs):
        """Batch raw float32 image buffers, run one forward pass, and
        return one pickled output row per input message.

        Unused batch slots stay zero-filled; only the first
        ``len(msgs.msgs)`` output rows are sent back.
        """
        assert isinstance(msgs, pb.ForwardMsgs)
        start = time.time()
        # Pre-allocate the full batch tensor once.
        batch = mx.nd.array(
            np.zeros((self.batch_size, 3, self.width, self.height)))
        for slot, msg in enumerate(msgs.msgs):
            decoded = np.frombuffer(msg.network_input_buf,
                                    dtype=np.float32).reshape(
                                        (3, self.width, self.height))
            batch[slot] = decoded
        start_forward = time.time()

        with self.lock:
            self.mod.forward(Batch([batch]))
            outputs = self.mod.get_outputs()[0].asnumpy()
        end_forward = time.time()
        msgs_out = [
            pb.ForwardMsg(network_output_buf=cPickle.dumps(
                outputs[i], protocol=cPickle.HIGHEST_PROTOCOL))
            for i in range(len(msgs.msgs))
        ]
        log.info('{} use time {}, forward time {}, batch_size: {}/{}'.format(
            self.app_name,
            time.time() - start, end_forward - start_forward, len(msgs.msgs),
            self.batch_size))
        return pb.ForwardMsgs(msgs=msgs_out)
Example #3
0
 def net_inference(self, msgs):  # pylint: disable=no-self-use
     """Echo "inference": copy each message's input buffer to its output.

     Bug fix: the original iterated the ForwardMsgs protobuf itself
     (``for i in msgs``) and then subscripted it (``msgs[i]``). A protobuf
     message is neither iterable nor subscriptable, so both operations
     raise TypeError; the repeated field lives in ``msgs.msgs``.
     """
     assert isinstance(msgs, pb.ForwardMsgs)
     msgs_out = []
     for msg in msgs.msgs:
         msg_out = pb.ForwardMsg()
         msg_out.network_output_buf = msg.network_input_buf
         msgs_out.append(msg_out)
     return pb.ForwardMsgs(msgs=msgs_out)
Example #4
0
 def inference_msgs(self, msgs):  # pylint: disable=no-self-use
     """Round-trip a ForwardMsgs batch through the inference socket.

     Reports the wall-clock forward time on the monitor socket and
     returns the parsed reply batch.
     """
     assert isinstance(msgs, pb.ForwardMsgs)
     started = time.time()
     self.inference_req.send(msgs.SerializeToString())
     reply = self.inference_req.recv()
     elapsed = time.time() - started
     self.monitor_push.send(
         pb.MonitorMetric(kind="forward_time",
                          pid=str(self.pid),
                          value=elapsed).SerializeToString())
     parsed = pb.ForwardMsgs()
     parsed.ParseFromString(reply)
     assert isinstance(parsed, pb.ForwardMsgs)
     return parsed
Example #5
0
    def net_inference(self, msgs):
        """Run fine/coarse classifiers plus the detector on a batch.

        Every output message carries the same pickled result blob for the
        whole batch; the per-message ``meta['data']`` JSON records which
        batch index belongs to that message.
        """
        assert isinstance(msgs, pb.ForwardMsgs)
        start = time.time()
        start_forward = time.time()

        for slot, msg in enumerate(msgs.msgs):
            unpacked = cPickle.loads(msg.network_input_buf)
            img_cls = unpacked['img_cls']
            assert img_cls.shape == (3, 225, 225)
            img_det = unpacked['img_det']
            assert img_det.shape == (3, 320, 320)

            # Both classifiers share the 225x225 crop; the detector gets
            # its own 320x320 crop.
            self.net_fine.blobs['data'].data[slot] = img_cls
            self.net_coarse.blobs['data'].data[slot] = img_cls
            self.net_det.blobs['data'].data[slot] = img_det
        with self.lock:
            output_fine = self.net_fine.forward()
            output_coarse = self.net_coarse.forward()
            output_det = self.net_det.forward()
        # First dim is batch_size; second dim is the 48 fine classes.
        assert output_fine['prob'].shape[1:] == (48, 1, 1)
        # First dim is batch_size; second dim is the 7 coarse classes.
        assert output_coarse['prob'].shape[1:] == (7, 1, 1)
        # First dim is batch_size; third dim is the number of detected
        # objects; fourth dim is the class.
        assert output_det['detection_out'].shape[1] == 1
        assert output_det['detection_out'].shape[3] == 7
        end_forward = time.time()
        buf = cPickle.dumps(
            {
                'output_fine': output_fine,
                'output_coarse': output_coarse,
                'output_det': output_det
            },
            protocol=cPickle.HIGHEST_PROTOCOL)
        msgs_out = [
            pb.ForwardMsg(network_output_buf=buf,
                          meta={
                              "data":
                              json.dumps({
                                  'image_index': i
                              }).encode('utf8')
                          }) for i in range(len(msgs.msgs))
        ]
        log.info('{} use time {}, forward time {}, batch_size: {}/{}'.format(
            self.app_name,
            time.time() - start, end_forward - start_forward, len(msgs.msgs),
            self.batch_size))
        return pb.ForwardMsgs(msgs=msgs_out)
Example #6
0
    def serve(self):
        """Pull ForwardMsg buffers, batch up to ``self.batch_size`` of
        them, run inference, and push serialized results. Never returns.
        """
        max_batch_size = self.batch_size

        log.info('run forward max_batch_size:%s', max_batch_size)
        network_in_context = zmq.Context()
        network_in = network_in_context.socket(zmq.PULL)
        network_in.connect(const.FORWARD_IN)

        network_out_context = zmq.Context()
        network_out = network_out_context.socket(zmq.PUSH)
        network_out.connect(const.FORWARD_OUT)
        pending = []
        self.monitor_push.send(
            pb.MonitorMetric(
                kind="forward_started_success",
                pid=str(self.pid)).SerializeToString())
        while True:

            def enqueue(raw):
                parsed = pb.ForwardMsg()
                parsed.ParseFromString(raw)
                pending.append(parsed)

            # Block for the first message, then drain non-blockingly until
            # a full batch is collected or the inbound queue is empty.
            enqueue(network_in.recv())
            while len(pending) < max_batch_size:
                try:
                    enqueue(network_in.recv(zmq.NOBLOCK))
                except Again:
                    break
            if not pending:
                continue
            batch = pending[:max_batch_size]
            outputs = self.net_inference_wrap(pb.ForwardMsgs(msgs=batch))
            network_out.send(outputs.SerializeToString())
            pending = pending[max_batch_size:]
Example #7
0
 def inference_msg(self, msg):
     """Run inference on a single ForwardMsg via a batch of one."""
     assert isinstance(msg, pb.ForwardMsg)
     batch_out = self.inference_msgs(pb.ForwardMsgs(msgs=[msg]))
     result = batch_out.msgs[0]
     assert isinstance(result, pb.ForwardMsg)
     return result