Example #1
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
        if self.mode == 'test':
            self.data, self.label = minibatch.get_minibatch(roidb, self.num_classes, self.mode)
        else:
            work_load_list = self.work_load_list
            ctx = self.ctx
            if work_load_list is None:
                work_load_list = [1] * len(ctx)
            assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
                "Invalid settings for work load. "
            slices = _split_input_slice(self.batch_size, work_load_list)

            data_list = []
            label_list = []
            for islice in slices:
                iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
                data, label = minibatch.get_minibatch(iroidb, self.num_classes, self.mode)
                data_list.append(data)
                label_list.append(label)

            # pad data first and then assign anchor (read label)
            data_tensor = tensor_vstack([batch['data'] for batch in data_list])
            for data, data_pad in zip(data_list, data_tensor):
                data['data'] = data_pad[np.newaxis, :]

            new_label_list = []
            for data, label in zip(data_list, label_list):
                # infer label shape
                data_shape = {k: v.shape for k, v in data.items()}
                del data_shape['im_info']
                _, feat_shape, _ = self.feat_sym.infer_shape(**data_shape)
                feat_shape = [int(i) for i in feat_shape[0]]

                # assign anchor for label
                label = minibatch.assign_anchor(feat_shape, label['gt_boxes'], data['im_info'],
                                                self.feat_stride, self.anchor_scales,
                                                self.anchor_ratios, self.allowed_border)
                del data['im_info']
                new_label_list.append(label)

            all_data = dict()
            for key in ['data']:
                all_data[key] = tensor_vstack([batch[key] for batch in data_list])

            all_label = dict()
            all_label['label'] = tensor_vstack([batch['label'] for batch in new_label_list], pad=-1)
            for key in ['bbox_target', 'bbox_inside_weight', 'bbox_outside_weight']:
                all_label[key] = tensor_vstack([batch[key] for batch in new_label_list])

            self.data = [mx.nd.array(all_data['data'])]

            self.label = [mx.nd.array(all_label['label']),
                          mx.nd.array(all_label['bbox_target']),
                          mx.nd.array(all_label['bbox_inside_weight']),
                          mx.nd.array(all_label['bbox_outside_weight'])]
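This version pads the images to a common size with tensor_vstack before anchors are assigned. As a rough, self-contained sketch of what such a pad-then-vstack helper does (an assumption about its behavior, not the project's implementation):

import numpy as np

def padded_vstack(tensors, pad=0):
    # Sketch of a pad-then-vstack helper (assumption, not the project's code):
    # pad every tensor's trailing dimensions to the largest size in the list,
    # then concatenate along axis 0.
    max_shape = np.max([t.shape for t in tensors], axis=0)
    padded = []
    for t in tensors:
        pad_width = [(0, 0)] + [(0, int(m) - s) for s, m in zip(t.shape[1:], max_shape[1:])]
        padded.append(np.pad(t, pad_width, mode='constant', constant_values=pad))
    return np.concatenate(padded, axis=0)

# two 1 x C x H x W "images" of different spatial size -> one 2 x C x H x W batch
a = np.zeros((1, 3, 600, 800), dtype=np.float32)
b = np.zeros((1, 3, 480, 640), dtype=np.float32)
print(padded_vstack([a, b]).shape)  # (2, 3, 600, 800)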
Example #2
	def _get_next_minibatch(self):
		if cfg.TRAIN.USE_PREFETCH:
			return self._blob_queue.get()
		else:
			db_inds = self._get_next_minibatch_inds()
			minibatch_db = [self._roidb[i] for i in db_inds]
			return get_minibatch(minibatch_db, self._num_classes)
Example #3
 def run(self):
     print('BlobFetcher started')
     while True:
         db_inds = self._get_next_minibatch_inds()
         minibatch_db = [self._roidb[i] for i in db_inds]
         blobs = get_minibatch(minibatch_db, self._num_classes)
         self._queue.put(blobs)
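run() loops forever, so the fetcher is normally started as a daemon process feeding a bounded queue that the training layer drains. A hypothetical sketch of the consumer-side wiring (names and sizes are assumptions, not the original code):

from multiprocessing import Queue

blob_queue = Queue(10)   # bounded, so the fetcher stays only a few minibatches ahead
# fetcher = BlobFetcher(blob_queue, roidb, num_classes)  # the process whose run() is shown above
# fetcher.daemon = True   # let it die with the parent
# fetcher.start()
# blobs = blob_queue.get()  # what the training layer consumes each iteration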
Example #4
 def __getitem__(self, index_tuple):
     index, ratio = index_tuple
     roidb = self._roidb[index]
     roidb = self.data_augmenter(roidb)
     single_db = [roidb]
     # for one image:
     # blobs: {'data': (ndarray)1 x c x h x w, 'im_info': (ndarray)1 x 3,
     #         'bboxes': (ndarray)1 x num_boxes x 4, 'gt_classes': (ndarray)1 x num_boxes}
     blobs = get_minibatch(single_db)
     # squeeze batch dim
     # blobs: {'data': (ndarray)c x h x w, 'im_info': (ndarray)3,
     #         'bboxes': (ndarray)num_boxes x 4, 'gt_classes': (ndarray)num_boxes}
     for key in blobs:
         blobs[key] = blobs[key].squeeze(axis=0)
     # if self._roidb[index]['need_crop']:
     #     self.crop_data(blobs, ratio)
     #     # check bounding box
     #     boxes = blobs['bboxes']
     #     invalid = (boxes[:, 0] == boxes[:, 2]) | (boxes[:, 1] == boxes[:, 3])
     #     valid_inds = np.nonzero(~invalid)[0]
     #     if len(valid_inds) == 0:  # for debug
     #         print(index, 'index')
     #         print(self._roidb[index], 'roidb for this index')
     #         print(boxes, 'boxes')
     #     if len(valid_inds) < len(boxes):
     #         for key in ['bboxes', 'gt_classes']:
     #             if key in blobs:
     #                 blobs[key] = blobs[key][valid_inds]
     return blobs
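Because the batch dimension is squeezed out and 'data' can differ in height and width per image, these samples usually cannot go through a default stacking collate. A hypothetical collate function (an assumption, not part of the original dataset class) that keeps each field as a plain list:

def list_collate(batch):
    # batch: a list of per-image blob dicts as returned by __getitem__ above;
    # variable-size arrays are kept in Python lists rather than stacked.
    return {key: [blobs[key] for blobs in batch] for key in batch[0]}

# e.g. with a PyTorch DataLoader:
#   loader = DataLoader(dataset, batch_size=2, collate_fn=list_collate)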
Example #5
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        #print cur_from,cur_to,self.index[cur_from:cur_to]
        imdb = []
        for i in range(cur_from, cur_to):
            idx = self.index[i]
            imdb_ = dict()
            is_flip = False
            if idx >= self.image_num:
                imdb_['flipped'] = True
                is_flip = True
                idx = idx - self.image_num
            else:
                imdb_['flipped'] = False

            annotation = self.imdb[idx].strip().split(' ')
            imdb_['image'] = annotation[0]
            #print(imdb_['image'])
            label = int(annotation[1])
            imdb_['label'] = label

            imdb.append(imdb_)

        data, label = minibatch.get_minibatch(imdb, self.thread_num, self.mode)
        self.data = [mx.nd.array(data['data'])]
        self.label = [mx.nd.array(label[name]) for name in self.label_names]
Example #6
    def _get_train_batch(self):
        """
        utilize minibatch sampling, e.g. 2 images and 64 rois per image
        :return: training batch (e.g. 128 samples)
        """
        work_load_list = self.work_load_list
        ctx = self.ctx
        if work_load_list is None:
            work_load_list = [1] * len(ctx)
        assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
            "Invalid settings for work load. "
        slices = _split_input_slice(self.batch_size, work_load_list)

        cur_from = self.cur
        cur_to = cur_from + self.batch_size
        if cur_to <= self.size:
            roidb = [self.roidb[i] for i in range(cur_from, cur_to)]
        else:
            pad = cur_to - self.size
            roidb = self.roidb[cur_from:] + self.roidb[:pad]

        batch_list = []
        for islice in slices:
            num_im = islice.stop - islice.start
            iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
            batch = minibatch.get_minibatch(iroidb, self.num_classes, self.ctx)
            batch_list.append(batch)

        all_batch = dict()
        for key in batch_list[0].keys():
            all_batch[key] = tensor_vstack([batch[key] for batch in batch_list])

        return all_batch
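_split_input_slice carves the image batch into one contiguous slice per device, in proportion to work_load_list, and each slice is fed to get_minibatch separately. For equal work loads the effect is just an even partition, sketched below (an illustration, not the MXNet helper itself):

def split_equal_slices(batch_size, num_ctx):
    # Evenly partition [0, batch_size) into one contiguous slice per device
    # (assumes batch_size is a multiple of num_ctx).
    per_ctx = batch_size // num_ctx
    return [slice(i * per_ctx, (i + 1) * per_ctx) for i in range(num_ctx)]

print(split_equal_slices(4, 2))  # [slice(0, 2, None), slice(2, 4, None)] -> 2 images per GPU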
Example #7
 def get_batch(self):
     cur_from = self.cur
     cur_to = min(cur_from + self.batch_size, self.size)
     imdb = [self.imdb[self.index[i]] for i in range(cur_from, cur_to)]
     data, label = minibatch.get_minibatch(imdb, self.num_classes, self.im_size)
     self.data = data['data']
     self.label = [label[name] for name in self.label_names]
Example #9
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
        if self.mode == 'test':
            self.data, self.label = minibatch.get_minibatch(
                roidb, self.num_classes, self.mode)
        else:
            work_load_list = self.work_load_list
            ctx = self.ctx
            if work_load_list is None:
                work_load_list = [1] * len(ctx)
            assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
                "Invalid settings for work load. "
            slices = _split_input_slice(self.batch_size, work_load_list)

            data_list = []
            label_list = []
            for islice in slices:
                iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
                data, label = minibatch.get_minibatch(iroidb, self.num_classes,
                                                      self.mode)
                data_list.append(data)
                label_list.append(label)

            all_data = dict()
            for key in data_list[0].keys():
                all_data[key] = tensor_vstack(
                    [batch[key] for batch in data_list])

            all_label = dict()
            for key in label_list[0].keys():
                all_label[key] = tensor_vstack(
                    [batch[key] for batch in label_list])

            self.data = [
                mx.nd.array(all_data['data']),
                mx.nd.array(all_data['rois'])
            ]
            self.label = [
                mx.nd.array(all_label['label']),
                mx.nd.array(all_label['bbox_target']),
                mx.nd.array(all_label['bbox_inside_weight']),
                mx.nd.array(all_label['bbox_outside_weight'])
            ]
Example #10
    def _get_next_minibatch(self):
        """Return the blobs to be used for the next minibatch.

        If cfg.TRAIN.USE_PREFETCH is True, then blobs will be computed in a
        separate process and made available through self._blob_queue.
        """
        db_inds = self._get_next_minibatch_inds()
        minibatch_db = [self._roidb[i] for i in db_inds]
        return get_minibatch(minibatch_db, self._num_classes)
Example #11
 def _get_train_batch(self):
     """
     utilize minibatch sampling, e.g. 2 images and 64 rois per image
     :return: training batch (e.g. 128 samples)
     """
     cur_from = self.cur
     cur_to = min(cur_from + self.batch_size, self.size)
     roidb = [self.roidb[i] for i in range(cur_from, cur_to)]
     batch = minibatch.get_minibatch(roidb, self.num_classes)
     return batch
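These get_batch / _get_train_batch methods normally live inside an mx.io.DataIter subclass whose next() advances self.cur and wraps the prepared arrays in a DataBatch. A minimal sketch of that surrounding plumbing, assuming the attributes used above (self.cur, self.size, self.batch_size) exist:

import mxnet as mx

class LoaderSketch(mx.io.DataIter):
    # Sketch of the iterator plumbing around get_batch (an assumption, not
    # copied from any of the projects above).
    def reset(self):
        self.cur = 0

    def iter_next(self):
        return self.cur + self.batch_size <= self.size

    def next(self):
        if not self.iter_next():
            raise StopIteration
        self.get_batch()               # fills self.data / self.label as in the examples
        self.cur += self.batch_size
        return mx.io.DataBatch(data=self.data, label=self.label, pad=0)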
Example #13
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        #print cur_from,cur_to,self.index[cur_from:cur_to]
        imdb = []
        for i in range(cur_from,cur_to):
            idx = self.index[i]
            imdb_ = dict()
            is_flip = False
            if idx >= self.image_num:
                imdb_['flipped'] = True
                is_flip = True
                idx = idx - self.image_num
            else:
                imdb_['flipped'] = False

            annotation = self.imdb[idx].strip().split(' ')
            imdb_['image'] = annotation[0]+'.jpg'
            #print(imdb_['image'])
            label = int(annotation[1])
            if self.with_type:
                imdb_['type_label'] = int(label)
               
            if label == 1: #pos
                if self.with_cls:
                    imdb_['label'] = 1
                if self.with_bbox:
                    bbox_target = np.array(annotation[2:],dtype=np.float32)
                    if is_flip:
                        bbox_target[0], bbox_target[2] = -bbox_target[2], -bbox_target[0]
                    imdb_['bbox_target'] = bbox_target
            elif label == 0:              #neg
                if self.with_cls:
                    imdb_['label'] = 0
                if self.with_bbox:
                    imdb_['bbox_target'] = np.zeros((4,))
            elif label == -1:
                if self.with_cls:
                    imdb_['label'] = -1
                if self.with_bbox:
                    bbox_target = np.array(annotation[2:],dtype=np.float32)
                    if is_flip:
                        bbox_target[0], bbox_target[2] = -bbox_target[2], -bbox_target[0]
                    imdb_['bbox_target'] = bbox_target
            elif label == -2:             #landmark
                if self.with_cls:
                    imdb_['label'] = -1
                if self.with_bbox:
                    imdb_['bbox_target'] = np.zeros((4,))

            imdb.append(imdb_)
        
        data, label = minibatch.get_minibatch(imdb, self.num_classes, self.im_size, self.with_type, self.with_cls, self.with_bbox, self.thread_num)
        self.data = [mx.nd.array(data['data'])]
        self.label = [mx.nd.array(label[name]) for name in self.label_names]
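For reference, the annotation format this loop parses can be read off the code: an image name without extension, an integer label (1 positive, 0 negative, -1 and -2 handled specially), and four bbox regression targets where applicable. A hypothetical line with made-up values, run through the same parsing:

# Hypothetical annotation line (made-up values) in the format parsed above:
line = "images/000001 1 -0.05 0.12 0.08 -0.03"
fields = line.strip().split(' ')
image_path = fields[0] + '.jpg'                 # '.jpg' appended as in the loader
label = int(fields[1])                          # 1 pos / 0 neg / -1 / -2
bbox_target = [float(v) for v in fields[2:]]    # present for labels 1 and -1
print(image_path, label, bbox_target)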
Example #14
 def _get_train_batch(self):
     """
     utilize minibatch sampling, e.g. 2 images and 64 rois per image
     :return: training batch (e.g. 128 samples)
     """
     cur_from = self.cur
     cur_to = cur_from + self.batch_size
     if cur_to <= self.size:
         roidb = [self.roidb[i] for i in range(cur_from, cur_to)]
     else:
         pad = cur_to - self.size
         roidb = self.roidb[cur_from:] + self.roidb[:pad]
     batch = minibatch.get_minibatch(roidb, self.num_classes, self.ctx, self.work_load_list)
     return batch
Example #15
File: loader.py Project: 4ker/mxnet
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
        if self.mode == 'test':
            self.data, self.label = minibatch.get_minibatch(roidb, self.num_classes, self.mode)
        else:
            work_load_list = self.work_load_list
            ctx = self.ctx
            if work_load_list is None:
                work_load_list = [1] * len(ctx)
            assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
                "Invalid settings for work load. "
            slices = _split_input_slice(self.batch_size, work_load_list)

            data_list = []
            label_list = []
            for islice in slices:
                iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
                data, label = minibatch.get_minibatch(iroidb, self.num_classes, self.mode)
                data_list.append(data)
                label_list.append(label)

            all_data = dict()
            for key in data_list[0].keys():
                all_data[key] = tensor_vstack([batch[key] for batch in data_list])

            all_label = dict()
            for key in label_list[0].keys():
                all_label[key] = tensor_vstack([batch[key] for batch in label_list])

            self.data = [mx.nd.array(all_data['data']),
                         mx.nd.array(all_data['rois'])]
            self.label = [mx.nd.array(all_label['label']),
                          mx.nd.array(all_label['bbox_target']),
                          mx.nd.array(all_label['bbox_inside_weight']),
                          mx.nd.array(all_label['bbox_outside_weight'])]
Example #16
    def _get_next_minibatch(self):
        """Return the blobs to be used for the next minibatch.

        Blobs will be computed in a separate process and made available through
        self._blob_queue.
        """
        db_inds = self._get_next_minibatch_inds()
        minibatch_db = [self._db[i] for i in db_inds]
        return get_minibatch(
            minibatch_db,
            self._is_training,
            self._num_classes,
            self._transformer,
            self._input_name,
            self._image_dims,
            self._crop_dims,
            self._make_full_label_blob,
        )
Example #17
 def _get_next_minibatch(self):
     """Return the blobs to be used for the next minibatch."""
     db_inds = self._get_next_minibatch_inds()
     minibatch_db = [self._roidb[i] for i in db_inds]
     return get_minibatch(minibatch_db, self._num_classes)
Example #18
 def _get_next_minibatch(self):
     """Return the blobs to be used for the next minibatch.
     """
     db_inds = self._get_next_minibatch_inds()
     minibatch_db = [self._roidb[i] for i in db_inds]
     return get_minibatch(minibatch_db, self._num_classes)
Example #19
    def get_batch(self):
        cur_from = self.cur
        cur_to = min(cur_from + self.batch_size, self.size)
        roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
        if self.mode == 'test':
            self.data, self.label = minibatch.get_minibatch(
                roidb, self.num_classes, self.mode)
        else:
            work_load_list = self.work_load_list
            ctx = self.ctx
            if work_load_list is None:
                work_load_list = [1] * len(ctx)
            assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
                "Invalid settings for work load. "
            slices = _split_input_slice(self.batch_size, work_load_list)

            data_list = []
            label_list = []
            for islice in slices:
                iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
                data, label = minibatch.get_minibatch(iroidb, self.num_classes,
                                                      self.mode)
                data_list.append(data)
                label_list.append(label)

            # pad data first and then assign anchor (read label)
            data_tensor = tensor_vstack([batch['data'] for batch in data_list])
            for data, data_pad in zip(data_list, data_tensor):
                data['data'] = data_pad[np.newaxis, :]

            new_label_list = []
            for data, label in zip(data_list, label_list):
                # infer label shape
                data_shape = {k: v.shape for k, v in data.items()}
                del data_shape['im_info']
                _, feat_shape, _ = self.feat_sym.infer_shape(**data_shape)
                feat_shape = [int(i) for i in feat_shape[0]]

                # assign anchor for label
                label = minibatch.assign_anchor(feat_shape, label['gt_boxes'],
                                                data['im_info'],
                                                self.feat_stride,
                                                self.anchor_scales,
                                                self.anchor_ratios,
                                                self.allowed_border)
                del data['im_info']
                new_label_list.append(label)

            all_data = dict()
            for key in ['data']:
                all_data[key] = tensor_vstack(
                    [batch[key] for batch in data_list])

            all_label = dict()
            all_label['label'] = tensor_vstack(
                [batch['label'] for batch in new_label_list], pad=-1)
            for key in [
                    'bbox_target', 'bbox_inside_weight', 'bbox_outside_weight'
            ]:
                all_label[key] = tensor_vstack(
                    [batch[key] for batch in new_label_list])

            self.data = [mx.nd.array(all_data['data'])]

            self.label = [
                mx.nd.array(all_label['label']),
                mx.nd.array(all_label['bbox_target']),
                mx.nd.array(all_label['bbox_inside_weight']),
                mx.nd.array(all_label['bbox_outside_weight'])
            ]
Example #20
 def _get_next_minibatch(self):
     minibatch_indexes = self._get_next_minibatch_index()
     minibatch_db = [self._imdb[i] for i in minibatch_indexes]
     return get_minibatch(minibatch_db)
Example #21
    def train_model(self, cfg, sess, max_iters):
        """Network traininig loop."""
        n_steps = cfg.TRAIN.N_STEPS
        m = self.m
        train_trajdb = self.train_trajdb
        train_roidb = self.train_roidb

        # `perm` and `iters_per_epoch` are used below but never defined in the
        # original snippet; the definitions here are a reconstruction that
        # assumes one trajectory per training iteration.
        perm = np.arange(len(train_trajdb))
        iters_per_epoch = len(train_trajdb)

        avg_loss = 0.0
        np.random.shuffle(perm)
        self.info("#iterations per epoch: %d" % iters_per_epoch)

        epoch = 0
        for iter in range(max_iters):
            start_time = time.time()

            # Set learning rate
            sess.run(tf.assign(self.lr, cfg.TRAIN.INITIAL_LR))

            # Prepare training batch (`idx` / `iter_in_epoch` reconstructed:
            # walk the shuffled permutation, one trajectory per iteration)
            iter_in_epoch = iter % iters_per_epoch
            idx = perm[iter_in_epoch]
            trajdb = train_trajdb[idx]

            # Find corresponding roidb
            video_id = trajdb[0]['video_id']
            roidb = train_roidb[video_id]

            # Get batch data
            batch_data = get_minibatch(cfg, trajdb, roidb,
                                        dtype='conv5', random=True)
            predict_mask = np.ones(n_steps)
            predict_mask[0] = 0
            
            feed_dict = {m.im_inputs: batch_data['im_inputs'],
                        m.roi_inputs: batch_data['roi_inputs'],
                        m.score_targets: batch_data['score_targets'],
                        m.predict_mask: predict_mask,
                        m.keep_prob: 1.0}

            # Run training
            loss, score_preds, final_state, _ = sess.run(
                [m.loss, m.score_preds, m.final_state, self.train_op],
                feed_dict=feed_dict)

            avg_loss += loss
            time_cost = time.time() - start_time

            self.info('iter: %d / %d, train_loss: %.4f, lr: %f, time: %.1f' %\
                    (iter+1, max_iters, loss, self.lr.eval(), time_cost))

            # Debug
            score_targets = feed_dict[m.score_targets]
            msg = '\n'
            for i in range(score_preds.shape[1]):
                row = ''
                for t in range(score_preds.shape[0]):
                    row += '(%.3f, %.3f)'%(score_targets[t,i], score_preds[t,i])
                msg += row + '\n'
            print(msg)

            # Finish an epoch
            if iter_in_epoch == iters_per_epoch - 1:
                avg_loss /= iters_per_epoch
                self.info('Epoch: %d , avg_loss: %.4f' %\
                            (epoch+1, avg_loss))

                self.saver.save(sess, self.ckpt_path)
                self.info("Save checkpoint.")

                np.random.shuffle(perm)
                train_err = 0.0
                epoch += 1
                avg_loss = 0