Code example #1
 def graph_fn():
   corners1 = tf.constant([[4.0, 3.0, 7.0, 5.0], [5.0, 6.0, 10.0, 7.0]])
   corners2 = tf.constant([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0],
                           [0.0, 0.0, 20.0, 20.0]])
   boxes1 = box_list.BoxList(corners1)
   boxes2 = box_list.BoxList(corners2)
   boxes_empty = box_list.BoxList(tf.zeros((0, 4)))
   iou_empty_1 = box_list_ops.iou(boxes1, boxes_empty)
   iou_empty_2 = box_list_ops.iou(boxes_empty, boxes2)
   iou_empty_3 = box_list_ops.iou(boxes_empty, boxes_empty)
   return iou_empty_1, iou_empty_2, iou_empty_3
Code example #2
def match_and_select_feature(groundtruth_boxes, anchors, feature_maps):
    """Selects features at the groundtruth box locations.

    Args:
        groundtruth_boxes: a tensor of batch_size x 4
        anchors: generated anchor BoxList
        feature_maps: a list of feature pyramid levels, each element a
            tensor of batch_size x height_i x width_i x channel

    Returns:
        selected_feature: a tensor of batch_size x 1 x 1 x channel
    """
    # One single-box BoxList per image in the batch.
    groundtruth_boxlists = [
        box_list.BoxList(tf.expand_dims(box, 0))
        for box in tf.unstack(groundtruth_boxes, axis=0)
    ]

    # Flatten each pyramid level to [batch, num_locations_i, channel],
    # concatenate the levels, then split into per-image feature maps.
    feature_maps = [
        tf.reshape(m, [m.get_shape()[0].value, -1,
                       m.get_shape()[-1].value]) for m in feature_maps
    ]
    feature_maps = tf.unstack(tf.concat(feature_maps, axis=1), axis=0)
    # Integer division: several anchors share each feature-map location.
    num_anchors_per_location = (anchors.get().get_shape()[0].value //
                                feature_maps[0].get_shape()[0].value)
    selected_feature = list()
    for groundtruth_boxlist, feature_map in zip(groundtruth_boxlists,
                                                feature_maps):
        iou = box_list_ops.iou(groundtruth_boxlist, anchors)
        # Map the best-matching anchor index back to its feature-map location.
        max_ind = tf.argmax(iou, axis=1) // num_anchors_per_location
        selected_feature.append(tf.gather(feature_map, max_ind))
    selected_feature = tf.concat(selected_feature, axis=0)
    selected_feature = tf.expand_dims(tf.expand_dims(selected_feature, axis=1),
                                      axis=1)
    return selected_feature
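A hypothetical usage sketch for the function above, assuming TF1-style graph mode with fully static shapes; the batch size, channel count, and anchor layout below are illustrative only:

# Hypothetical inputs: a 2-image batch, one 2x2 feature-map level with
# 16 channels, and 2 anchors per location (4 locations -> 8 anchors).
batch_groundtruth = tf.constant([[0.0, 0.0, 0.5, 0.5],
                                 [0.2, 0.2, 0.8, 0.8]])
anchor_mins = tf.random_uniform([8, 2], minval=0.0, maxval=0.5)
anchor_corners = tf.concat([anchor_mins, anchor_mins + 0.5], axis=1)
anchors = box_list.BoxList(anchor_corners)
feature_maps = [tf.random_uniform([2, 2, 2, 16])]
selected = match_and_select_feature(batch_groundtruth, anchors, feature_maps)
# selected has static shape [2, 1, 1, 16].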
Code example #3
 def test_iouworks_on_empty_inputs(self):
   corners1 = tf.constant([[4.0, 3.0, 7.0, 5.0], [5.0, 6.0, 10.0, 7.0]])
   corners2 = tf.constant([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0],
                           [0.0, 0.0, 20.0, 20.0]])
   boxes1 = box_list.BoxList(corners1)
   boxes2 = box_list.BoxList(corners2)
   boxes_empty = box_list.BoxList(tf.zeros((0, 4)))
   iou_empty_1 = box_list_ops.iou(boxes1, boxes_empty)
   iou_empty_2 = box_list_ops.iou(boxes_empty, boxes2)
   iou_empty_3 = box_list_ops.iou(boxes_empty, boxes_empty)
   with self.test_session() as sess:
     iou_output_1, iou_output_2, iou_output_3 = sess.run(
         [iou_empty_1, iou_empty_2, iou_empty_3])
     self.assertAllEqual(iou_output_1.shape, (2, 0))
     self.assertAllEqual(iou_output_2.shape, (0, 3))
     self.assertAllEqual(iou_output_3.shape, (0, 0))
Code example #4
 def graph_fn():
   corners1 = tf.constant([[4.0, 3.0, 7.0, 5.0], [5.0, 6.0, 10.0, 7.0]])
   corners2 = tf.constant([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0],
                           [0.0, 0.0, 20.0, 20.0]])
   boxes1 = box_list.BoxList(corners1)
   boxes2 = box_list.BoxList(corners2)
   iou = box_list_ops.iou(boxes1, boxes2)
   return iou
Code example #5
 def test_iou(self):
   corners1 = tf.constant([[4.0, 3.0, 7.0, 5.0], [5.0, 6.0, 10.0, 7.0]])
   corners2 = tf.constant([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0],
                           [0.0, 0.0, 20.0, 20.0]])
   exp_output = [[2.0 / 16.0, 0, 6.0 / 400.0], [1.0 / 16.0, 0.0, 5.0 / 400.0]]
   boxes1 = box_list.BoxList(corners1)
   boxes2 = box_list.BoxList(corners2)
   iou = box_list_ops.iou(boxes1, boxes2)
   with self.test_session() as sess:
     iou_output = sess.run(iou)
     self.assertAllClose(iou_output, exp_output)
Code example #6
  def _compare(self, boxlist1, boxlist2):
    """Compute pairwise IOU similarity between the two BoxLists.

    Args:
      boxlist1: BoxList holding N boxes.
      boxlist2: BoxList holding M boxes.

    Returns:
      A tensor with shape [N, M] representing pairwise iou scores.
    """
    return box_list_ops.iou(boxlist1, boxlist2)
Code example #7
 def graph_fn():
   boxes = box_list.BoxList(
       tf.constant([[0.1, 0.1, 0.4, 0.4],
                    [0.1, 0.1, 0.5, 0.5],
                    [0.6, 0.6, 0.8, 0.8],
                    [0.2, 0.2, 0.3, 0.3]], tf.float32))
   sampled_boxes = box_list_ops.sample_boxes_by_jittering(
       boxlist=boxes, num_boxes_to_sample=10)
   iou = box_list_ops.iou(boxes, sampled_boxes)
   iou_max = tf.reduce_max(iou, axis=0)
   return sampled_boxes.get(), iou_max
Code example #8
 def test_sample_boxes_by_jittering(self):
   boxes = box_list.BoxList(
       tf.constant([[0.1, 0.1, 0.4, 0.4],
                    [0.1, 0.1, 0.5, 0.5],
                    [0.6, 0.6, 0.8, 0.8],
                    [0.2, 0.2, 0.3, 0.3]], tf.float32))
   sampled_boxes = box_list_ops.sample_boxes_by_jittering(
       boxlist=boxes, num_boxes_to_sample=10)
   iou = box_list_ops.iou(boxes, sampled_boxes)
   iou_max = tf.reduce_max(iou, axis=0)
   with self.test_session() as sess:
     (np_sampled_boxes, np_iou_max) = sess.run([sampled_boxes.get(), iou_max])
     self.assertAllEqual(np_sampled_boxes.shape, [10, 4])
     self.assertAllGreater(np_iou_max, 0.5)
Code example #9
  def _compare(self, boxlist1, boxlist2):
    """Compute pairwise IOU similarity between the two BoxLists and score.

    Args:
      boxlist1: BoxList holding N boxes. Must have a score field.
      boxlist2: BoxList holding M boxes.

    Returns:
      A tensor with shape [N, M] representing scores thresholded by pairwise
      iou scores.
    """
    ious = box_list_ops.iou(boxlist1, boxlist2)
    scores = boxlist1.get_field(fields.BoxListFields.scores)
    scores = tf.expand_dims(scores, axis=1)
    row_replicated_scores = tf.tile(scores, [1, tf.shape(ious)[-1]])
    thresholded_ious = tf.where(ious > self._iou_threshold,
                                row_replicated_scores, tf.zeros_like(ious))

    return thresholded_ious
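A minimal sketch of how this thresholded comparison behaves, assuming the method above belongs to something like the TF Object Detection API's ThresholdedIouSimilarity calculator and that box_list, region_similarity_calculator, and standard_fields can be imported from object_detection.core:

import tensorflow as tf
from object_detection.core import box_list
from object_detection.core import region_similarity_calculator
from object_detection.core import standard_fields as fields

# One scored ground-truth box and two candidate boxes.
scored_boxes = box_list.BoxList(tf.constant([[0.0, 0.0, 1.0, 1.0]]))
scored_boxes.add_field(fields.BoxListFields.scores, tf.constant([0.9]))
candidates = box_list.BoxList(tf.constant([[0.0, 0.0, 0.9, 0.9],    # IoU = 0.81
                                           [0.5, 0.5, 1.5, 1.5]]))  # IoU ~ 0.14
calc = region_similarity_calculator.ThresholdedIouSimilarity(iou_threshold=0.5)
thresholded = calc.compare(scored_boxes, candidates)
# Expected result: [[0.9, 0.0]] -- the score survives only where IoU exceeds the threshold.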
Code example #10
    def export_boxes_csv(sess, detection_graph, data_filename,
                         output_filename):
        columns = description_features + ['num_objects'] + [
            'num_detections', 'xmin', 'ymin', 'xmax', 'ymax', 'iou',
            'detected', 'label', 'difficulty'
        ]

        def _restart_res():
            return pd.DataFrame(columns=columns)

        res = _restart_res()
        res.to_csv(output_filename)
        dataset = tf.data.TFRecordDataset(data_filename, compression_type='')

        num_images_saved = 0

        image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
        for i, data in enumerate(tqdm(dataset)):

            tf_record = parse_camera_tfrecord_example(data)
            features = get_features(tf_record)

            image = tf.image.decode_jpeg(tf_record["image/encoded"]).numpy()
            image_expanded = np.expand_dims(np.array(image), axis=0)
            detection_boxes = detection_graph.get_tensor_by_name(
                'detection_boxes:0')
            num_detections = detection_graph.get_tensor_by_name(
                'num_detections:0')

            (boxes,
             n_detections) = sess.run([detection_boxes, num_detections],
                                      feed_dict={image_tensor: image_expanded})

            boxes = np.squeeze(boxes)[:int(n_detections)]
            gt_boxes = tf.concat([
                tf.expand_dims(tf_record["image/object/bbox/ymin"], axis=1),
                tf.expand_dims(tf_record["image/object/bbox/xmin"], axis=1),
                tf.expand_dims(tf_record["image/object/bbox/ymax"], axis=1),
                tf.expand_dims(tf_record["image/object/bbox/xmax"], axis=1)
            ],
                                 axis=1)
            gt_boxes_list = BoxList(gt_boxes)
            boxes_list = BoxList(tf.constant(boxes))
            iou_values = box_list_ops.iou(gt_boxes_list, boxes_list)
            iou_values = tf.math.reduce_max(iou_values, axis=1).numpy()

            features['num_detections'] = int(n_detections)

            labels = tf_record["image/object/class/label"].numpy()
            difficulty_ls = tf_record["image/object/difficult"].numpy()

            missed_boxes = []
            for box, iou, label, difficulty, xmin, ymin, xmax, ymax in zip(
                    gt_boxes.numpy(), iou_values, labels, difficulty_ls,
                    tf_record["image/object/bbox/xmin"],
                    tf_record["image/object/bbox/ymin"],
                    tf_record["image/object/bbox/xmax"],
                    tf_record["image/object/bbox/ymax"]):
                _row = features.copy()
                _row['xmin'] = xmin.numpy().astype(np.float16)
                _row['ymin'] = ymin.numpy().astype(np.float16)
                _row['xmax'] = xmax.numpy().astype(np.float16)
                _row['ymax'] = ymax.numpy().astype(np.float16)
                _row['iou'] = iou.astype(np.float16)
                iou_threshold = {1: 0.7, 2: 0.5, 3: 0.5}
                _row['detected'] = float(iou) >= iou_threshold[int(label)]
                _row['label'] = label.astype(np.int8)
                _row['difficulty'] = difficulty.astype(np.int8)
                res = res.append(_row, ignore_index=True)

                if num_images_saved < NUMBER_IMAGES_TO_SAVE and float(
                        iou) < iou_threshold[int(label)]:
                    missed_boxes.append([
                        float(xmin.numpy()),
                        float(ymin.numpy()),
                        float(xmax.numpy()),
                        float(ymax.numpy())
                    ])

            if i % 5 and num_images_saved < NUMBER_IMAGES_TO_SAVE and missed_boxes:
                num_images_saved += 1
                fig = plot_image_with_boxes(image, missed_boxes)
                fig_name = tf_record["image/source_id"].numpy().decode(
                ) + '.png'
                fig.tight_layout()
                fig.savefig(os.path.join(SAVE_IMAGES_DIR, fig_name))

            res.to_csv(output_filename, mode='a', header=False)
            res = _restart_res()

        return res
Code example #11
File: faster_rcnn_index.py  Project: e271141/models
        ii: decoded_boxes = self._box_coder.decode(rpn_box_encodings, box_list.BoxList(anchors))
                --> faster_rcnn_box_coder.FasterRcnnBoxCoder._decode()
            objectness_scores = tf.nn.softmax(rpn_objectness_predictions_with_background)
        iii: proposal_boxlist = post_processing.multiclass_non_max_suppression()
        iv: padded_proposals = box_list_ops.pad_or_clip_box_list()
      II: self._compute_second_stage_input_feature_maps()
      III: box_classifier_features = self._feature_extractor.extract_box_classifier_features()
      IV: box_predictions = self._mask_rcnn_box_predictor.predict()
      V: absolute_proposal_boxes = ops.normalized_to_image_coordinates()

B: losses_dict = detection_model.loss
  a. _loss_rpn
    1. target_assigner.batch_assign_targets()
      I: target_assigner.assign()
        i: match_quality_matrix = self._similarity_calc.compare(groundtruth_boxes, anchors)
                --> sim_calc.IouSimilarity() --> box_list_ops.iou()
        ii: match = self._matcher.match(match_quality_matrix, **params)
                --> argmax_matcher.ArgMaxMatcher._match()
        iii: reg_targets = self._create_regression_targets(anchors, groundtruth_boxes, match)
        iv: cls_targets = self._create_classification_targets(groundtruth_labels, match)
        v: reg_weights = self._create_regression_weights(match)
        vi: cls_weights = self._create_classification_weights(
                match, self._positive_class_weight, self._negative_class_weight)
    2. localization_losses = self._first_stage_localization_loss
            --> losses.WeightedSmoothL1LocalizationLoss._compute_loss()
    3. objectness_losses = self._first_stage_objectness_loss
            --> losses.WeightedSoftmaxClassificationLoss._compute_loss()
  b. _loss_box_classifier
    1. paddings_indicator = self._padded_batched_proposals_indicator
    2. second_stage_loc_losses = self._second_stage_localization_loss
            --> losses.WeightedSmoothL1LocalizationLoss._compute_loss()
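As the trace above shows, the RPN target assignment computes its match quality matrix through sim_calc.IouSimilarity(), which in turn delegates to box_list_ops.iou(). A minimal sketch of that single step, assuming the object_detection.core module layout:

import tensorflow as tf
from object_detection.core import box_list
from object_detection.core import box_list_ops
from object_detection.core import region_similarity_calculator as sim_calc

# Two ground-truth boxes and three anchors in [ymin, xmin, ymax, xmax] corners.
groundtruth_boxes = box_list.BoxList(
    tf.constant([[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 1.0, 1.0]]))
anchors = box_list.BoxList(
    tf.constant([[0.0, 0.0, 0.5, 0.5], [0.25, 0.25, 0.75, 0.75], [0.6, 0.6, 1.0, 1.0]]))

# IouSimilarity.compare() delegates to box_list_ops.iou(), so both calls
# yield the same [num_groundtruth, num_anchors] match quality matrix.
match_quality_matrix = sim_calc.IouSimilarity().compare(groundtruth_boxes, anchors)
same_matrix = box_list_ops.iou(groundtruth_boxes, anchors)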
Code example #12
tf.enable_eager_execution()

os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
# ymin, xmin, ymax, xmax
gt_boxes_array = tf.convert_to_tensor([[0.0, 0.0, 1.0, 1.0],
                                       [0.0, 0.0, 0.5, 0.5]])
anchors_array = tf.convert_to_tensor([[0.0, 0.0, 0.9, 0.9],
                                      [0.0, 0.0, 0.5, 0.5],
                                      [0.0, 0.0, 0.7, 0.7],
                                      [0.1, 0.1, 0.2, 0.2]])
#anchors = [[0.0, 0.0, 0.5, 0.5],[0.0, 0.0, 0.25, 0.25] ]
gt_boxes = BoxList(tf.convert_to_tensor(gt_boxes_array))
anchors = BoxList(tf.convert_to_tensor(anchors_array))

iou_values = box_list_ops.iou(gt_boxes, anchors)
max_iou_values = tf.math.reduce_max(iou_values, axis=1).numpy()

target_assigner = create_target_assigner('FasterRCNN',
                                         'detection',
                                         negative_class_weight=1.0,
                                         use_matmul_gather=False)

# Each row is a ground truth box, and each column is an anchor (proposal)
match_quality_matrix = target_assigner._similarity_calc.compare(
    gt_boxes, anchors)

match = target_assigner._matcher.match(match_quality_matrix)

cls_targets, cls_weights, reg_targets, reg_weights, match_results = \
    target_assigner.assign(anchors, gt_boxes, groundtruth_labels=None)