Example 1
    def test_rpn_scriptability(self):
        cfg = RCNNConfig()
        proposal_generator = RPN(cfg, {
            "res4": ShapeSpec(channels=1024, stride=16)
        }).eval()
        num_images = 2
        images_tensor = torch.rand(num_images, 30, 40)
        image_sizes = [(32, 32), (30, 40)]
        images = ImageList(images_tensor, image_sizes)
        features = {"res4": torch.rand(num_images, 1024, 1, 2)}

        # Instances fields (name -> type) that the scripted module must handle.
        fields = {"proposal_boxes": "Boxes", "objectness_logits": "Tensor"}
        proposal_generator_ts = export_torchscript_with_instances(
            proposal_generator, fields)  # noqa

        proposals, _ = proposal_generator(images, features)
        proposals_ts, _ = proposal_generator_ts(images, features)

        # The scripted module's proposals must match the eager module's exactly.
        for proposal, proposal_ts in zip(proposals, proposals_ts):
            self.assertEqual(proposal.image_size, proposal_ts.image_size)
            self.assertTrue(
                torch.equal(proposal.proposal_boxes.tensor,
                            proposal_ts.proposal_boxes.tensor))
            self.assertTrue(
                torch.equal(proposal.objectness_logits,
                            proposal_ts.objectness_logits))
Example 2
    def test_roi_heads(self):
        torch.manual_seed(121)
        cfg = RCNNConfig()
        # PROPOSAL_GENERATOR: "RPN"
        # ROI_HEADS: "StandardROIHeads"
        # ROI_BOX_HEAD: "FastRCNNConvFCHead"
        cfg.MODEL.RESNETS.DEPTH = 50
        cfg.MODEL.ROI_BOX_HEAD.NUM_FC = 2
        cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE = "ROIAlignV2"
        cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5)

        # Factory that builds the Fast R-CNN box head from the config and input shape.
        def build_box_head(cfg, input_shape):
            return FastRCNNConvFCHead(cfg, input_shape)
        cfg.build_box_head = build_box_head

        backbone = build_backbone(cfg)
        num_images = 2
        images_tensor = torch.rand(num_images, 20, 30)
        image_sizes = [(10, 10), (20, 30)]
        images = ImageList(images_tensor, image_sizes)
        num_channels = 1024
        features = {"res4": torch.rand(num_images, num_channels, 1, 2)}

        image_shape = (15, 15)
        gt_boxes0 = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32)
        gt_instance0 = Instances(image_shape)
        gt_instance0.gt_boxes = Boxes(gt_boxes0)
        gt_instance0.gt_classes = torch.tensor([2, 1])
        gt_boxes1 = torch.tensor([[1, 5, 2, 8], [7, 3, 10, 5]], dtype=torch.float32)
        gt_instance1 = Instances(image_shape)
        gt_instance1.gt_boxes = Boxes(gt_boxes1)
        gt_instance1.gt_classes = torch.tensor([1, 2])
        gt_instances = [gt_instance0, gt_instance1]

        proposal_generator = RPN(cfg, backbone.output_shape())
        roi_heads = StandardROIHeads(cfg, backbone.output_shape())

        with EventStorage():  # capture events in a new storage to discard them
            proposals, proposal_losses = proposal_generator(images, features, gt_instances)
            _, detector_losses = roi_heads(images, features, proposals, gt_instances)

        expected_losses = {
            "loss_cls": torch.tensor(4.4236516953),
            "loss_box_reg": torch.tensor(0.0091214813),
        }
        for name in expected_losses.keys():
            self.assertTrue(torch.allclose(detector_losses[name], expected_losses[name]))
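A hedged sketch, not part of the original test, of how the same modules might be driven at inference time, assuming detectron2's forward signatures (no ground truth is passed and the returned loss dicts are empty in eval mode):

# Inference-mode sketch for the modules built above (assumption: detectron2's
# RPN.forward and StandardROIHeads.forward signatures).
proposal_generator.eval()
roi_heads.eval()
with torch.no_grad():
    proposals, _ = proposal_generator(images, features)         # loss dict is empty in eval mode
    pred_instances, _ = roi_heads(images, features, proposals)  # predicted Instances per image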
Example 3
def build_proposal_generator(cfg, input_shape):
    # Simple factory: build the RPN proposal generator from the config and the
    # per-feature ShapeSpec dict describing its input feature maps.
    return RPN(cfg, input_shape)
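A hedged usage sketch for this factory, reusing the cfg and backbone built in the surrounding examples (the variable names are assumptions borrowed from those examples):

# Usage sketch: backbone.output_shape() returns a dict of feature name -> ShapeSpec,
# e.g. {"res4": ShapeSpec(channels=1024, stride=16)}, which is what the RPN
# expects as its input_shape argument.
backbone = build_backbone(cfg)
proposal_generator = build_proposal_generator(cfg, backbone.output_shape())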
Example 4
    def test_rpn(self):
        torch.manual_seed(121)
        cfg = RCNNConfig()
        # PROPOSAL_GENERATOR: "RPN"
        # ANCHOR_GENERATOR: "DefaultAnchorGenerator"
        cfg.MODEL.RESNETS.DEPTH = 50
        cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1)
        backbone = build_backbone(cfg)
        proposal_generator = RPN(cfg, backbone.output_shape())
        num_images = 2
        images_tensor = torch.rand(num_images, 20, 30)
        image_sizes = [(10, 10), (20, 30)]
        images = ImageList(images_tensor, image_sizes)
        image_shape = (15, 15)
        num_channels = 1024
        features = {"res4": torch.rand(num_images, num_channels, 1, 2)}
        gt_boxes = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32)
        gt_instances = Instances(image_shape)
        gt_instances.gt_boxes = Boxes(gt_boxes)
        with EventStorage():  # capture events in a new storage to discard them
            proposals, proposal_losses = proposal_generator(
                images, features, [gt_instances[0], gt_instances[1]]  # one single-box Instances per image
            )

        expected_losses = {
            "loss_rpn_cls": torch.tensor(0.0804563984),
            "loss_rpn_loc": torch.tensor(0.0990132466),
        }
        for name in expected_losses.keys():
            err_msg = "proposal_losses[{}] = {}, expected losses = {}".format(
                name, proposal_losses[name], expected_losses[name]
            )
            self.assertTrue(torch.allclose(proposal_losses[name], expected_losses[name]), err_msg)

        expected_proposal_boxes = [
            Boxes(torch.tensor([[0, 0, 10, 10], [7.3365392685, 0, 10, 10]])),
            Boxes(
                torch.tensor(
                    [
                        [0, 0, 30, 20],
                        [0, 0, 16.7862777710, 13.1362524033],
                        [0, 0, 30, 13.3173446655],
                        [0, 0, 10.8602609634, 20],
                        [7.7165775299, 0, 27.3875980377, 20],
                    ]
                )
            ),
        ]

        expected_objectness_logits = [
            torch.tensor([0.1225359365, -0.0133192837]),
            torch.tensor([0.1415634006, 0.0989848152, 0.0565387346, -0.0072308783, -0.0428492837]),
        ]

        for proposal, expected_proposal_box, im_size, expected_objectness_logit in zip(
            proposals, expected_proposal_boxes, image_sizes, expected_objectness_logits
        ):
            self.assertEqual(len(proposal), len(expected_proposal_box))
            self.assertEqual(proposal.image_size, im_size)
            self.assertTrue(
                torch.allclose(proposal.proposal_boxes.tensor, expected_proposal_box.tensor)
            )
            self.assertTrue(torch.allclose(proposal.objectness_logits, expected_objectness_logit))
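As a follow-up, a hedged sketch, not part of the original test, of a looser check that compares predicted and expected proposals by overlap rather than exact coordinates, using detectron2's pairwise_iou:

from detectron2.structures import pairwise_iou

# Each predicted box should overlap its expected counterpart almost perfectly,
# so the diagonal of the pairwise IoU matrix should be ~1.
for proposal, expected_proposal_box in zip(proposals, expected_proposal_boxes):
    iou = pairwise_iou(proposal.proposal_boxes, expected_proposal_box)
    assert torch.all(iou.diagonal() > 0.99)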