Example #1
    def test_resnet_creation(self, model_id):
        """Test creation of ResNet models."""

        network = backbones.ResNet(model_id=model_id,
                                   se_ratio=0.0,
                                   norm_momentum=0.99,
                                   norm_epsilon=1e-5)

        backbone_config = backbones_cfg.Backbone(type='resnet',
                                                 resnet=backbones_cfg.ResNet(
                                                     model_id=model_id,
                                                     se_ratio=0.0))
        norm_activation_config = common_cfg.NormActivation(norm_momentum=0.99,
                                                           norm_epsilon=1e-5,
                                                           use_sync_bn=False)

        factory_network = factory.build_backbone(
            input_specs=tf.keras.layers.InputSpec(shape=[None, None, None, 3]),
            backbone_config=backbone_config,
            norm_activation_config=norm_activation_config)

        network_config = network.get_config()
        factory_network_config = factory_network.get_config()

        self.assertEqual(network_config, factory_network_config)
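The model_id argument above is not hard-coded; it is supplied by the test runner. A minimal sketch of how the method might be parameterized, assuming absl's parameterized helper and illustrative ResNet depths (the exact IDs covered by the original file are not shown here):

from absl.testing import parameterized

# Illustrative ResNet depths; the original test may cover a different set.
@parameterized.parameters(18, 34, 50, 101)
def test_resnet_creation(self, model_id):
    ...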
Example #2
  def test_model_creation(self, project_dim, num_proj_layers, ft_proj_idx):
    """Test building the SimCLR model and checking its pretrain outputs."""
    input_size = 224
    inputs = np.random.rand(2, input_size, input_size, 3)
    input_specs = tf.keras.layers.InputSpec(
        shape=[None, input_size, input_size, 3])

    tf.keras.backend.set_image_data_format('channels_last')

    backbone = backbones.ResNet(model_id=50, activation='relu',
                                input_specs=input_specs)
    projection_head = simclr_head.ProjectionHead(
        proj_output_dim=project_dim,
        num_proj_layers=num_proj_layers,
        ft_proj_idx=ft_proj_idx
    )
    num_classes = 10
    supervised_head = simclr_head.ClassificationHead(
        num_classes=num_classes
    )

    model = simclr_model.SimCLRModel(
        input_specs=input_specs,
        backbone=backbone,
        projection_head=projection_head,
        supervised_head=supervised_head,
        mode=simclr_model.PRETRAIN
    )
    outputs = model(inputs)
    projection_outputs = outputs[simclr_model.PROJECTION_OUTPUT_KEY]
    supervised_outputs = outputs[simclr_model.SUPERVISED_OUTPUT_KEY]

    self.assertAllEqual(projection_outputs.shape.as_list(),
                        [2, project_dim])
    self.assertAllEqual([2, num_classes],
                        supervised_outputs.numpy().shape)
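Likewise, the (project_dim, num_proj_layers, ft_proj_idx) combinations come from the test runner. A sketch with made-up values, assuming absl's dict-style parameterization:

# Hypothetical parameter combinations; the original test may use others.
@parameterized.parameters(
    dict(project_dim=64, num_proj_layers=3, ft_proj_idx=1),
    dict(project_dim=128, num_proj_layers=2, ft_proj_idx=0),
)
def test_model_creation(self, project_dim, num_proj_layers, ft_proj_idx):
  ...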
Example #3
    def test_serialize_deserialize(self, level, low_level, shared_decoder):
        """Validate the network can be serialized and deserialized."""
        num_classes = 10
        backbone = backbones.ResNet(model_id=50)

        semantic_decoder = aspp.ASPP(level=level, dilation_rates=[6, 12, 18])

        if shared_decoder:
            instance_decoder = semantic_decoder
        else:
            instance_decoder = aspp.ASPP(level=level,
                                         dilation_rates=[6, 12, 18])

        semantic_head = panoptic_deeplab_heads.SemanticHead(
            num_classes,
            level=level,
            low_level=low_level,
            low_level_num_filters=(64, 32))

        instance_head = panoptic_deeplab_heads.InstanceHead(
            level=level, low_level=low_level, low_level_num_filters=(64, 32))

        post_processor = panoptic_deeplab_merge.PostProcessor(
            output_size=[640, 640],
            center_score_threshold=0.1,
            thing_class_ids=[1, 2, 3, 4],
            label_divisor=[256],
            stuff_area_limit=4096,
            ignore_label=0,
            nms_kernel=41,
            keep_k_centers=41,
            rescale_predictions=True)

        model = panoptic_deeplab_model.PanopticDeeplabModel(
            backbone=backbone,
            semantic_decoder=semantic_decoder,
            instance_decoder=instance_decoder,
            semantic_head=semantic_head,
            instance_head=instance_head,
            post_processor=post_processor)

        config = model.get_config()
        new_model = panoptic_deeplab_model.PanopticDeeplabModel.from_config(
            config)

        # Validate that the config can be forced to JSON.
        _ = new_model.to_json()

        # If the serialization was successful, the new config should match the old.
        self.assertAllEqual(model.get_config(), new_model.get_config())
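The to_json() call above only checks that the config is JSON-serializable. A sketch of completing the round trip with the standard Keras API, assuming every custom class referenced in the config is resolvable (registered as a Keras serializable or passed via custom_objects):

json_str = new_model.to_json()
restored = tf.keras.models.model_from_json(
    json_str,
    custom_objects={
        'PanopticDeeplabModel': panoptic_deeplab_model.PanopticDeeplabModel,
        # ...plus any custom backbone/decoder/head classes in the config.
    })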
Example #4
  def test_sync_bn_multiple_devices(self, strategy, use_sync_bn):
    """Test for sync bn on TPU and GPU devices."""
    inputs = np.random.rand(64, 128, 128, 3)

    tf.keras.backend.set_image_data_format('channels_last')

    with strategy.scope():
      backbone = backbones.ResNet(model_id=50, use_sync_bn=use_sync_bn)

      model = classification_model.ClassificationModel(
          backbone=backbone,
          num_classes=1000,
          dropout_rate=0.2,
      )
      _ = model(inputs)
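The strategy argument is injected by the parameterization. A minimal sketch of what it might stand for, using only the public tf.distribute API (the strategies exercised by the original test are not shown here):

strategy = tf.distribute.MirroredStrategy()   # multi-GPU; TPUStrategy would need a cluster resolver
with strategy.scope():
  backbone = backbones.ResNet(model_id=50, use_sync_bn=True)
  model = classification_model.ClassificationModel(
      backbone=backbone, num_classes=1000, dropout_rate=0.2)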
Example #5
  def test_serialize_deserialize(self):
    """Validate the classification net can be serialized and deserialized."""

    tf.keras.backend.set_image_data_format('channels_last')
    backbone = backbones.ResNet(model_id=50)

    model = classification_model.ClassificationModel(
        backbone=backbone, num_classes=1000)

    config = model.get_config()
    new_model = classification_model.ClassificationModel.from_config(config)

    # Validate that the config can be forced to JSON.
    _ = new_model.to_json()

    # If the serialization was successful, the new config should match the old.
    self.assertAllEqual(model.get_config(), new_model.get_config())
Example #6
  def test_data_format_gpu(self, strategy, data_format, input_dim):
    """Test for different data formats on GPU devices."""
    if data_format == 'channels_last':
      inputs = np.random.rand(2, 128, 128, input_dim)
    else:
      inputs = np.random.rand(2, input_dim, 128, 128)
    input_specs = tf.keras.layers.InputSpec(shape=inputs.shape)

    tf.keras.backend.set_image_data_format(data_format)

    with strategy.scope():
      backbone = backbones.ResNet(model_id=50, input_specs=input_specs)

      model = classification_model.ClassificationModel(
          backbone=backbone,
          num_classes=1000,
          input_specs=input_specs,
      )
      _ = model(inputs)
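Note that 'channels_first' moves the channel axis to position 1, so a batch generated in NHWC layout would have to be transposed before it matches the input spec. A small illustrative conversion:

nhwc = np.random.rand(2, 128, 128, 3)        # channels_last layout
nchw = np.transpose(nhwc, (0, 3, 1, 2))      # same data in channels_first layout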
Example #7
  def test_resnet_network_creation(
      self, input_size, resnet_model_id, activation):
    """Test for creation of a ResNet-50 classifier."""
    inputs = np.random.rand(2, input_size, input_size, 3)

    tf.keras.backend.set_image_data_format('channels_last')

    backbone = backbones.ResNet(
        model_id=resnet_model_id, activation=activation)
    self.assertEqual(backbone.count_params(), 23561152)

    num_classes = 1000
    model = classification_model.ClassificationModel(
        backbone=backbone,
        num_classes=num_classes,
        dropout_rate=0.2,
    )
    self.assertEqual(model.count_params(), 25610152)

    logits = model(inputs)
    self.assertAllEqual([2, num_classes], logits.numpy().shape)
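The two asserted parameter counts are mutually consistent: a ResNet-50 backbone ends in 2048-dimensional features, and the classifier only adds a single dense layer on top of them (global pooling and dropout contribute no parameters). A quick arithmetic check:

backbone_params = 23_561_152
head_params = 2048 * 1000 + 1000              # dense kernel + bias for 1000 classes
assert backbone_params + head_params == 25_610_152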
Example #8
  def test_serialize_deserialize(self):
    """Validate the network can be serialized and deserialized."""
    num_classes = 3
    backbone = backbones.ResNet(model_id=50)
    decoder = fpn.FPN(
        input_specs=backbone.output_specs, min_level=3, max_level=7)
    head = segmentation_heads.SegmentationHead(num_classes, level=3)
    model = segmentation_model.SegmentationModel(
        backbone=backbone,
        decoder=decoder,
        head=head
    )

    config = model.get_config()
    new_model = segmentation_model.SegmentationModel.from_config(config)

    # Validate that the config can be forced to JSON.
    _ = new_model.to_json()

    # If the serialization was successful, the new config should match the old.
    self.assertAllEqual(model.get_config(), new_model.get_config())
Example #9
  def test_segmentation_network_creation(
      self, input_size, level):
    """Test for creation of a segmentation network."""
    num_classes = 10
    inputs = np.random.rand(2, input_size, input_size, 3)
    tf.keras.backend.set_image_data_format('channels_last')
    backbone = backbones.ResNet(model_id=50)

    decoder = fpn.FPN(
        input_specs=backbone.output_specs, min_level=2, max_level=7)
    head = segmentation_heads.SegmentationHead(num_classes, level=level)

    model = segmentation_model.SegmentationModel(
        backbone=backbone,
        decoder=decoder,
        head=head,
        mask_scoring_head=None,
    )

    outputs = model(inputs)
    self.assertAllEqual(
        [2, input_size // (2**level), input_size // (2**level), num_classes],
        outputs['logits'].numpy().shape)
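The asserted logits shape follows from the head's level: each pyramid level halves the spatial resolution, so the logits are input_size // 2**level on a side. For example, with the illustrative values input_size=256 and level=3:

input_size, level, num_classes = 256, 3, 10   # illustrative values
expected_shape = [2, input_size // 2**level, input_size // 2**level, num_classes]
# -> [2, 32, 32, 10]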
Example #10
    def test_panoptic_deeplab_network_creation(self, input_size, level,
                                               low_level, shared_decoder,
                                               training):
        """Test for creation of a panoptic deeplab network."""
        batch_size = 2 if training else 1
        num_classes = 10
        inputs = np.random.rand(batch_size, input_size, input_size, 3)

        image_info = tf.convert_to_tensor([[[input_size, input_size],
                                            [input_size, input_size], [1, 1],
                                            [0, 0]]])
        image_info = tf.tile(image_info, [batch_size, 1, 1])

        tf.keras.backend.set_image_data_format('channels_last')
        backbone = backbones.ResNet(model_id=50)

        semantic_decoder = aspp.ASPP(level=level, dilation_rates=[6, 12, 18])

        if shared_decoder:
            instance_decoder = semantic_decoder
        else:
            instance_decoder = aspp.ASPP(level=level,
                                         dilation_rates=[6, 12, 18])

        semantic_head = panoptic_deeplab_heads.SemanticHead(
            num_classes,
            level=level,
            low_level=low_level,
            low_level_num_filters=(64, 32))

        instance_head = panoptic_deeplab_heads.InstanceHead(
            level=level, low_level=low_level, low_level_num_filters=(64, 32))

        post_processor = panoptic_deeplab_merge.PostProcessor(
            output_size=[input_size, input_size],
            center_score_threshold=0.1,
            thing_class_ids=[1, 2, 3, 4],
            label_divisor=[256],
            stuff_area_limit=4096,
            ignore_label=0,
            nms_kernel=41,
            keep_k_centers=41,
            rescale_predictions=True)

        model = panoptic_deeplab_model.PanopticDeeplabModel(
            backbone=backbone,
            semantic_decoder=semantic_decoder,
            instance_decoder=instance_decoder,
            semantic_head=semantic_head,
            instance_head=instance_head,
            post_processor=post_processor)

        outputs = model(inputs=inputs,
                        image_info=image_info,
                        training=training)

        if training:
            self.assertIn('segmentation_outputs', outputs)
            self.assertIn('instance_centers_heatmap', outputs)
            self.assertIn('instance_centers_offset', outputs)

            self.assertAllEqual([
                2, input_size // (2**low_level[-1]), input_size //
                (2**low_level[-1]), num_classes
            ], outputs['segmentation_outputs'].numpy().shape)
            self.assertAllEqual([
                2, input_size // (2**low_level[-1]), input_size //
                (2**low_level[-1]), 1
            ], outputs['instance_centers_heatmap'].numpy().shape)
            self.assertAllEqual([
                2, input_size // (2**low_level[-1]), input_size //
                (2**low_level[-1]), 2
            ], outputs['instance_centers_offset'].numpy().shape)

        else:
            self.assertIn('panoptic_outputs', outputs)
            self.assertIn('category_mask', outputs)
            self.assertIn('instance_mask', outputs)
            self.assertIn('instance_centers', outputs)
            self.assertIn('instance_scores', outputs)
            self.assertIn('segmentation_outputs', outputs)
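All of the snippets above are methods of parameterized Keras test cases and rely on a common scaffold. A minimal sketch of that scaffold, with import paths guessed from the TensorFlow Model Garden layout (they vary between releases, so treat them as assumptions):

import numpy as np
import tensorflow as tf
from absl.testing import parameterized

# Assumed Model Garden import; exact module paths differ across versions.
from official.vision.modeling import backbones

class ModelTest(parameterized.TestCase, tf.test.TestCase):
  # ...test methods from the examples above go here...
  pass

if __name__ == '__main__':
  tf.test.main()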