Example #1
def build_backbone(features, config):
    """Builds the EfficientNet backbone; returns a dict from level (2-5) to feature map."""
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'relu_fn': tf.nn.swish,
            'batch_norm': utils.batch_norm_class(is_training_bn),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        _, endpoints = efficientnet_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {2: u2, 3: u3, 4: u4, 5: u5}
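A minimal usage sketch for this builder, hedged: `hparams_config.get_efficientdet_config` comes from the same repo (see Example #9), while the eager-mode setup here is an assumption.

import tensorflow as tf

import hparams_config  # repo helper that builds the config object (see Example #9)

config = hparams_config.get_efficientdet_config('efficientdet-d0')
images = tf.ones([8, 512, 512, 3])  # NHWC batch of 512x512 images
feats = build_backbone(images, config)
for level, feat in feats.items():
    # Level k has stride 2**k: 128x128, 64x64, 32x32 and 16x16 maps here.
    print(level, feat.shape)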
Example #2
 def test_bottleneck_block(self):
   """Test for creating a model with bottleneck block arguments."""
   images = tf.zeros((10, 128, 128, 3), dtype=tf.float32)
   global_params = efficientnet_model.GlobalParams(
       1.0,
       1.0,
       0,
       'channels_last',
       num_classes=10,
       batch_norm=utils.batch_norm_class(False))
   blocks_args = [
       efficientnet_model.BlockArgs(
           kernel_size=3,
           num_repeat=3,
           input_filters=3,
           output_filters=6,
           expand_ratio=6,
           id_skip=True,
           strides=[2, 2],
           conv_type=0,
           fused_conv=0,
           super_pixel=0)
   ]
   model = efficientnet_model.Model(blocks_args, global_params)
   outputs = model(images, training=True)
   self.assertEqual((10, 10), outputs.shape)
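For context, `self.assertEqual` means this method lives in a `tf.test.TestCase` subclass; a hedged skeleton of that harness (the class name is an assumption):

import tensorflow as tf

class EfficientnetModelTest(tf.test.TestCase):  # class name assumed
  pass  # test_bottleneck_block above would be defined here


if __name__ == '__main__':
  tf.test.main()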
Example #3
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A list of feature maps from the backbone output, one per level in
    [config.min_level, config.max_level]; the map at level k has stride 2**k.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'batch_norm':
            utils.batch_norm_class(is_training, config.strategy),
            'relu_fn':
            functools.partial(utils.activation_fn, act_type=config.act_type),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        override_params['data_format'] = config.data_format
        model_builder = backbone_factory.get_model_builder(backbone_name)
        _, endpoints = model_builder.build_model_base(
            features,
            backbone_name,
            training=is_training,
            override_params=override_params)

        all_feats = [
            features,
            endpoints['reduction_1'],
            endpoints['reduction_2'],
            endpoints['reduction_3'],
            endpoints['reduction_4'],
            endpoints['reduction_5'],
        ]
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))

    # Only return features within the expected levels.
    return all_feats[config.min_level:config.max_level + 1]
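Note that the final slice indexes `all_feats` by level and that Python slicing clamps at the list end. With the repo's default `min_level=3, max_level=7`, only the three backbone maps with strides 8, 16 and 32 come back; levels 6-7 are added later by downsampling (see Example #9). A hedged illustration:

import tensorflow as tf

import hparams_config  # repo helper (assumption: importable as in Example #9)

config = hparams_config.get_efficientdet_config('efficientdet-d0')  # min_level=3, max_level=7
feats = build_backbone(tf.ones([1, 512, 512, 3]), config)
# The slice [3:8] clamps to indices 3..5 of the six collected maps, so we get
# three tensors with strides 8, 16 and 32 (64x64, 32x32 and 16x16 here).
assert len(feats) == 3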
Example #4
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A dict from levels (0 to 5) to feature maps from the backbone model; the
    map at level k has stride 2**k, with the raw input at level 0.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'batch_norm':
            utils.batch_norm_class(is_training_bn, config.strategy),
            'relu_fn':
            functools.partial(utils.activation_fn, act_type=config.act_type),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        override_params['data_format'] = config.data_format
        model_builder = backbone_factory.get_model_builder(backbone_name)
        _, endpoints = model_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u1 = endpoints['reduction_1']
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {0: features, 1: u1, 2: u2, 3: u3, 4: u4, 5: u5}
Example #5
def build_batch_norm(is_training_bn: bool,
                     beta_initializer: Text = 'zeros',
                     gamma_initializer: Text = 'ones',
                     data_format: Text = 'channels_last',
                     momentum: float = 0.99,
                     epsilon: float = 1e-3,
                     strategy: Text = None,
                     name: Text = 'tpu_batch_normalization'):
    """Build a batch normalization layer.

  Args:
    is_training_bn: `bool` for whether the model is training.
    beta_initializer: `str`, beta initializer.
    gamma_initializer: `str`, gamma initializer.
    data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
    momentum: `float`, momentum of batch norm.
    epsilon: `float`, small value for numerical stability.
    strategy: `str`, whether to use tpu, horovod or other version of batch norm.
    name: the name of the batch normalization layer

  Returns:
    A batch normalization layer for the given `data_format`.
  """
    axis = 1 if data_format == 'channels_first' else -1
    if is_training_bn:
        if strategy in ('gpus',):
            batch_norm_class = tf.keras.layers.experimental.SyncBatchNormalization
        else:
            # TODO(tanmingxing): compare them on TPU.
            batch_norm_class = utils.batch_norm_class(is_training_bn, strategy)
    else:
        batch_norm_class = tf.keras.layers.BatchNormalization

    bn_layer = batch_norm_class(axis=axis,
                                momentum=momentum,
                                epsilon=epsilon,
                                center=True,
                                scale=True,
                                beta_initializer=beta_initializer,
                                gamma_initializer=gamma_initializer,
                                name=name)

    return bn_layer
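A hedged eager-mode sketch of building and applying the layer; the inference branch exercised below only needs plain `tf.keras.layers.BatchNormalization`:

import tensorflow as tf

bn = build_batch_norm(is_training_bn=False)  # falls through to Keras BatchNormalization
x = tf.random.normal([4, 32, 32, 64])        # NHWC feature map
y = bn(x, training=False)                    # normalize with the moving statistics
assert y.shape == x.shape                    # batch norm preserves the shape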
Example #6
def batch_normalization(is_training_bn: bool,
                        init_zero: bool = False,
                        data_format: Text = 'channels_last',
                        momentum: float = 0.99,
                        epsilon: float = 1e-3,
                        use_tpu: bool = False,
                        name: Text = None):
    """Performs a batch normalization followed by a non-linear activation.

  Args:
    is_training_bn: `bool` for whether the model is training.
    init_zero: `bool` if True, initializes scale parameter of batch
      normalization with 0 instead of 1 (default).
    data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
    momentum: `float`, momentum of batch norm.
    epsilon: `float`, small value for numerical stability.
    use_tpu: `bool`, whether to use tpu version of batch norm.
    name: the name of the batch normalization layer

  Returns:
    A batch normalization layer for the given `data_format`.
  """
    if init_zero:
        gamma_initializer = tf.zeros_initializer()
    else:
        gamma_initializer = tf.ones_initializer()

    if data_format == 'channels_first':
        axis = 1
    else:
        axis = 3

    bn_layer = batch_norm_class(is_training_bn, use_tpu)(
        axis=axis,
        momentum=momentum,
        epsilon=epsilon,
        center=True,
        scale=True,
        gamma_initializer=gamma_initializer,
        name=name)

    return bn_layer
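`init_zero` is the standard residual-network trick: starting gamma at zero makes the last batch norm of a residual branch output zeros, so each block begins as an identity mapping and early training is more stable. A one-line sketch (hedged: `batch_norm_class` is assumed to resolve as in the surrounding examples):

# Final BN of a residual branch: the branch contributes nothing at step 0.
bn_out = batch_normalization(is_training_bn=True, init_zero=True, name='bn_out')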
Example #7
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A dict from levels (2 to 5) to the feature maps from the output of the
    backbone model, with strides of 4, 8, 16 and 32.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'relu_fn': utils.backbone_relu_fn,
            'batch_norm': utils.batch_norm_class(is_training_bn),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        _, endpoints = efficientnet_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {2: u2, 3: u3, 4: u4, 5: u5}
Example #8
def build_batch_norm(is_training_bn: bool,
                     strategy: Text = None,
                     init_zero: bool = False,
                     data_format: Text = 'channels_last',
                     momentum: float = 0.99,
                     epsilon: float = 1e-3,
                     name: Text = 'tpu_batch_normalization'):
    """Build a batch normalization layer.

    Args:
      is_training_bn: `bool` for whether the model is training.
      strategy: `str`, whether to use tpu, horovod or other version of batch norm.
      init_zero: `bool` if True, initializes scale parameter of batch
        normalization with 0 instead of 1 (default).
      data_format: `str` either "channels_first" for `[batch, channels, height,
        width]` or "channels_last" for `[batch, height, width, channels]`.
      momentum: `float`, momentum of batch norm.
      epsilon: `float`, small value for numerical stability.
      name: the name of the batch normalization layer

    Returns:
      A batch normalization layer for the given `data_format`.
    """
    if init_zero:
        gamma_initializer = tf.zeros_initializer()
    else:
        gamma_initializer = tf.ones_initializer()

    axis = 1 if data_format == 'channels_first' else -1
    batch_norm_class = utils.batch_norm_class(is_training_bn, strategy)
    bn_layer = batch_norm_class(axis=axis,
                                momentum=momentum,
                                epsilon=epsilon,
                                center=True,
                                scale=True,
                                gamma_initializer=gamma_initializer,
                                name=name)

    return bn_layer
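A small eager check of `init_zero` (hedged: it assumes `utils.batch_norm_class(False, None)` falls back to `tf.keras.layers.BatchNormalization`, as it does in the repo's `utils`):

import tensorflow as tf

bn = build_batch_norm(is_training_bn=False, init_zero=True)
bn.build([None, 8, 8, 16])             # create variables for 16 channels
print(float(tf.reduce_sum(bn.gamma)))  # 0.0 -- gamma starts at zero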
Example #9
  def __init__(self, model_name=None, config=None, name=''):
    """Initialize model."""
    super().__init__(name=name)

    config = config or hparams_config.get_efficientdet_config(model_name)
    self.config = config

    # Backbone.
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
      override_params = {
          'batch_norm':
              utils.batch_norm_class(is_training_bn, config.strategy),
          'relu_fn':
              functools.partial(utils.activation_fn, act_type=config.act_type),
      }
      if 'b0' in backbone_name:
        override_params['survival_prob'] = 0.0
      if config.backbone_config is not None:
        override_params['blocks_args'] = (
            efficientnet_builder.BlockDecoder().encode(
                config.backbone_config.blocks))
      override_params['data_format'] = config.data_format
      self.backbone = backbone_factory.get_model(
          backbone_name, override_params=override_params)

    # Feature network.
    self.resample_layers = []  # additional resampling layers.
    for level in range(6, config.max_level + 1):
      # Adds a coarser level by downsampling the last feature map.
      self.resample_layers.append(
          ResampleFeatureMap(
              feat_level=(level - config.min_level),
              target_num_channels=config.fpn_num_filters,
              apply_bn=config.apply_bn_for_resampling,
              is_training_bn=config.is_training_bn,
              conv_after_downsample=config.conv_after_downsample,
              strategy=config.strategy,
              data_format=config.data_format,
              name='resample_p%d' % level,
          ))
    self.fpn_cells = FPNCells(config)

    # class/box output prediction network.
    num_anchors = len(config.aspect_ratios) * config.num_scales
    num_filters = config.fpn_num_filters
    for head in config.heads:
      if head == 'object_detection':
        self.class_net = ClassNet(
            num_classes=config.num_classes,
            num_anchors=num_anchors,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            repeats=config.box_class_repeats,
            separable_conv=config.separable_conv,
            survival_prob=config.survival_prob,
            strategy=config.strategy,
            data_format=config.data_format)

        self.box_net = BoxNet(
            num_anchors=num_anchors,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            repeats=config.box_class_repeats,
            separable_conv=config.separable_conv,
            survival_prob=config.survival_prob,
            strategy=config.strategy,
            data_format=config.data_format)

      if head == 'segmentation':
        self.seg_head = SegmentationHead(
            num_classes=config.seg_num_classes,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            strategy=config.strategy,
            data_format=config.data_format)
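A hedged instantiation sketch: this `__init__` matches the repo's Keras `EfficientDetNet`, so the class name and call signature below are assumptions drawn from that context:

import tensorflow as tf

model = EfficientDetNet('efficientdet-d0')  # class name assumed
images = tf.ones([1, 512, 512, 3])
# For the default 'object_detection' head, call() is expected to return
# per-level class and box outputs (one tensor per level in [min_level, max_level]).
cls_outputs, box_outputs = model(images, training=False)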