Example 1
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A dict from levels to the feature maps from the output of the backbone model
    with strides of 8, 16 and 32.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'batch_norm':
            utils.batch_norm_class(is_training_bn, config.strategy),
            'relu_fn':
            functools.partial(utils.activation_fn, act_type=config.act_type),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        override_params['data_format'] = config.data_format
        model_builder = backbone_factory.get_model_builder(backbone_name)
        _, endpoints = model_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u1 = endpoints[0]
        u2 = endpoints[1]
        u3 = endpoints[2]
        u4 = endpoints[3]
        u5 = endpoints[4]
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {0: features, 1: u1, 2: u2, 3: u3, 4: u4, 5: u5}
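For orientation, here is a minimal call sketch (not from the source): it assumes the rest of the EfficientDet repo is importable, so that hparams_config and the modules used above (utils, backbone_factory, efficientnet_builder) resolve, and it uses TF1-style graph mode because build_model_base builds a graph rather than running eagerly.

import tensorflow.compat.v1 as tf
import hparams_config  # assumed importable from the EfficientDet repo

tf.disable_eager_execution()  # build_model_base is a graph-building API

config = hparams_config.get_efficientdet_config('efficientdet-d0')
config.is_training_bn = False

images = tf.placeholder(tf.float32, [None, 512, 512, 3])
feats = build_backbone(images, config)
# feats maps feature levels to tensors: level 0 is the raw input and
# levels 1-5 are the backbone endpoints (strides 2, 4, 8, 16 and 32).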
Example 2
def build_batch_norm(is_training_bn: bool,
                     beta_initializer: Text = 'zeros',
                     gamma_initializer: Text = 'ones',
                     data_format: Text = 'channels_last',
                     momentum: float = 0.99,
                     epsilon: float = 1e-3,
                     strategy: Optional[Text] = None,
                     name: Text = 'tpu_batch_normalization'):
  """Build a batch normalization layer.

  Args:
    is_training_bn: `bool` for whether the model is training.
    beta_initializer: `str`, beta initializer.
    gamma_initializer: `str`, gamma initializer.
    data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
    momentum: `float`, momentum of batch norm.
    epsilon: `float`, small value for numerical stability.
    strategy: `str`, whether to use tpu, gpus or other version of batch norm.
    name: the name of the batch normalization layer.

  Returns:
    A batch normalization layer configured with the given parameters.
  """
  axis = 1 if data_format == 'channels_first' else -1
  batch_norm_class = utils.batch_norm_class(is_training_bn, strategy)

  bn_layer = batch_norm_class(
      axis=axis,
      momentum=momentum,
      epsilon=epsilon,
      center=True,
      scale=True,
      beta_initializer=beta_initializer,
      gamma_initializer=gamma_initializer,
      name=name)

  return bn_layer
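A short usage sketch (hypothetical): build_batch_norm returns a Keras layer, so applying it to a tensor is a separate step. This assumes the repo's utils module is importable and TensorFlow 2 eager mode is active.

import tensorflow as tf

# Inference-mode BN over a dummy NHWC feature map.
bn = build_batch_norm(is_training_bn=False, data_format='channels_last')
x = tf.random.normal([2, 32, 32, 64])
y = bn(x, training=False)
print(y.shape)  # (2, 32, 32, 64): the shape and data_format are preserved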
Example 3
    def __init__(self,
                 model_name=None,
                 config=None,
                 name='',
                 feature_only=False):
        """Initialize model."""
        super().__init__(name=name)

        config = config or hparams_config.get_efficientdet_config(model_name)
        self.config = config

        # Backbone.
        backbone_name = config.backbone_name
        is_training_bn = config.is_training_bn
        if 'efficientnet' in backbone_name:
            override_params = {
                'batch_norm':
                utils.batch_norm_class(is_training_bn, config.strategy),
                'relu_fn':
                functools.partial(utils.activation_fn,
                                  act_type=config.act_type),
                'grad_checkpoint':
                self.config.grad_checkpoint
            }
            if 'b0' in backbone_name:
                override_params['survival_prob'] = 0.0
            if config.backbone_config is not None:
                override_params['blocks_args'] = (
                    efficientnet_builder.BlockDecoder().encode(
                        config.backbone_config.blocks))
            override_params['data_format'] = config.data_format
            self.backbone = backbone_factory.get_model(
                backbone_name, override_params=override_params)

        # Feature network.
        self.resample_layers = []  # additional resampling layers.
        for level in range(6, config.max_level + 1):
            # Adds a coarser level by downsampling the last feature map.
            self.resample_layers.append(
                ResampleFeatureMap(
                    feat_level=(level - config.min_level),
                    target_num_channels=config.fpn_num_filters,
                    apply_bn=config.apply_bn_for_resampling,
                    is_training_bn=config.is_training_bn,
                    conv_after_downsample=config.conv_after_downsample,
                    strategy=config.strategy,
                    data_format=config.data_format,
                    model_optimizations=config.model_optimizations,
                    name='resample_p%d' % level,
                ))
        self.fpn_cells = FPNCells(config)

        # Class/box output prediction network.
        num_anchors = len(config.aspect_ratios) * config.num_scales
        num_filters = config.fpn_num_filters
        for head in config.heads:
            if head == 'object_detection':
                self.class_net = ClassNet(
                    num_classes=config.num_classes,
                    num_anchors=num_anchors,
                    num_filters=num_filters,
                    min_level=config.min_level,
                    max_level=config.max_level,
                    is_training_bn=config.is_training_bn,
                    act_type=config.act_type,
                    repeats=config.box_class_repeats,
                    separable_conv=config.separable_conv,
                    survival_prob=config.survival_prob,
                    strategy=config.strategy,
                    grad_checkpoint=config.grad_checkpoint,
                    data_format=config.data_format,
                    feature_only=feature_only)

                self.box_net = BoxNet(num_anchors=num_anchors,
                                      num_filters=num_filters,
                                      min_level=config.min_level,
                                      max_level=config.max_level,
                                      is_training_bn=config.is_training_bn,
                                      act_type=config.act_type,
                                      repeats=config.box_class_repeats,
                                      separable_conv=config.separable_conv,
                                      survival_prob=config.survival_prob,
                                      strategy=config.strategy,
                                      grad_checkpoint=config.grad_checkpoint,
                                      data_format=config.data_format,
                                      feature_only=feature_only)

            if head == 'segmentation':
                self.seg_head = SegmentationHead(
                    num_classes=config.seg_num_classes,
                    num_filters=num_filters,
                    min_level=config.min_level,
                    max_level=config.max_level,
                    is_training_bn=config.is_training_bn,
                    act_type=config.act_type,
                    strategy=config.strategy,
                    data_format=config.data_format)
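Finally, a hypothetical instantiation sketch for the class this constructor belongs to (the Keras EfficientDet model, named EfficientDetNet in the repo; the exact return structure of its forward pass is an assumption here):

import tensorflow as tf

# Build the d0 variant from its default hparams and run a dummy batch.
model = EfficientDetNet(model_name='efficientdet-d0')
images = tf.zeros([1, 512, 512, 3], dtype=tf.float32)
cls_outputs, box_outputs = model(images, training=False)
# With the default 'object_detection' head, the outputs are per-level
# class and box feature maps for levels min_level..max_level.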