Example #1
    def __init__(
        self,
        min_level=3,
        max_level=7,
        fpn_feat_dims=256,
        use_separable_conv=False,
        activation='relu',
        use_batch_norm=True,
        norm_activation=nn_ops.norm_activation_builder(activation='relu')):
        """FPN initialization function.

    Args:
      min_level: `int` minimum level in FPN output feature maps.
      max_level: `int` maximum level in FPN output feature maps.
      fpn_feat_dims: `int` number of filters in FPN layers.
      use_separable_conv: `bool`, if True use separable convolution for
        convolution in FPN layers.
      activation: the activation function.
      use_batch_norm: 'bool', indicating whether batchnorm layers are added.
      norm_activation: an operation that includes a normalization layer
        followed by an optional activation layer.
    """
        self._min_level = min_level
        self._max_level = max_level
        self._fpn_feat_dims = fpn_feat_dims
        if use_separable_conv:
            self._conv2d_op = functools.partial(
                tf.keras.layers.SeparableConv2D, depth_multiplier=1)
        else:
            self._conv2d_op = tf.keras.layers.Conv2D
        if activation == 'relu':
            self._activation_op = tf.nn.relu
        elif activation == 'swish':
            self._activation_op = tf.nn.swish
        else:
            raise ValueError('Unsupported activation `{}`.'.format(activation))
        self._use_batch_norm = use_batch_norm
        self._norm_activation = norm_activation

        self._norm_activations = {}
        self._lateral_conv2d_op = {}
        self._post_hoc_conv2d_op = {}
        self._coarse_conv2d_op = {}
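        # Build the per-level lateral, post-hoc, and coarse convolutions for
        # every pyramid level in [min_level, max_level].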
        for level in range(self._min_level, self._max_level + 1):
            if self._use_batch_norm:
                self._norm_activations[level] = norm_activation(
                    use_activation=False, name='p%d-bn' % level)
            self._lateral_conv2d_op[level] = self._conv2d_op(
                filters=self._fpn_feat_dims,
                kernel_size=(1, 1),
                padding='same',
                name='l%d' % level)
            self._post_hoc_conv2d_op[level] = self._conv2d_op(
                filters=self._fpn_feat_dims,
                strides=(1, 1),
                kernel_size=(3, 3),
                padding='same',
                name='post_hoc_d%d' % level)
            self._coarse_conv2d_op[level] = self._conv2d_op(
                filters=self._fpn_feat_dims,
                strides=(2, 2),
                kernel_size=(3, 3),
                padding='same',
                name='p%d' % level)
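
A minimal construction sketch for this initializer. The enclosing class name
`Fpn` is an assumption (the snippet does not show the class declaration), and
the snippet itself presumes `import functools`, `import tensorflow as tf`, and
a project-local `nn_ops` module:

# Hypothetical usage; `Fpn` is an assumed class name.
fpn = Fpn(
    min_level=3,
    max_level=7,
    fpn_feat_dims=256,
    use_separable_conv=True,  # swaps Conv2D for SeparableConv2D
    activation='swish',
    use_batch_norm=True)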
Example #2
    def __init__(
            self,
            resnet_depth,
            activation='relu',
            norm_activation=nn_ops.norm_activation_builder(activation='relu'),
            data_format='channels_last'):
        """ResNet initialization function.

    Args:
      resnet_depth: `int` depth of ResNet backbone model.
      activation: the activation function.
      norm_activation: an operation that includes a normalization layer followed
        by an optional activation layer.
      data_format: `str` either "channels_first" for `[batch, channels, height,
        width]` or "channels_last for `[batch, height, width, channels]`.
    """
        self._resnet_depth = resnet_depth
        if activation == 'relu':
            self._activation_op = tf.nn.relu
        elif activation == 'swish':
            self._activation_op = tf.nn.swish
        else:
            raise ValueError('Unsupported activation `{}`.'.format(activation))
        self._norm_activation = norm_activation
        self._data_format = data_format

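        # Canonical ResNet-v1 configurations: block type and per-stage layer
        # counts, keyed by network depth.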
        model_params = {
            10: {'block': self.residual_block, 'layers': [1, 1, 1, 1]},
            18: {'block': self.residual_block, 'layers': [2, 2, 2, 2]},
            34: {'block': self.residual_block, 'layers': [3, 4, 6, 3]},
            50: {'block': self.bottleneck_block, 'layers': [3, 4, 6, 3]},
            101: {'block': self.bottleneck_block, 'layers': [3, 4, 23, 3]},
            152: {'block': self.bottleneck_block, 'layers': [3, 8, 36, 3]},
            200: {'block': self.bottleneck_block, 'layers': [3, 24, 36, 3]},
        }

        if resnet_depth not in model_params:
            valid_resnet_depths = ', '.join(
                str(depth) for depth in sorted(model_params.keys()))
            raise ValueError(
                'resnet_depth should be one of [%s]; got %s.'
                % (valid_resnet_depths, resnet_depth))
        params = model_params[resnet_depth]
        self._resnet_fn = self.resnet_v1_generator(params['block'],
                                                   params['layers'])
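
A minimal construction sketch, again with a hedge: the enclosing class name
`Resnet` is an assumption, since the snippet omits the class declaration:

# Hypothetical usage; `Resnet` is an assumed class name.
backbone = Resnet(
    resnet_depth=50,  # selects bottleneck_block with layers [3, 4, 6, 3]
    activation='relu',
    data_format='channels_last')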
Example #3
def norm_activation_generator(params):
    """Builds a norm-activation constructor from the `params` config."""
    return nn_ops.norm_activation_builder(
        momentum=params.batch_norm_momentum,
        epsilon=params.batch_norm_epsilon,
        trainable=params.batch_norm_trainable,
        activation=params.activation)
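
For context, `nn_ops.norm_activation_builder` plausibly pre-binds these keyword
arguments onto a batch-norm-plus-activation layer constructor via
`functools.partial`; the following is a sketch under that assumption, with
`BatchNormActivation` as an assumed class name:

import functools

# Sketch only: assumes the builder simply pre-binds kwargs onto an assumed
# `BatchNormActivation` layer class defined elsewhere in `nn_ops`.
def norm_activation_builder(momentum=0.997,
                            epsilon=1e-4,
                            trainable=True,
                            activation='relu',
                            **kwargs):
    return functools.partial(
        BatchNormActivation,
        momentum=momentum,
        epsilon=epsilon,
        trainable=trainable,
        activation=activation,
        **kwargs)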