Example #1
0
 def call(self, new_node, training):
   """Runs conv -> bn, with the activation placed per conv_bn_act_pattern.

   When conv_bn_act_pattern is False the activation is applied before the
   conv; when True it is applied after the batch norm.
   """
   pre_activate = not self.conv_bn_act_pattern
   if pre_activate:
     new_node = activation_builder.activation_fn(new_node, self.act_type)
   new_node = self.bn(self.conv_op(new_node), training=training)
   if not pre_activate:
     new_node = activation_builder.activation_fn(new_node, self.act_type)
   return new_node
Example #2
0
  def call(self, feats, training):
    """Decodes: upsample the coarsest feature map, fusing skips on the way up."""
    x = feats[-1]
    # Skip connections pair with the transposed convs from fine to coarse,
    # so walk the remaining feature maps in reverse order.
    skip_feats = feats[:-1][::-1]

    for upconv, upconv_bn, skip in zip(self.con2d_ts, self.con2d_t_bns,
                                       skip_feats):
      x = upconv(x)
      x = upconv_bn(x, training)
      x = activation_builder.activation_fn(x, self.act_type)
      x = tf.concat([x, skip], axis=-1)

    # This is the last layer of the model
    return self.head_transpose(x)  # 64x64 -> 128x128
Example #3
0
def batch_norm_act(inputs,
                   is_training_bn: bool,
                   act_type: Union[Text, None],
                   init_zero: bool = False,
                   data_format: Text = 'channels_last',
                   momentum: float = 0.99,
                   epsilon: float = 1e-3,
                   name: Union[Text, None] = None):
    """Performs a batch normalization followed by a non-linear activation.

  Args:
    inputs: `Tensor` of shape `[batch, channels, ...]`.
    is_training_bn: `bool` for whether the model is training.
    act_type: non-linear relu function type. If None, omits the relu operation.
    init_zero: `bool` if True, initializes scale parameter of batch
      normalization with 0 instead of 1 (default).
    data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
    momentum: `float`, momentum of batch norm.
    epsilon: `float`, small value for numerical stability.
    name: the name of the batch normalization layer.

  Returns:
    A normalized `Tensor` with the same `data_format`.
  """
    # Zero-initialized gamma makes the normalized branch start as identity,
    # a common trick for the last BN of a residual branch.
    if init_zero:
        gamma_initializer = tf.zeros_initializer()
    else:
        gamma_initializer = tf.ones_initializer()

    # Channel axis: 1 for channels_first (NCHW); the last axis otherwise.
    # Using -1 (instead of a hard-coded 3) stays correct for inputs of any
    # rank, not just 4-D, and is identical for the 4-D case.
    if data_format == 'channels_first':
        axis = 1
    else:
        axis = -1

    inputs = batch_normalization(inputs=inputs,
                                 axis=axis,
                                 momentum=momentum,
                                 epsilon=epsilon,
                                 center=True,
                                 scale=True,
                                 training=is_training_bn,
                                 gamma_initializer=gamma_initializer,
                                 name=name)

    if act_type:
        inputs = activation_builder.activation_fn(inputs, act_type)
    return inputs
Example #4
0
  def call(self, inputs, training):
    """Call boxnet."""
    num_levels = self.max_level - self.min_level + 1
    box_outputs = []
    for level_id in range(num_levels):
      feat = inputs[level_id]
      for rep in range(self.repeats):
        residual = feat
        feat = self.conv_ops[rep](feat)
        feat = self.bns[rep][level_id](feat, training=training)
        if self.act_type:
          feat = activation_builder.activation_fn(feat, self.act_type)
        # Stochastic depth with an identity skip; not applied to the first
        # repeat, which has no matching-shape residual.
        if rep > 0 and self.survival_prob:
          feat = model_utils.drop_connect(feat, training, self.survival_prob)
          feat = feat + residual

      box_outputs.append(self.boxes(feat))

    return box_outputs