Example #1
def res_block(x, in_features, out_features, stride, activate_before_res=False):
    """Pre-activation residual block: two BN->ReLU->3x3 conv stages plus an avg-pool / zero-pad shortcut."""

    if activate_before_res:
        with tf.variable_scope('shared_activation'):
            x = ops.batch_norm(x, scope='init_bn')
            x = tf.nn.relu(x)
            orig_x = x
    else:
        orig_x = x

    block_x = x
    if not activate_before_res:
        with tf.variable_scope('res_only_activation'):
            block_x = ops.batch_norm(block_x, scope='init_bn')
            block_x = tf.nn.relu(block_x)

    with tf.variable_scope('sub1'):
        block_x = ops.conv2d(block_x,
                             out_features,
                             3,
                             stride=stride,
                             scope='conv1')

    with tf.variable_scope('sub2'):
        block_x = ops.batch_norm(block_x, scope='bn2')
        block_x = tf.nn.relu(block_x)
        block_x = ops.conv2d(block_x, out_features, 3, stride=1, scope='conv2')

    with tf.variable_scope('sub_add'):
        if in_features != out_features:
            orig_x = ops.avg_pool(orig_x, stride, stride)
            orig_x = ops.zero_pad(orig_x, in_features, out_features)
    output_data = orig_x + block_x
    return output_data
Example #2
def res_add(x, orig_x, in_features, out_features, stride):
    """Adds the shortcut `orig_x` to `x`, pooling and zero-padding `orig_x` first if the feature counts differ."""
    if in_features != out_features:
        orig_x = ops.avg_pool(orig_x, stride, stride)
        orig_x = ops.zero_pad(orig_x, in_features, out_features)
    x = x + orig_x
    orig_x = x
    return x, orig_x
Example #3
def residual_block(x,
                   in_filter,
                   out_filter,
                   stride,
                   activate_before_residual=False):
    """Adds residual connection to `x` in addition to applying BN->ReLU->3x3 Conv.

  Args:
    x: Tensor that is the output of the previous layer in the model.
    in_filter: Number of filters `x` has.
    out_filter: Number of filters that the output of this layer will have.
    stride: Integer that specifies what stride should be applied to `x`.
    activate_before_residual: Boolean on whether a BN->ReLU should be applied
      to x before the convolution is applied.

  Returns:
    A Tensor that is the result of applying two sequences of BN->ReLU->3x3 Conv
    and then adding that Tensor to `x`.
  """

    if activate_before_residual:  # Apply BN->ReLU before the split so the shortcut shares the activation
        with tf.variable_scope('shared_activation'):
            x = ops.batch_norm(x, scope='init_bn')
            x = tf.nn.relu(x)
            orig_x = x
    else:
        orig_x = x

    block_x = x
    if not activate_before_residual:
        with tf.variable_scope('residual_only_activation'):
            block_x = ops.batch_norm(block_x, scope='init_bn')
            block_x = tf.nn.relu(block_x)

    with tf.variable_scope('sub1'):
        block_x = ops.conv2d(block_x,
                             out_filter,
                             3,
                             stride=stride,
                             scope='conv1')

    with tf.variable_scope('sub2'):
        block_x = ops.batch_norm(block_x, scope='bn2')
        block_x = tf.nn.relu(block_x)
        block_x = ops.conv2d(block_x, out_filter, 3, stride=1, scope='conv2')

    with tf.variable_scope('sub_add'):
        # If the filter counts disagree, pool and zero-pad the shortcut to match.
        if in_filter != out_filter:
            orig_x = ops.avg_pool(orig_x, stride, stride)
            orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
    x = orig_x + block_x
    return x
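
In the models these snippets come from, blocks like `residual_block` are normally chained into groups in which only the first block changes the width and stride. The sketch below illustrates that wiring; it is not part of the original source, it assumes the `residual_block` and TF1-style `tf.variable_scope` shown above, and the name `residual_group` is hypothetical.

def residual_group(x, in_filter, out_filter, stride, num_blocks):
    """Sketch: stacks `num_blocks` residual blocks into one group."""
    for i in range(num_blocks):
        with tf.variable_scope('block_{}'.format(i)):
            x = residual_block(
                x,
                in_filter if i == 0 else out_filter,  # width changes only in block 0
                out_filter,
                stride if i == 0 else 1,  # downsampling happens only in block 0
                activate_before_residual=(i == 0))  # first block shares its activation
    return x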
Example #4
def shortcut(x, output_filters, stride):
  """Applies strided avg pool or zero padding to make output_filters match x."""
  num_filters = int(x.shape[3])
  if stride == 2:
    x = ops.avg_pool(x, 2, stride=stride, padding='SAME')
  if num_filters != output_filters:
    diff = output_filters - num_filters
    assert diff > 0
    # Zero-pad `diff` extra channels onto the channel axis.
    padding = [[0, 0], [0, 0], [0, 0], [0, diff]]
    x = tf.pad(x, padding)
  return x
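
For readers without the project-local `ops` module, the same avg-pool + channel zero-padding shortcut can be written with core TensorFlow ops. The following is a self-contained sketch (an illustration, not the original implementation), assuming a TF1-style graph and NHWC tensors; the name `plain_shortcut` is made up.

import tensorflow as tf

def plain_shortcut(x, output_filters, stride):
  """Sketch of the same idea: strided 2x2 avg pool, then zero-pad the channel axis."""
  num_filters = int(x.shape[3])
  if stride == 2:
    # Halve the spatial resolution.
    x = tf.nn.avg_pool(x, ksize=[1, 2, 2, 1], strides=[1, stride, stride, 1],
                       padding='SAME')
  if num_filters != output_filters:
    # Append all-zero channels so the widths match for the residual add.
    x = tf.pad(x, [[0, 0], [0, 0], [0, 0], [0, output_filters - num_filters]])
  return x

# e.g. a [8, 32, 32, 16] feature map becomes [8, 16, 16, 32]:
y = plain_shortcut(tf.zeros([8, 32, 32, 16]), output_filters=32, stride=2)
assert y.shape.as_list() == [8, 16, 16, 32]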
Example #5
def residual_block(
    x, in_filter, out_filter, stride, activate_before_residual=False):
  """Adds residual connection to `x` in addition to applying BN->ReLU->3x3 Conv.

  Args:
    x: Tensor that is the output of the previous layer in the model.
    in_filter: Number of filters `x` has.
    out_filter: Number of filters that the output of this layer will have.
    stride: Integer that specifies what stride should be applied to `x`.
    activate_before_residual: Boolean on whether a BN->ReLU should be applied
      to x before the convolution is applied.

  Returns:
    A Tensor that is the result of applying two sequences of BN->ReLU->3x3 Conv
    and then adding that Tensor to `x`.
  """

  if activate_before_residual:  # Apply BN->ReLU before the split so the shortcut shares the activation
    with tf.variable_scope('shared_activation'):
      x = ops.batch_norm(x, scope='init_bn')
      x = tf.nn.relu(x)
      orig_x = x
  else:
    orig_x = x

  block_x = x
  if not activate_before_residual:
    with tf.variable_scope('residual_only_activation'):
      block_x = ops.batch_norm(block_x, scope='init_bn')
      block_x = tf.nn.relu(block_x)

  with tf.variable_scope('sub1'):
    block_x = ops.conv2d(
        block_x, out_filter, 3, stride=stride, scope='conv1')

  with tf.variable_scope('sub2'):
    block_x = ops.batch_norm(block_x, scope='bn2')
    block_x = tf.nn.relu(block_x)
    block_x = ops.conv2d(
        block_x, out_filter, 3, stride=1, scope='conv2')

  with tf.variable_scope('sub_add'):
    # If the filter counts disagree, pool and zero-pad the shortcut to match.
    if in_filter != out_filter:
      orig_x = ops.avg_pool(orig_x, stride, stride)
      orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
  x = orig_x + block_x
  return x
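
To make the `sub_add` bookkeeping concrete, here is a worked shape trace for one down-sampling block. The numbers are illustrative (a WRN-style 160->320 transition) and are not taken from the source.

# Worked shape example for the in_filter != out_filter case, assuming
# in_filter=160, out_filter=320, stride=2 and a [N, 32, 32, 160] input:
#   block_x after conv1 (stride 2) and conv2 (stride 1)  -> [N, 16, 16, 320]
#   orig_x = ops.avg_pool(orig_x, 2, 2)                  -> [N, 16, 16, 160]
#   orig_x = ops.zero_pad(orig_x, 160, 320)              -> [N, 16, 16, 320]
#   x = orig_x + block_x                                 -> [N, 16, 16, 320]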
Example #6
def no_relu_residual_block(x, in_filter, out_filter, stride, weight_decay=0.0):
    """Post-activation residual block: Conv->BN, ReLU->Conv->BN, then ReLU after the shortcut add."""
    orig_x = x
    block_x = x
    with tf.variable_scope('sub1'):
        block_x = ops.conv2d(block_x,
                             out_filter,
                             3,
                             stride=stride,
                             scope='conv1',
                             weight_decay=weight_decay)
        block_x = ops.batch_norm(block_x, 0.9, scope='bn1')
    with tf.variable_scope('sub2'):
        block_x = tf.nn.relu(block_x)
        block_x = ops.conv2d(block_x,
                             out_filter,
                             3,
                             stride=1,
                             scope='conv2',
                             weight_decay=weight_decay)
        block_x = ops.batch_norm(block_x, 0.9, scope='bn2')

    with tf.variable_scope('sub_add'):
        # If the filter count or stride differs, pool and zero-pad the shortcut to match.
        if in_filter != out_filter or stride != 1:
            orig_x = ops.avg_pool(orig_x, 1, stride)
            orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
        # Alternative projection shortcut (1x1 conv + batch norm), left disabled:
        # orig_x = ops.conv2d(orig_x, out_filter, 1, stride=stride, scope='shortcut')
        # orig_x = ops.batch_norm(orig_x, 0.9, scope='bn3')
    x = tf.nn.relu(orig_x + block_x)
    return x
Example #7
def _res_add(in_filter, out_filter, stride, x, orig_x):
  """Adds `x` with `orig_x`, both of which are layers in the model.
  Args:
    in_filter: Number of filters in `orig_x`.
    out_filter: Number of filters in `x`.
    stride: Integer specifying the stride that should be applied to `orig_x`.
    x: Tensor that is the output of the previous layer.
    orig_x: Tensor that is the output of an earlier layer in the network.
  Returns:
    A Tensor that is the result of `x` and `orig_x` being added after
    zero padding and striding are applied to `orig_x` to get the shapes
    to match.
  """
  if in_filter != out_filter:
    orig_x = ops.avg_pool(orig_x, stride, stride)
    orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
  x = x + orig_x
  orig_x = x
  return x, orig_x
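
For context, helpers like `_res_add` are typically invoked once per residual group, folding the running shortcut back into `x` and re-seeding it for the next group. A hedged usage sketch follows; the widths shown are WRN-28-10-style values chosen purely for illustration, and `x`/`orig_x` are assumed to come from the surrounding model-building code.

# Usage sketch (illustrative values, not from the original source):
x, orig_x = _res_add(16, 160, 1, x, orig_x)    # after the first group, stride 1
x, orig_x = _res_add(160, 320, 2, x, orig_x)   # second group halves the resolution
x, orig_x = _res_add(320, 640, 2, x, orig_x)   # third group halves it again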
Example #8
def _res_add(in_filter, out_filter, stride, x, orig_x):
  """Adds `x` with `orig_x`, both of which are layers in the model.

  Args:
    in_filter: Number of filters in `orig_x`.
    out_filter: Number of filters in `x`.
    stride: Integer specifying the stride that should be applied to `orig_x`.
    x: Tensor that is the output of the previous layer.
    orig_x: Tensor that is the output of an earlier layer in the network.

  Returns:
    A Tensor that is the result of `x` and `orig_x` being added after
    zero padding and striding are applied to `orig_x` to get the shapes
    to match.
  """
  if in_filter != out_filter:
    orig_x = ops.avg_pool(orig_x, stride, stride)
    orig_x = ops.zero_pad(orig_x, in_filter, out_filter)
  x = x + orig_x
  orig_x = x
  return x, orig_x
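
`_res_add` relies on `ops.zero_pad` to widen the shortcut; that helper is not shown in these examples. A plausible, self-contained sketch of such a function (an assumption about its behavior, not the project's code) is:

import tensorflow as tf

def zero_pad_sketch(x, in_filter, out_filter):
  """Sketch: widen the channel axis from `in_filter` to `out_filter` with zeros."""
  assert out_filter > in_filter and int(x.shape[3]) == in_filter
  extra = out_filter - in_filter
  # Pad only the last (channel) axis; batch, height and width stay untouched.
  return tf.pad(x, [[0, 0], [0, 0], [0, 0], [extra // 2, extra - extra // 2]])

# e.g. 16 -> 32 channels on a [4, 8, 8, 16] tensor:
padded = zero_pad_sketch(tf.zeros([4, 8, 8, 16]), 16, 32)
assert padded.shape.as_list() == [4, 8, 8, 32]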