Example #1
def _pre_conv(x, is_training,
              num_outputs=None,
              kernel_size=1,
              stride=1,
              padding='VALID',
              activation='linear',
              trainable=True,
              scope='preconv',
              reuse=None):
    '''Applies a convolution without batch-norm (1x1 and linear by default).

    Args:
        is_training: Unused; kept for interface compatibility, since this
            layer has no batch-norm.
        num_outputs: If None, the number of input channels is used.
    '''
    # TODO: Support multi-scale.
    x = cnn.as_tensor(x)
    if num_outputs is None:
        num_outputs = x.value.shape[-1].value
    return cnn.slim_conv2d(x, num_outputs, kernel_size,
                           stride=stride,
                           padding=padding,
                           activation_fn=helpers.get_act(activation),
                           normalizer_fn=None,  # No batch-norm.
                           trainable=trainable,
                           scope=scope,
                           reuse=reuse)
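
A minimal usage sketch for _pre_conv, assuming TF1 graph mode and that the project-local cnn and helpers modules are importable; the input shape below is illustrative, not from the source:

import tensorflow as tf

# NHWC feature map from an earlier backbone stage (shape is illustrative).
feat = tf.placeholder(tf.float32, [None, 17, 17, 256])
# With num_outputs=None the 256 input channels are preserved, giving a
# 1x1 linear projection with no batch-norm; is_training is ignored here.
proj = _pre_conv(feat, is_training=True, scope='preconv')
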
Example #2
def slim_alexnet_v2(x,
                    is_training,
                    trainable=True,
                    variables_collections=None,
                    weight_decay=0.0005,
                    conv_padding='VALID',
                    pool_padding='VALID',
                    conv1_stride=4,
                    output_layer='conv5',
                    output_act='linear',
                    freeze_until_layer=None):
    if not trainable:
        raise NotImplementedError('trainable=False is not supported')
    # TODO: Support variables_collections.

    with slim.arg_scope(
            alexnet_pkg.alexnet_v2_arg_scope(weight_decay=weight_decay,
                                             conv_padding=conv_padding,
                                             pool_padding=pool_padding)):
        return alexnet_pkg.alexnet_v2(
            x,
            is_training=is_training,
            conv1_stride=conv1_stride,
            output_layer=output_layer,
            output_activation_fn=helpers.get_act(output_act),
            freeze_until_layer=freeze_until_layer)
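
A usage sketch for slim_alexnet_v2, again assuming TF1 graph mode; alexnet_pkg is a project-local fork of slim's AlexNet, so treating the return value as a single feature tensor (rather than slim's usual net-plus-end_points pair) is an assumption here:

import tensorflow as tf

# 224x224 RGB batch; conv1_stride=4 matches the canonical AlexNet stem.
images = tf.placeholder(tf.float32, [None, 224, 224, 3])
# Truncate the network at conv5 and keep the output activation linear.
net = slim_alexnet_v2(images,
                      is_training=True,
                      output_layer='conv5',
                      output_act='linear')
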
Example #3
def alexnet(x,
            is_training,
            trainable=True,
            variables_collections=None,
            weight_decay=0,
            output_layer='conv5',
            output_act='linear',
            freeze_until_layer=None,
            padding='VALID',
            enable_bnorm=True):
    with slim.arg_scope(
            feature_arg_scope(weight_decay=weight_decay,
                              enable_bnorm=enable_bnorm,
                              padding=padding)):
        return _alexnet_layers(
            x,
            is_training,
            trainable,
            variables_collections,
            output_layer=output_layer,
            output_activation_fn=helpers.get_act(output_act),
            freeze_until_layer=freeze_until_layer)
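
A usage sketch for the batch-norm variant, alexnet; feature_arg_scope and _alexnet_layers are project-local helpers, so this only illustrates the intended call pattern, with an illustrative input shape:

import tensorflow as tf

# Smaller crop (shape illustrative); padding='VALID' shrinks the map.
images = tf.placeholder(tf.float32, [None, 127, 127, 3])
# enable_bnorm=True by default, so is_training actually matters here,
# unlike in _pre_conv above.
feat = alexnet(images, is_training=True, output_layer='conv5')
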
Example #4
def _extra_conv(x,
                is_training,
                trainable,
                variables_collections,
                num_outputs=None,
                kernel_size=1,
                stride=1,
                padding='VALID',
                activation='linear'):
    if not trainable:
        raise NotImplementedError('trainable=False is not supported')

    x = cnn.as_tensor(x)
    if num_outputs is None:
        num_outputs = x.value.shape[-1].value
    with slim.arg_scope([slim.batch_norm], is_training=is_training):
        with slim.arg_scope([cnn.slim_conv2d],
                            variables_collections=variables_collections):
            return cnn.slim_conv2d(x,
                                   num_outputs,
                                   kernel_size,
                                   stride=stride,
                                   padding=padding,
                                   activation_fn=helpers.get_act(activation))
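
A usage sketch for _extra_conv under the same assumptions (project-local cnn and helpers modules, TF1 graph mode); the shape and channel counts are illustrative:

import tensorflow as tf

# Post-backbone feature map (shape illustrative).
feat = tf.placeholder(tf.float32, [None, 5, 5, 512])
# Unlike _pre_conv, this path keeps batch-norm, threading is_training
# through slim.arg_scope; project to 256 channels with a 3x3 ReLU conv.
out = _extra_conv(feat, is_training=True, trainable=True,
                  variables_collections=None,
                  num_outputs=256, kernel_size=3, activation='relu')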