Code example #1
0
def small_vgg(input_image, num_channels, num_classes):
    """Build a compact VGG-style classification network.

    :param input_image: input layer (project LayerOutput).
    :param num_channels: channel count of the input image.
    :param num_classes: size of the final softmax output.
    :return: the softmax output layer.
    """
    def conv_block(prev, filters, repeats, drop_rates, channels=None):
        # One VGG stage: `repeats` 3x3 conv+batchnorm layers, then 2x2 max pool.
        return img_conv_group(input=prev,
                              num_channels=channels,
                              pool_size=2,
                              pool_stride=2,
                              conv_num_filter=[filters] * repeats,
                              conv_filter_size=3,
                              conv_act=ReluActivation(),
                              conv_with_batchnorm=True,
                              conv_batchnorm_drop_rate=drop_rates,
                              pool_type=MaxPooling())

    net = conv_block(input_image, 64, 2, [0.3, 0], num_channels)
    net = conv_block(net, 128, 2, [0.4, 0])
    net = conv_block(net, 256, 3, [0.4, 0.4, 0])
    net = conv_block(net, 512, 3, [0.4, 0.4, 0])
    net = img_pool_layer(input=net,
                         stride=2,
                         pool_size=2,
                         pool_type=MaxPooling())
    net = dropout_layer(input=net, dropout_rate=0.5)
    net = fc_layer(input=net,
                   size=512,
                   layer_attr=ExtraAttr(drop_rate=0.5),
                   act=LinearActivation())
    net = batch_norm_layer(input=net, act=ReluActivation())
    return fc_layer(input=net, size=num_classes, act=SoftmaxActivation())
Code example #2
0
File: networks.py — Project: zhangjcqq/Paddle
def vgg_16_network(input_image, num_channels, num_classes=1000):
    """
    Same model from https://gist.github.com/ksimonyan/211839e770f7b538e2d8

    :param num_classes:
    :param input_image:
    :type input_image: LayerOutput
    :param num_channels:
    :type num_channels: int
    :return:
    """
    # First stage carries the explicit channel count of the raw input.
    net = img_conv_group(input=input_image, num_channels=num_channels,
                         conv_padding=1, conv_num_filter=[64, 64],
                         conv_filter_size=3,
                         conv_act=ReluActivation(), pool_size=2,
                         pool_stride=2,
                         pool_type=MaxPooling())

    # Remaining four conv stages differ only in their filter counts.
    stage_filters = ([128, 128],
                     [256, 256, 256],
                     [512, 512, 512],
                     [512, 512, 512])
    for filters in stage_filters:
        net = img_conv_group(input=net, conv_num_filter=filters,
                             conv_padding=1,
                             conv_filter_size=3, conv_act=ReluActivation(),
                             pool_stride=2, pool_type=MaxPooling(),
                             pool_size=2)

    # Two 4096-wide fully-connected layers with dropout, as in the paper.
    for _ in range(2):
        net = fc_layer(input=net, size=4096, act=ReluActivation(),
                       layer_attr=ExtraAttr(drop_rate=0.5))

    return fc_layer(input=net, size=num_classes, act=SoftmaxActivation())
Code example #3
0
File: networks.py — Project: zhangjcqq/Paddle
def dropout_layer(input, dropout_rate, name=None):
    """Apply dropout to ``input``.

    Implemented as a single-input addto layer (identity pass-through,
    no bias) whose layer attribute carries the dropout rate.

    :param input: layer to apply dropout to.
    :param dropout_rate: fraction of units to drop.
    :param name: optional layer name.
    :return: the wrapping addto layer.
    """
    attrs = ExtraAttr(drop_rate=dropout_rate)
    return addto_layer(name=name,
                       input=input,
                       act=LinearActivation(),
                       bias_attr=False,
                       layer_attr=attrs)
Code example #4
0
def img_conv_group(input,
                   conv_num_filter,
                   pool_size,
                   num_channels=None,
                   conv_padding=1,
                   conv_filter_size=3,
                   conv_act=None,
                   conv_with_batchnorm=False,
                   conv_batchnorm_drop_rate=0,
                   pool_stride=1,
                   pool_type=None):
    """
    Image Convolution Group, used for VGG-style nets: a stack of conv
    layers (each optionally followed by batch-norm with dropout) capped
    by a single pooling layer.

    Scalar values for ``conv_padding``, ``conv_filter_size``, ``conv_act``,
    ``conv_with_batchnorm`` and ``conv_batchnorm_drop_rate`` are broadcast
    to every conv layer; sequences are used per-layer as given.

    :param input: input layer (LayerOutput).
    :param conv_num_filter: list/tuple of filter counts, one per conv layer.
    :param pool_size: window size of the trailing pooling layer.
    :param num_channels: channel count of ``input``; forwarded only to the
        first conv layer (later layers infer it from their predecessor).
    :param conv_padding: padding per conv layer (scalar or sequence).
    :param conv_filter_size: filter size per conv layer (scalar or sequence).
    :param conv_act: activation per conv layer (scalar or sequence).
    :param conv_with_batchnorm: whether each conv layer is followed by
        batch-norm (scalar or sequence).
    :param conv_batchnorm_drop_rate: dropout rate applied after each
        batch-norm (scalar or sequence); values within 1e-5 of zero mean
        no dropout.
    :param pool_stride: stride of the trailing pooling layer.
    :param pool_type: pooling type of the trailing pooling layer.
    :return: output of the trailing img_pool_layer.
    """
    tmp = input

    # Type checks
    assert isinstance(tmp, LayerOutput)
    assert isinstance(conv_num_filter, (list, tuple))
    for each_num_filter in conv_num_filter:
        assert isinstance(each_num_filter, int)

    assert isinstance(pool_size, int)

    def __extend_list__(obj):
        # Broadcast a scalar setting to one entry per conv layer.
        if not hasattr(obj, '__len__'):
            return [obj] * len(conv_num_filter)
        else:
            return obj

    conv_padding = __extend_list__(conv_padding)
    conv_filter_size = __extend_list__(conv_filter_size)
    conv_act = __extend_list__(conv_act)
    conv_with_batchnorm = __extend_list__(conv_with_batchnorm)
    conv_batchnorm_drop_rate = __extend_list__(conv_batchnorm_drop_rate)

    # `range` instead of Python-2-only `xrange`: works on both Python 2
    # (returns a small list, fine for iteration) and Python 3.
    for i in range(len(conv_num_filter)):
        extra_kwargs = dict()
        if num_channels is not None:
            # Only the first conv layer needs the explicit channel count.
            extra_kwargs['num_channels'] = num_channels
            num_channels = None
        if conv_with_batchnorm[i]:
            # Defer the real activation until after batch-norm.
            extra_kwargs['act'] = LinearActivation()
        else:
            extra_kwargs['act'] = conv_act[i]

        tmp = img_conv_layer(input=tmp,
                             padding=conv_padding[i],
                             filter_size=conv_filter_size[i],
                             num_filters=conv_num_filter[i],
                             **extra_kwargs)

        if conv_with_batchnorm[i]:
            dropout = conv_batchnorm_drop_rate[i]
            # abs(dropout) < 1e-5 already covers the exact-zero case, so the
            # former redundant `dropout == 0 or ...` check is collapsed.
            if abs(dropout) < 1e-5:  # dropout not set
                tmp = batch_norm_layer(input=tmp, act=conv_act[i])
            else:
                tmp = batch_norm_layer(input=tmp,
                                       act=conv_act[i],
                                       layer_attr=ExtraAttr(drop_rate=dropout))

    return img_pool_layer(input=tmp,
                          stride=pool_stride,
                          pool_size=pool_size,
                          pool_type=pool_type)