def __init__(self,
                 ch_in,
                 ch_out,
                 filter_size,
                 stride,
                 name_adapter,
                 act=None,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=True,
                 lr=1.0,
                 name=None):
        """Conv2D followed by BatchNorm, with optional frozen normalization.

        Args:
            ch_in: number of input channels.
            ch_out: number of output channels.
            filter_size: square kernel size; 'same' padding is derived as
                (filter_size - 1) // 2.
            stride: convolution stride.
            name_adapter: object providing fix_conv_norm_name() to derive the
                BN parameter name prefix from the conv name.
            act: activation fused into BatchNorm (None for linear output).
            norm_type: 'bn' or 'sync_bn' (asserted).
            norm_decay: L2 weight decay applied to BN scale/offset.
            freeze_norm: if True, BN parameters get zero learning rate, are
                marked untrainable with gradients stopped, and BN uses global
                (moving) statistics instead of batch statistics.
            lr: learning-rate multiplier for conv weights (and BN params when
                not frozen).
            name: parameter name prefix for the conv weights.
        """
        super(ConvNormLayer, self).__init__()
        assert norm_type in ['bn', 'sync_bn']
        self.norm_type = norm_type
        self.act = act

        self.conv = Conv2D(in_channels=ch_in,
                           out_channels=ch_out,
                           kernel_size=filter_size,
                           stride=stride,
                           padding=(filter_size - 1) // 2,
                           groups=1,
                           weight_attr=ParamAttr(learning_rate=lr,
                                                 name=name + "_weights"),
                           bias_attr=False)

        bn_name = name_adapter.fix_conv_norm_name(name)
        norm_lr = 0. if freeze_norm else lr
        # `trainable=not freeze_norm` replaces the redundant
        # `False if freeze_norm else True` ternary.
        param_attr = ParamAttr(learning_rate=norm_lr,
                               regularizer=L2Decay(norm_decay),
                               name=bn_name + "_scale",
                               trainable=not freeze_norm)
        bias_attr = ParamAttr(learning_rate=norm_lr,
                              regularizer=L2Decay(norm_decay),
                              name=bn_name + "_offset",
                              trainable=not freeze_norm)

        # Frozen norm runs on global (inference) statistics.
        self.norm = BatchNorm(ch_out,
                              act=act,
                              param_attr=param_attr,
                              bias_attr=bias_attr,
                              use_global_stats=freeze_norm,
                              moving_mean_name=bn_name + '_mean',
                              moving_variance_name=bn_name + '_variance')

        if freeze_norm:
            for param in self.norm.parameters():
                param.stop_gradient = True
示例#2
0
 def __init__(self, num_channels, num_filters):
     """Two stacked 3x3 same-padding convolutions, each with its own BatchNorm."""
     super(DoubleConv, self).__init__()
     # Both convs share the same 3x3 / stride-1 / pad-1 geometry.
     conv_cfg = dict(filter_size=3, stride=1, padding=1)
     self.conv0 = Conv2D(num_channels=num_channels,
                         num_filters=num_filters,
                         **conv_cfg)
     self.bn0 = BatchNorm(num_filters)
     self.conv1 = Conv2D(num_channels=num_filters,
                         num_filters=num_filters,
                         **conv_cfg)
     self.bn1 = BatchNorm(num_filters)
示例#3
0
 def __init__(self,
              num_channels,
              num_filters,
              filter_size,
              stride=1,
              groups=1,
              is_vd_mode=False,
              act=None,
              lr_mult=1.0,
              data_format="NCHW"):
     """Conv + BN (+ ReLU) unit; vd mode adds a 2x2 ceil-mode average pool."""
     super().__init__()
     self.is_vd_mode = is_vd_mode
     self.act = act
     self.avg_pool = AvgPool2D(kernel_size=2,
                               stride=2,
                               padding=0,
                               ceil_mode=True)
     # 'same' padding for odd kernel sizes.
     same_pad = (filter_size - 1) // 2
     self.conv = Conv2D(in_channels=num_channels,
                        out_channels=num_filters,
                        kernel_size=filter_size,
                        stride=stride,
                        padding=same_pad,
                        groups=groups,
                        weight_attr=ParamAttr(learning_rate=lr_mult),
                        bias_attr=False,
                        data_format=data_format)
     # BN scale/offset share the same learning-rate multiplier as the conv.
     self.bn = BatchNorm(num_filters,
                         param_attr=ParamAttr(learning_rate=lr_mult),
                         bias_attr=ParamAttr(learning_rate=lr_mult),
                         data_layout=data_format)
     self.relu = nn.ReLU()
示例#4
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 is_vd_mode=False,
                 act=None,
                 name=None):
        """Conv + BN unit with named parameters; vd mode provides an avg pool."""
        super(ConvBNLayer, self).__init__()

        self.is_vd_mode = is_vd_mode
        self._pool2d_avg = AvgPool2D(kernel_size=2,
                                     stride=2,
                                     padding=0,
                                     ceil_mode=True)
        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False)
        norm_name = name + '_bn'
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(name=norm_name +
                                                          '_scale'),
                                     bias_attr=ParamAttr(norm_name + '_offset'),
                                     moving_mean_name=norm_name + '_mean',
                                     moving_variance_name=norm_name +
                                     '_variance')
示例#5
0
    def __init__(self,
                 num_channels: int,
                 filter_size: int,
                 num_filters: int,
                 stride: int,
                 padding: int,
                 channels: int = None,
                 num_groups: int = 1,
                 if_act: bool = True,
                 act: str = 'relu',
                 name: str = None):
        """Conv (MSRA init) + BN; 'relu' or 'swish' applied when if_act is set."""
        super(ConvBNLayer, self).__init__()
        self._if_act = if_act
        assert act in ['relu', 'swish'], \
            "supported act are {} but your act is {}".format(
                ['relu', 'swish'], act)
        self._act = act
        weight_attr = ParamAttr(initializer=MSRA(), name=name + "_weights")
        self._conv = Conv2d(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            groups=num_groups,
                            weight_attr=weight_attr,
                            bias_attr=False)

        bn_prefix = name + "_bn"
        self._batch_norm = BatchNorm(
            num_filters,
            param_attr=ParamAttr(name=bn_prefix + "_scale"),
            bias_attr=ParamAttr(name=bn_prefix + "_offset"),
            moving_mean_name=bn_prefix + "_mean",
            moving_variance_name=bn_prefix + "_variance")
示例#6
0
    def __init__(self,
                 num_channels,
                 filter_size,
                 num_filters,
                 stride,
                 padding,
                 channels=None,
                 num_groups=1,
                 act='hard_swish'):
        """Conv (Kaiming init, no bias) + BN with fused activation and zero
        L2 decay on the BN parameters."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            groups=num_groups,
                            weight_attr=ParamAttr(initializer=KaimingNormal()),
                            bias_attr=False)

        # BN scale/offset excluded from weight decay.
        no_decay = L2Decay(0.0)
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(regularizer=no_decay),
                                     bias_attr=ParamAttr(regularizer=no_decay))
示例#7
0
    def __init__(self,
                 input_channels,
                 output_channels,
                 filter_size,
                 stride=1,
                 padding=0,
                 act=None,
                 name=None):
        """Conv + BN (TF-style epsilon/momentum) followed by a standalone
        activation wrapper."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(num_channels=input_channels,
                            num_filters=output_channels,
                            filter_size=filter_size,
                            stride=stride,
                            padding=padding,
                            param_attr=ParamAttr(name=name + "/weights"),
                            bias_attr=False)
        bn_prefix = name + "/BatchNorm"
        self._bn = BatchNorm(num_features=output_channels,
                             epsilon=1e-3,
                             momentum=0.99,
                             weight_attr=ParamAttr(name=bn_prefix + "/gamma"),
                             bias_attr=ParamAttr(name=bn_prefix + "/beta"))

        # Activation is applied separately (not fused into BN).
        self._act_op = layer_utils.Activation(act=act)
    def __init__(self,
                 input_channels,
                 output_channels,
                 filter_size,
                 stride=1,
                 padding=0,
                 act=None,
                 name=None):
        """Conv + BN with TF-style naming (weights / gamma / beta /
        moving_mean / moving_variance) and fused activation."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=input_channels,
                            out_channels=output_channels,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            weight_attr=ParamAttr(name=name + "/weights"),
                            bias_attr=False)
        bn_prefix = name + "/BatchNorm"
        self._bn = BatchNorm(num_channels=output_channels,
                             act=act,
                             epsilon=1e-3,
                             momentum=0.99,
                             param_attr=ParamAttr(name=bn_prefix + "/gamma"),
                             bias_attr=ParamAttr(name=bn_prefix + "/beta"),
                             moving_mean_name=bn_prefix + "/moving_mean",
                             moving_variance_name=bn_prefix +
                             "/moving_variance")
示例#9
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None,
                 name=None,
                 data_format="NCHW"):
        """Conv + BN unit using ResNet-style BN parameter naming."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False,
                            data_format=data_format)
        # Stem conv uses "bn_conv1"; block convs drop the "res" prefix
        # (e.g. "res2a_branch1" -> "bn2a_branch1").
        bn_name = "bn_" + name if name == "conv1" else "bn" + name[3:]
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(name=bn_name +
                                                          "_scale"),
                                     bias_attr=ParamAttr(bn_name + "_offset"),
                                     moving_mean_name=bn_name + "_mean",
                                     moving_variance_name=bn_name +
                                     "_variance",
                                     data_layout=data_format)
示例#10
0
 def __init__(self,
              in_c,
              out_c,
              filter_size,
              stride,
              padding,
              num_groups=1,
              if_act=True,
              act=None,
              use_cudnn=True,
              name=""):
     """Conv + BN (no fused activation); `act` is applied later when if_act."""
     super(ConvBNLayer, self).__init__()
     self.if_act = if_act
     self.act = act
     self.conv = Conv2D(in_channels=in_c,
                        out_channels=out_c,
                        kernel_size=filter_size,
                        stride=stride,
                        padding=padding,
                        groups=num_groups,
                        weight_attr=ParamAttr(name=name + "_weights"),
                        bias_attr=False)
     bn_prefix = name + "_bn"
     # BN scale/offset are excluded from weight decay.
     self.bn = BatchNorm(num_channels=out_c,
                         act=None,
                         param_attr=ParamAttr(name=bn_prefix + "_scale",
                                              regularizer=L2Decay(0.0)),
                         bias_attr=ParamAttr(name=bn_prefix + "_offset",
                                             regularizer=L2Decay(0.0)),
                         moving_mean_name=bn_prefix + "_mean",
                         moving_variance_name=bn_prefix + "_variance")
示例#11
0
 def __init__(self,
              in_channels,
              num_filters,
              filter_size,
              stride=1,
              padding=0,
              groups=1,
              act='relu',
              name=None):
     """Conv + BN with torch-style parameter naming (.conv.weight, .bn.*)."""
     super(ConvBNLayer, self).__init__()
     self.conv = Conv2D(num_channels=in_channels,
                        num_filters=num_filters,
                        filter_size=filter_size,
                        stride=stride,
                        padding=padding,
                        groups=groups,
                        act=None,
                        param_attr=paddle.ParamAttr(name=name +
                                                    ".conv.weight"),
                        bias_attr=False)
     bn_prefix = name + ".bn"
     self.bn = BatchNorm(num_filters,
                         act=act,
                         epsilon=0.001,
                         param_attr=paddle.ParamAttr(name=bn_prefix +
                                                     ".weight"),
                         bias_attr=paddle.ParamAttr(name=bn_prefix + ".bias"),
                         moving_mean_name=bn_prefix + '.running_mean',
                         moving_variance_name=bn_prefix + '.running_var')
示例#12
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None,
                 name=None,
                 data_format='NCHW'):
        """Conv + BN unit; BN parameters are named from the conv name + '_bn'."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False,
                            data_format=data_format)
        norm_name = name + '_bn'
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(name=norm_name +
                                                          '_scale'),
                                     bias_attr=ParamAttr(norm_name + '_offset'),
                                     moving_mean_name=norm_name + '_mean',
                                     moving_variance_name=norm_name +
                                     '_variance',
                                     data_layout=data_format)
示例#13
0
    def __init__(self,
                 num_channels: int,
                 num_filters: int,
                 filter_size: int,
                 stride: int = 1,
                 groups: int = 1,
                 act: str = None,
                 name: str = None):
        """Conv + BN unit with ResNet-style BN parameter naming."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2d(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False)
        # Stem conv uses "bn_conv1"; block convs drop the "res" prefix.
        bn_name = "bn_" + name if name == "conv1" else "bn" + name[3:]
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(name=bn_name +
                                                          '_scale'),
                                     bias_attr=ParamAttr(bn_name + '_offset'),
                                     moving_mean_name=bn_name + '_mean',
                                     moving_variance_name=bn_name +
                                     '_variance')
示例#14
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 pad=0,
                 groups=1,
                 act="relu",
                 name=None):
        """Pre-activation unit: BN (with fused act) applied BEFORE the conv."""
        super(BNACConvLayer, self).__init__()
        self.num_channels = num_channels

        bn_prefix = name + '_bn'
        self._batch_norm = BatchNorm(num_channels,
                                     act=act,
                                     param_attr=ParamAttr(name=bn_prefix +
                                                          '_scale'),
                                     bias_attr=ParamAttr(bn_prefix + '_offset'),
                                     moving_mean_name=bn_prefix + '_mean',
                                     moving_variance_name=bn_prefix +
                                     '_variance')

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=pad,
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False)
示例#15
0
    def __init__(self,
                 num_channels: int,
                 filter_size: int,
                 num_filters: int,
                 stride: int,
                 padding: int,
                 channels: int = None,
                 num_groups: int = 1,
                 act: str = 'relu',
                 name: str = None):
        """Conv (MSRA init, no bias) + BN with fused activation."""
        super(ConvBNLayer, self).__init__()

        weight_attr = ParamAttr(initializer=MSRA(), name=name + "_weights")
        self._conv = Conv2d(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            groups=num_groups,
                            weight_attr=weight_attr,
                            bias_attr=False)

        bn_prefix = name + "_bn"
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(bn_prefix + "_scale"),
                                     bias_attr=ParamAttr(bn_prefix + "_offset"),
                                     moving_mean_name=bn_prefix + "_mean",
                                     moving_variance_name=bn_prefix +
                                     "_variance")
示例#16
0
    def __init__(self,
                 num_channels,
                 filter_size,
                 num_filters,
                 stride,
                 padding,
                 channels=None,
                 num_groups=1,
                 name=None,
                 use_cudnn=True):
        """Conv (no bias) + BN without fused activation."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            groups=num_groups,
                            weight_attr=ParamAttr(name=name + "_weights"),
                            bias_attr=False)

        bn_prefix = name + "_bn"
        self._batch_norm = BatchNorm(
            num_filters,
            param_attr=ParamAttr(name=bn_prefix + "_scale"),
            bias_attr=ParamAttr(name=bn_prefix + "_offset"),
            moving_mean_name=bn_prefix + "_mean",
            moving_variance_name=bn_prefix + "_variance")
示例#17
0
    def __init__(self, num_classes=59, backbone='resnet50'):
        """PSPNet: ResNet backbone + pyramid pooling module + conv classifier.

        NOTE(review): the ``backbone`` argument is accepted but never used —
        a ResNet101 is always constructed below. Confirm whether 'resnet50'
        support was intended.

        Args:
            num_classes: number of segmentation classes produced by the head.
            backbone: currently ignored (see NOTE above).
        """
        super(PSPNet, self).__init__()

        res = ResNet101(pretrained=False)
        # stem: res.conv, res.pool2d_max
        self.layer0 = Sequential(
            res.conv,
            res.pool2d_max
        )
        self.layer1 = res.layer1
        self.layer2 = res.layer2
        self.layer3 = res.layer3
        self.layer4 = res.layer4

        num_channels = 2048
        # psp: 2048 -> 2048*2
        # NOTE(review): 'pspmoduls' looks like a typo for 'psp_module';
        # kept as-is because forward() elsewhere may reference this name.
        self.pspmoduls = PSPModule(num_channels, [1,2,3,6])
        num_channels *= 2
        # cls: 2048*2 -> 512 -> num_classes
        self.classifier = Sequential(
            Conv2D(num_channels,512,kernel_size=3,padding=1),
            BatchNorm(512,act='relu'),
            Dropout(0.1),
            Conv2D(512,num_classes,kernel_size=1)
        )
示例#18
0
文件: layers.py 项目: zdqf/hapi
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 padding=0,
                 stddev=0.02,
                 norm=True,
                 act='leaky_relu',
                 relufactor=0.0,
                 use_bias=False):
        """Conv with Normal(0, stddev) init, optionally followed by BN.

        Note: ``self.bn`` is only created when ``norm`` is True.
        """
        super(ConvBN, self).__init__()

        conv_weight_attr = paddle.ParamAttr(
            initializer=nn.initializer.Normal(loc=0.0, scale=stddev))
        self.conv = Conv2d(in_channels=num_channels,
                           out_channels=num_filters,
                           kernel_size=filter_size,
                           stride=stride,
                           padding=padding,
                           weight_attr=conv_weight_attr,
                           bias_attr=use_bias)
        if norm:
            # GAN-style BN init: scale ~ N(1, 0.02), offset = 0; statistics
            # are tracked even in test mode.
            self.bn = BatchNorm(
                num_filters,
                param_attr=paddle.ParamAttr(
                    initializer=nn.initializer.Normal(1.0, 0.02)),
                bias_attr=paddle.ParamAttr(
                    initializer=nn.initializer.Constant(0.0)),
                is_test=False,
                trainable_statistics=True)
        self.relufactor = relufactor
        self.norm = norm
        self.act = act
示例#19
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 padding=0,
                 act=None,
                 name=None):
        """Conv (with bias) + BN, using '.output.1' style parameter names."""
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=padding,
                            groups=groups,
                            weight_attr=ParamAttr(name=name +
                                                  ".conv2d.output.1.w_0"),
                            bias_attr=ParamAttr(name=name +
                                                ".conv2d.output.1.b_0"))
        norm_name = name + "_bn"
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(name=norm_name +
                                                          ".output.1.w_0"),
                                     bias_attr=ParamAttr(norm_name +
                                                         ".output.1.b_0"),
                                     moving_mean_name=norm_name + "_mean",
                                     moving_variance_name=norm_name +
                                     "_variance")
示例#20
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 groups=1,
                 act="relu",
                 name=None):
        """Conv (Kaiming init) + BN with zero L2 decay on BN parameters."""
        super(ConvBNLayer, self).__init__()
        self._conv = Conv2D(in_channels=in_channels,
                            out_channels=out_channels,
                            kernel_size=kernel_size,
                            stride=stride,
                            padding=(kernel_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            weight_attr=ParamAttr(initializer=KaimingNormal(),
                                                  name=name + "_weights"),
                            bias_attr=False)

        bn_prefix = name + "_bn"
        no_decay = L2Decay(0.0)  # BN scale/offset excluded from weight decay
        self._batch_norm = BatchNorm(num_channels=out_channels,
                                     act=act,
                                     param_attr=ParamAttr(
                                         name=bn_prefix + "_scale",
                                         regularizer=no_decay),
                                     bias_attr=ParamAttr(
                                         name=bn_prefix + "_offset",
                                         regularizer=no_decay),
                                     moving_mean_name=bn_prefix + "_mean",
                                     moving_variance_name=bn_prefix +
                                     "_variance")
示例#21
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 dilation=1,
                 groups=1,
                 act=None,
                 name=None):
        """Dilated conv + BN; BN parameters carry zero L2 decay."""
        super(ConvBNLayer, self).__init__()

        bn_decay = 0.0

        self._conv = Conv2D(in_channels=num_channels,
                            out_channels=num_filters,
                            kernel_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            dilation=dilation,
                            groups=groups,
                            weight_attr=ParamAttr(name=name + "_weight"),
                            bias_attr=False)
        self._batch_norm = BatchNorm(num_filters,
                                     act=act,
                                     param_attr=ParamAttr(
                                         name=name + "_scale",
                                         regularizer=L2Decay(bn_decay)),
                                     bias_attr=ParamAttr(
                                         name + "_offset",
                                         regularizer=L2Decay(bn_decay)),
                                     moving_mean_name=name + "_mean",
                                     moving_variance_name=name + "_variance")
示例#22
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act="relu",
                 name=None):
        """Conv (Normal(0.001) init) + BN with constant-initialized scale/offset.

        The activation is stored on ``self.act`` rather than fused into BN.
        """
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(num_channels=num_channels,
                            num_filters=num_filters,
                            filter_size=filter_size,
                            stride=stride,
                            padding=(filter_size - 1) // 2,  # 'same' padding
                            groups=groups,
                            param_attr=ParamAttr(
                                initializer=Normal(scale=0.001),
                                name=name + "_weights"),
                            bias_attr=False)
        norm_name = name + '_bn'
        # scale initialized to 1, offset to 0.
        self._batch_norm = BatchNorm(
            num_filters,
            weight_attr=ParamAttr(name=norm_name + '_scale',
                                  initializer=fluid.initializer.Constant(1.0)),
            bias_attr=ParamAttr(norm_name + '_offset',
                                initializer=fluid.initializer.Constant(0.0)))
        self.act = act
示例#23
0
 def __init__(self,
              input_channels,
              output_channels,
              filter_size,
              stride=1,
              groups=1,
              act=None,
              name=None):
     """Conv + BN whose parameter names mirror a torch-style ResNet state
     dict (conv weights as '<name>.weight', BN as '<bn_name>.weight/.bias/
     .running_mean/.running_var').

     NOTE(review): the slicing below assumes a specific naming scheme such
     as 'conv1' / 'layerX.Y.convZ' / '...downsample' — confirm against the
     callers that build ``name``.
     """
     super(ConvBNLayer, self).__init__()
     # Downsample convs live at index 0 of a Sequential in the reference
     # naming, hence the ".0" suffix.
     if "downsample" in name:
         conv_name = name + ".0"
     else:
         conv_name = name
     self._conv = Conv2D(in_channels=input_channels,
                         out_channels=output_channels,
                         kernel_size=filter_size,
                         stride=stride,
                         padding=(filter_size - 1) // 2,
                         groups=groups,
                         weight_attr=ParamAttr(name=conv_name + ".weight"),
                         bias_attr=False)
     if "downsample" in name:
         # Keep the block prefix (first 9 chars) and point at the BN that
         # follows the downsample conv ("downsample.1").
         bn_name = name[:9] + "downsample.1"
     else:
         if "conv1" == name:
             # Stem conv pairs with "bn1".
             bn_name = "bn" + name[-1]
         else:
             # Block convs: keep the prefix up to the conv index (one char
             # longer when the sub-block index is two digits), then swap
             # "convN" for "bnN".
             bn_name = (name[:10] if name[7:9].isdigit() else
                        name[:9]) + "bn" + name[-1]
     self._bn = BatchNorm(num_channels=output_channels,
                          act=act,
                          param_attr=ParamAttr(name=bn_name + ".weight"),
                          bias_attr=ParamAttr(name=bn_name + ".bias"),
                          moving_mean_name=bn_name + ".running_mean",
                          moving_variance_name=bn_name + ".running_var")
示例#24
0
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None,
                 dilation=1,
                 padding=None,
                 name=None):
        """Conv + BN with unnamed parameters.

        Args:
            num_channels: input channel count.
            num_filters: output channel count.
            filter_size: square kernel size.
            stride: conv stride.
            groups: conv groups.
            act: activation fused into BatchNorm (None for linear).
            dilation: conv dilation.
            padding: explicit padding; defaults to 'same'
                ((filter_size - 1) // 2) when None.
            name: passed to the parent Layer constructor.
        """
        super(ConvBNLayer, self).__init__(name)

        # Default to 'same' padding; the original's `else: padding = padding`
        # branch was a no-op and has been dropped.
        if padding is None:
            padding = (filter_size - 1) // 2

        self.conv = Conv2D(in_channels=num_channels,
                           out_channels=num_filters,
                           kernel_size=filter_size,
                           stride=stride,
                           padding=padding,
                           groups=groups,
                           dilation=dilation,
                           bias_attr=False)
        self.bn = BatchNorm(num_filters, act=act)
示例#25
0
    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        stride,
        padding,
        groups=1,
        act=None,
        name=None,
    ):
        """Conv (Kaiming init, no bias) + BN with fused activation."""
        super(ConvBNLayer, self).__init__()
        weight_attr = ParamAttr(initializer=KaimingNormal(),
                                name=name + "_weights")
        self._conv = Conv2D(in_channels=in_channels,
                            out_channels=out_channels,
                            kernel_size=kernel_size,
                            stride=stride,
                            padding=padding,
                            groups=groups,
                            weight_attr=weight_attr,
                            bias_attr=False)

        bn_prefix = name + "_bn"
        self._batch_norm = BatchNorm(out_channels,
                                     act=act,
                                     param_attr=ParamAttr(name=bn_prefix +
                                                          "_scale"),
                                     bias_attr=ParamAttr(name=bn_prefix +
                                                         "_offset"),
                                     moving_mean_name=bn_prefix + "_mean",
                                     moving_variance_name=bn_prefix +
                                     "_variance")
示例#26
0
    def __init__(self,
                 in_c,
                 out_c,
                 filter_size,
                 stride,
                 padding,
                 num_groups=1,
                 if_act=True,
                 act=None):
        """Conv + BN; a separate activation layer is built via _create_act."""
        super().__init__()

        self.conv = Conv2D(in_channels=in_c,
                           out_channels=out_c,
                           kernel_size=filter_size,
                           stride=stride,
                           padding=padding,
                           groups=num_groups,
                           bias_attr=False)
        # BN scale/offset excluded from weight decay; activation not fused.
        no_decay = L2Decay(0.0)
        self.bn = BatchNorm(num_channels=out_c,
                            act=None,
                            param_attr=ParamAttr(regularizer=no_decay),
                            bias_attr=ParamAttr(regularizer=no_decay))
        self.if_act = if_act
        self.act = _create_act(act)
示例#27
0
    def __init__(self,
                 input_channels,
                 output_channels,
                 stride,
                 filter,
                 dilation=1,
                 act=None,
                 name=None):
        """Depthwise-separable conv: depthwise conv + BN, then 1x1 pointwise
        conv + BN, each BN with fused activation and TF-style naming."""
        super(Seperate_Conv, self).__init__()

        dw_prefix = name + "/depthwise"
        self._conv1 = Conv2D(
            in_channels=input_channels,
            out_channels=input_channels,
            kernel_size=filter,
            stride=stride,
            groups=input_channels,  # depthwise: one filter per channel
            padding=(filter) // 2 * dilation,
            dilation=dilation,
            weight_attr=ParamAttr(name=dw_prefix + "/weights"),
            bias_attr=False)
        self._bn1 = BatchNorm(
            input_channels,
            act=act,
            epsilon=1e-3,
            momentum=0.99,
            param_attr=ParamAttr(name=dw_prefix + "/BatchNorm/gamma"),
            bias_attr=ParamAttr(name=dw_prefix + "/BatchNorm/beta"),
            moving_mean_name=dw_prefix + "/BatchNorm/moving_mean",
            moving_variance_name=dw_prefix + "/BatchNorm/moving_variance")

        pw_prefix = name + "/pointwise"
        self._conv2 = Conv2D(
            input_channels,
            output_channels,
            1,  # 1x1 pointwise mixing
            stride=1,
            groups=1,
            padding=0,
            weight_attr=ParamAttr(name=pw_prefix + "/weights"),
            bias_attr=False)
        self._bn2 = BatchNorm(
            output_channels,
            act=act,
            epsilon=1e-3,
            momentum=0.99,
            param_attr=ParamAttr(name=pw_prefix + "/BatchNorm/gamma"),
            bias_attr=ParamAttr(name=pw_prefix + "/BatchNorm/beta"),
            moving_mean_name=pw_prefix + "/BatchNorm/moving_mean",
            moving_variance_name=pw_prefix + "/BatchNorm/moving_variance")
示例#28
0
    def __init__(self,
                 input_channels,
                 output_channels,
                 stride,
                 filter,
                 dilation=1,
                 act=None,
                 name=None):
        """Depthwise-separable conv (old fluid API): depthwise conv + BN +
        activation wrapper, then 1x1 pointwise conv + BN + activation wrapper."""
        super(Seperate_Conv, self).__init__()

        dw_prefix = name + "/depthwise"
        self._conv1 = Conv2D(num_channels=input_channels,
                             num_filters=input_channels,
                             filter_size=filter,
                             stride=stride,
                             groups=input_channels,  # depthwise
                             padding=(filter) // 2 * dilation,
                             dilation=dilation,
                             param_attr=ParamAttr(name=dw_prefix + "/weights"),
                             bias_attr=False)
        self._bn1 = BatchNorm(
            input_channels,
            epsilon=1e-3,
            momentum=0.99,
            weight_attr=ParamAttr(name=dw_prefix + "/BatchNorm/gamma"),
            bias_attr=ParamAttr(name=dw_prefix + "/BatchNorm/beta"))

        self._act_op1 = layer_utils.Activation(act=act)

        pw_prefix = name + "/pointwise"
        self._conv2 = Conv2D(input_channels,
                             output_channels,
                             1,  # 1x1 pointwise mixing
                             stride=1,
                             groups=1,
                             padding=0,
                             param_attr=ParamAttr(name=pw_prefix + "/weights"),
                             bias_attr=False)
        self._bn2 = BatchNorm(
            output_channels,
            epsilon=1e-3,
            momentum=0.99,
            weight_attr=ParamAttr(name=pw_prefix + "/BatchNorm/gamma"),
            bias_attr=ParamAttr(name=pw_prefix + "/BatchNorm/beta"))

        self._act_op2 = layer_utils.Activation(act=act)
示例#29
0
    def __init__(self, num_classes=10, classifier_activation='softmax'):
        """LeNet-style conv/fc stack with explicitly named parameters
        (used so quantization tests can address weights by name)."""
        super(ImperativeLenet, self).__init__()
        # One named ParamAttr per weight/bias tensor.
        attr = {n: fluid.ParamAttr(name=n)
                for n in ("conv2d_w_1", "conv2d_w_2",
                          "fc_w_1", "fc_w_2", "fc_w_3",
                          "conv2d_b_1", "conv2d_b_2",
                          "fc_b_1", "fc_b_2", "fc_b_3")}
        self.features = Sequential(
            Conv2D(in_channels=1,
                   out_channels=6,
                   kernel_size=3,
                   stride=1,
                   padding=1,
                   weight_attr=attr["conv2d_w_1"],
                   bias_attr=attr["conv2d_b_1"]),
            BatchNorm(6),
            ReLU(),
            Pool2D(pool_size=2, pool_type='max', pool_stride=2),
            Conv2D(in_channels=6,
                   out_channels=16,
                   kernel_size=5,
                   stride=1,
                   padding=0,
                   weight_attr=attr["conv2d_w_2"],
                   bias_attr=attr["conv2d_b_2"]),
            BatchNorm(16),
            ReLU6(),
            Pool2D(pool_size=2, pool_type='max', pool_stride=2))

        self.fc = Sequential(
            Linear(in_features=400,
                   out_features=120,
                   weight_attr=attr["fc_w_1"],
                   bias_attr=attr["fc_b_1"]),
            LeakyReLU(),
            Linear(in_features=120,
                   out_features=84,
                   weight_attr=attr["fc_w_2"],
                   bias_attr=attr["fc_b_2"]),
            Sigmoid(),
            Linear(in_features=84,
                   out_features=num_classes,
                   weight_attr=attr["fc_w_3"],
                   bias_attr=attr["fc_b_3"]),
            Softmax())
示例#30
0
 def __init__(self, num_channels, bin_size_list):
     """Pyramid pooling: one adaptive-max-pool + 1x1 conv + BN(relu) branch
     per bin size; each branch outputs num_channels // len(bin_size_list)
     channels."""
     super(PSPModule, self).__init__()
     self.bn_size_list = bin_size_list
     num_filters = num_channels // len(bin_size_list)
     # NOTE(review): branches are kept in a plain Python list — sublayers
     # stored this way may not be registered with the parent Layer; confirm
     # their parameters are tracked by the caller.
     self.features = [
         Sequential(
             AdaptiveMaxPool2D(bin_size),
             Conv2D(in_channels=num_channels,
                    out_channels=num_filters,
                    kernel_size=1),
             BatchNorm(num_filters, act='relu'))
         for bin_size in self.bn_size_list
     ]