Example #1
    def __init__(self, inplanes, planes, stride=1):
        super().__init__()
        self.conv1 = nn.Conv2D(inplanes, planes, 1, bias_attr=False)
        self.bn1 = nn.BatchNorm2D(planes)

        self.conv2 = nn.Conv2D(planes, planes, 3, padding=1, bias_attr=False)
        self.bn2 = nn.BatchNorm2D(planes)

        self.avgpool = nn.AvgPool2D(stride) if stride > 1 else Identity()

        self.conv3 = nn.Conv2D(planes, planes * self.expansion, 1, bias_attr=False)
        self.bn3 = nn.BatchNorm2D(planes * self.expansion)

        self.relu = nn.ReLU()
        self.downsample = None
        self.stride = stride

        if stride > 1 or inplanes != planes * Bottleneck.expansion:
            self.downsample = nn.Sequential(
                ("-1", nn.AvgPool2D(stride)),
                (
                    "0",
                    nn.Conv2D(
                        inplanes, planes * self.expansion, 1, stride=1, bias_attr=False
                    ),
                ),
                ("1", nn.BatchNorm2D(planes * self.expansion)),
            )
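Note: the excerpt references a class attribute `expansion` and an `Identity` layer defined outside the snippet. A minimal sketch of the assumed context (CLIP-style Bottleneck, where expansion = 4):

    # assumed context, not part of the original excerpt
    import paddle.nn as nn

    class Identity(nn.Layer):
        def forward(self, x):
            return x

    class Bottleneck(nn.Layer):
        expansion = 4  # conv3 widens planes to planes * expansion
        # ... __init__ as in the excerpt above ...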
Example #2
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 groups=1,
                 is_vd_mode=False,
                 act=None,
                 name=None):
        super(ConvBNLayer, self).__init__()

        self.is_vd_mode = is_vd_mode
        self._pool2d_avg = nn.AvgPool2D(
            kernel_size=2, stride=2, padding=0, ceil_mode=True)
        self._conv = nn.Conv2D(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=(kernel_size - 1) // 2,
            groups=groups,
            weight_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False)
        if name == "conv1":
            bn_name = "bn_" + name
        else:
            bn_name = "bn" + name[3:]
        self._batch_norm = nn.BatchNorm(
            out_channels,
            act=act,
            param_attr=ParamAttr(name=bn_name + '_scale'),
            bias_attr=ParamAttr(bn_name + '_offset'),
            moving_mean_name=bn_name + '_mean',
            moving_variance_name=bn_name + '_variance')
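The forward pass is omitted above. A minimal sketch, assuming the standard ResNet-vd pattern where `is_vd_mode` prepends the 2x2 average pool:

    # hedged sketch of the omitted forward pass
    def forward(self, inputs):
        if self.is_vd_mode:
            inputs = self._pool2d_avg(inputs)
        y = self._conv(inputs)
        y = self._batch_norm(y)
        return y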
Example #3
 def __init__(self, in_channels, num_classes, name=None):
     super(InceptionAux, self).__init__()
     self.num_classes = num_classes
     self.pool0 = nn.AvgPool2D(kernel_size=5, stride=3)
     self.conv0 = ConvBNLayer(in_channels, 128, 1, name=name + '.conv0')
     self.conv1 = ConvBNLayer(128, 768, 5, name=name + '.conv1')
     self.pool1 = nn.AdaptiveAvgPool2D(1)
Example #4
 def __init__(self, channels, kernel_size, stride):
     super(Involution, self).__init__()
     self.kernel_size = kernel_size
     self.stride = stride
     self.channels = channels
     reduction_ratio = 4
     self.group_channels = 16
     self.groups = self.channels // self.group_channels
     self.conv1 = nn.Sequential(
         (
             "conv",
             nn.Conv2D(
                 in_channels=channels,
                 out_channels=channels // reduction_ratio,
                 kernel_size=1,
                 bias_attr=False,
             ),
         ),
         ("bn", nn.BatchNorm2D(channels // reduction_ratio)),
         ("activate", nn.ReLU()),
     )
     self.conv2 = nn.Sequential((
         "conv",
         nn.Conv2D(
             in_channels=channels // reduction_ratio,
             out_channels=kernel_size**2 * self.groups,
             kernel_size=1,
             stride=1,
         ),
     ))
     if stride > 1:
         self.avgpool = nn.AvgPool2D(stride, stride)
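The snippet only builds the kernel-generating convolutions. A hedged sketch of a matching forward pass, assuming the standard involution formulation (per-pixel kernels weighting an unfolded neighborhood):

 # hedged sketch; assumes: import paddle.nn.functional as F
 def forward(self, x):
     # generate per-pixel kernels from the (optionally pooled) input
     weight = self.conv2(self.conv1(x if self.stride == 1 else self.avgpool(x)))
     b, _, h, w = weight.shape
     weight = weight.reshape(
         [b, self.groups, self.kernel_size**2, h, w]).unsqueeze(2)
     # unfold neighborhoods and apply the kernels group-wise
     out = F.unfold(x, self.kernel_size, strides=self.stride,
                    paddings=(self.kernel_size - 1) // 2)
     out = out.reshape(
         [b, self.groups, self.group_channels, self.kernel_size**2, h, w])
     return (weight * out).sum(axis=3).reshape([b, self.channels, h, w])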
Example #5
    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        stride=1,
        dilation=1,
        groups=1,
        is_vd_mode=False,
        act=None,
    ):
        super(ConvBNLayer, self).__init__()

        self.is_vd_mode = is_vd_mode
        self._pool2d_avg = nn.AvgPool2D(kernel_size=2,
                                        stride=2,
                                        padding=0,
                                        ceil_mode=True)
        self._conv = nn.Conv2D(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=(kernel_size - 1) // 2
                               if dilation == 1 else 0,
                               dilation=dilation,
                               groups=groups,
                               bias_attr=False)

        self._batch_norm = layers.SyncBatchNorm(out_channels)
        self._act_op = layers.Activation(act=act)
Example #6
 def __init__(self, num_classes=751):
     super(Net, self).__init__()
     # 3 128 64
     self.conv = nn.Sequential(
         nn.Conv2D(3, 64, 3, stride=1, padding=1),
         nn.BatchNorm2D(64),
         nn.ReLU(),
         # nn.Conv2d(32,32,3,stride=1,padding=1),
         # nn.BatchNorm2d(32),
         # nn.ReLU(inplace=True),
         nn.MaxPool2D(3, 2, padding=1),
     )
     # 32 64 32
     self.layer1 = make_layers(64, 64, 2, False)
     # 32 64 32
     self.layer2 = make_layers(64, 128, 2, True)
     # 64 32 16
     self.layer3 = make_layers(128, 256, 2, True)
     # 128 16 8
     self.layer4 = make_layers(256, 512, 2, True)
     # 256 8 4
     self.avgpool = nn.AvgPool2D((8, 4), 1)
     # 256 1 1
     self.classifier = nn.Sequential(
         nn.Linear(512, 256),
         nn.BatchNorm1D(256),
         nn.ReLU(),
         nn.Dropout(),
         nn.Linear(256, num_classes),
     )
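A hedged sketch of the forward pass implied by the layer names (the shape comments suggest a 3x128x64 re-ID input; this reconstruction is an assumption, not part of the original source):

 # hedged sketch of the omitted forward pass
 def forward(self, x):
     x = self.conv(x)
     x = self.layer1(x)
     x = self.layer2(x)
     x = self.layer3(x)
     x = self.layer4(x)
     x = self.avgpool(x)        # -> [N, 512, 1, 1]
     x = paddle.flatten(x, 1)
     return self.classifier(x)

 # usage: net = Net(); logits = net(paddle.randn([1, 3, 128, 64]))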
Example #7
 def __init__(self, num_classes):
     super(ResidualAttentionModel_92_32input_update, self).__init__()
     self.conv1 = nn.Sequential(
         nn.Conv2D(3,
                   32,
                   kernel_size=3,
                   stride=1,
                   padding=1,
                   bias_attr=False), nn.BatchNorm2D(32), nn.ReLU())  # 32*32
     self.residual_block1 = ResidualBlock(32, 128)  # 32*32
     self.attention_module1 = AttentionModule_stage1_cifar(
         128, 128, size1=(32, 32), size2=(16, 16))  # 32*32
     self.residual_block2 = ResidualBlock(128, 256, 2)  # 16*16
     self.attention_module2 = AttentionModule_stage2_cifar(
         256, 256, size=(16, 16))  # 16*16
     self.attention_module2_2 = AttentionModule_stage2_cifar(
         256, 256, size=(16, 16))  # 16*16 # tbq add
     self.residual_block3 = ResidualBlock(256, 512, 2)  # 4*4
     self.attention_module3 = AttentionModule_stage3_cifar(512, 512)  # 8*8
     self.attention_module3_2 = AttentionModule_stage3_cifar(
         512, 512)  # 8*8 # tbq add
     self.attention_module3_3 = AttentionModule_stage3_cifar(
         512, 512)  # 8*8 # tbq add
     self.residual_block4 = ResidualBlock(512, 1024)  # 8*8
     self.residual_block5 = ResidualBlock(1024, 1024)  # 8*8
     self.residual_block6 = ResidualBlock(1024, 1024)  # 8*8
     self.mpool2 = nn.Sequential(nn.BatchNorm2D(1024), nn.ReLU(),
                                 nn.AvgPool2D(kernel_size=8))
     self.fc = nn.Linear(1024, num_classes)
Example #8
 def __init__(self, num_classes):
     super(ResidualAttentionModel_448input, self).__init__()
     self.conv1 = nn.Sequential(
         nn.Conv2D(3,
                   64,
                   kernel_size=7,
                   stride=2,
                   padding=3,
                   bias_attr=False), nn.BatchNorm2D(64), nn.ReLU())
     self.mpool1 = nn.MaxPool2D(kernel_size=3, stride=2, padding=1)
     # tbq add
     # 112*112
     self.residual_block0 = ResidualBlock(64, 128)
     self.attention_module0 = AttentionModule_stage0(128, 128)
     # tbq add end
     self.residual_block1 = ResidualBlock(128, 256, 2)
     # 56*56
     self.attention_module1 = AttentionModule_stage1(256, 256)
     self.residual_block2 = ResidualBlock(256, 512, 2)
     self.attention_module2 = AttentionModule_stage2(512, 512)
     self.attention_module2_2 = AttentionModule_stage2(512, 512)  # tbq add
     self.residual_block3 = ResidualBlock(512, 1024, 2)
     self.attention_module3 = AttentionModule_stage3(1024, 1024)
     self.attention_module3_2 = AttentionModule_stage3(1024,
                                                       1024)  # tbq add
     self.attention_module3_3 = AttentionModule_stage3(1024,
                                                       1024)  # tbq add
     self.residual_block4 = ResidualBlock(1024, 2048, 2)
     self.residual_block5 = ResidualBlock(2048, 2048)
     self.residual_block6 = ResidualBlock(2048, 2048)
     self.mpool2 = nn.Sequential(nn.BatchNorm2D(2048), nn.ReLU(),
                                 nn.AvgPool2D(kernel_size=7, stride=1))
     self.fc = nn.Linear(2048, num_classes)
Example #9
    def __init__(self, layers, output_dim, heads, input_resolution=224, width=64):
        super().__init__()
        self.output_dim = output_dim
        self.input_resolution = input_resolution

        self.conv1 = nn.Conv2D(
            3, width // 2, kernel_size=3, stride=2, padding=1, bias_attr=False
        )
        self.bn1 = nn.BatchNorm2D(width // 2)

        self.conv2 = nn.Conv2D(
            width // 2, width // 2, kernel_size=3, padding=1, bias_attr=False
        )
        self.bn2 = nn.BatchNorm2D(width // 2)

        self.conv3 = nn.Conv2D(
            width // 2, width, kernel_size=3, padding=1, bias_attr=False
        )
        self.bn3 = nn.BatchNorm2D(width)

        self.avgpool = nn.AvgPool2D(2)
        self.relu = nn.ReLU()

        # residual layers
        self._inplanes = width
        self.layer1 = self._make_layer(width, layers[0])
        self.layer2 = self._make_layer(width * 2, layers[1], stride=2)
        self.layer3 = self._make_layer(width * 4, layers[2], stride=2)
        self.layer4 = self._make_layer(width * 8, layers[3], stride=2)

        embed_dim = width * 32
        self.attnpool = AttentionPool2D(
            input_resolution // 32, embed_dim, heads, output_dim
        )
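The three conv-bn-relu pairs plus the average pool form the stem. A hedged sketch of how they are usually chained (CLIP-style ModifiedResNet assumed):

    # hedged sketch of the stem portion of forward()
    def stem(self, x):
        for conv, bn in [(self.conv1, self.bn1), (self.conv2, self.bn2),
                         (self.conv3, self.bn3)]:
            x = self.relu(bn(conv(x)))
        return self.avgpool(x)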
Example #10
 def add_block(
     self, i, group_1x1, group_3x3, group_trans, bottleneck, activation, use_se
 ):
     # Check if ith is the last one
     last = i == len(self.stages) - 1
     block = _SFR_DenseBlock(
         num_layers=self.stages[i],
         in_channels=self.num_features,
         growth_rate=self.growth[i],
         group_1x1=group_1x1,
         group_3x3=group_3x3,
         group_trans=group_trans,
         bottleneck=bottleneck,
         activation=activation,
         use_se=use_se,
     )
     self.features.add_sublayer("denseblock_%d" % (i + 1), block)
     self.num_features += self.stages[i] * self.growth[i]
     if not last:
         trans = _Transition()
         self.features.add_sublayer("transition_%d" % (i + 1), trans)
     else:
         self.features.add_sublayer("norm_last", nn.BatchNorm2D(self.num_features))
         self.features.add_sublayer("relu_last", nn.ReLU())
         self.features.add_sublayer("pool_last", nn.AvgPool2D(self.pool_size))
         # if useSE:
         self.features.add_sublayer(
             "se_last", SELayer(self.num_features, reduction=self.last_se_reduction)
         )
Example #11
    def __init__(self, inplanes, outplanes, dw_stride, act_layer=nn.GELU,
                 norm_layer=partial(nn.LayerNorm, epsilon=1e-6)):
        super(FCUDown, self).__init__()
        self.dw_stride = dw_stride

        self.conv_project = nn.Conv2D(inplanes, outplanes, kernel_size=1, stride=1, padding=0)
        self.sample_pooling = nn.AvgPool2D(kernel_size=dw_stride, stride=dw_stride)

        self.ln = norm_layer(outplanes)
        self.act = act_layer()
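A hedged sketch of the matching forward pass, assuming Conformer-style semantics (project, pool to patch resolution, normalize, then prepend the transformer branch's class token):

    # hedged sketch; x is the CNN feature map, x_t the transformer tokens
    def forward(self, x, x_t):
        x = self.conv_project(x)                                    # [N, C, H, W]
        x = self.sample_pooling(x).flatten(2).transpose([0, 2, 1])  # [N, L, C]
        x = self.act(self.ln(x))
        return paddle.concat([x_t[:, 0:1], x], axis=1)              # keep cls token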
Example #12
 def __init__(self, in_channels, ds=8, activation=nn.ReLU):
     super(BAM, self).__init__()
      self.key_channel = in_channels // 8
     self.activation = activation
     self.ds = ds
     self.pool = nn.AvgPool2D(self.ds)
     self.query_conv = nn.Conv2D(in_channels=in_channels, out_channels=in_channels // 8, kernel_size=1)
     self.key_conv = nn.Conv2D(in_channels=in_channels, out_channels=in_channels // 8, kernel_size=1)
     self.value_conv = nn.Conv2D(in_channels=in_channels, out_channels=in_channels, kernel_size=1)
      self.gamma = nn.ParameterList([
          paddle.create_parameter(
              shape=[1],
              dtype='float32',
              default_initializer=nn.initializer.Constant(value=0))
      ])
     self.softmax = nn.Softmax(axis=-1)
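A hedged sketch of the usual non-local forward for such a module (downsample by `ds`, attend, upsample, residual-add scaled by `gamma`):

 # hedged sketch; assumes: import paddle; import paddle.nn.functional as F
 def forward(self, x):
     x_ds = self.pool(x)
     b, c, h, w = x_ds.shape
     q = self.query_conv(x_ds).reshape([b, -1, h * w]).transpose([0, 2, 1])
     k = self.key_conv(x_ds).reshape([b, -1, h * w])
     attn = self.softmax(paddle.bmm(q, k))                 # [N, HW, HW]
     v = self.value_conv(x_ds).reshape([b, -1, h * w])
     out = paddle.bmm(v, attn.transpose([0, 2, 1])).reshape([b, c, h, w])
     out = F.interpolate(out, scale_factor=self.ds)        # back to input size
     return self.gamma[0] * out + x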
Example #13
    def __init__(self, inplanes, planes, stride=1):
        super().__init__()

        # all conv layers have stride 1. an avgpool is performed after the second convolution when stride > 1
        self.conv1 = nn.Conv2D(inplanes, planes, 1, bias_attr=False)
        self.bn1 = nn.BatchNorm2D(planes)

        self.conv2 = nn.Conv2D(planes, planes, 3, padding=1, bias_attr=False)
        self.bn2 = nn.BatchNorm2D(planes)

        self.avgpool = nn.AvgPool2D(stride) if stride > 1 else Identity()

        self.conv3 = nn.Conv2D(planes,
                               planes * self.expansion,
                               1,
                               bias_attr=False)
        self.bn3 = nn.BatchNorm2D(planes * self.expansion)

        self.relu = nn.ReLU()
        self.downsample = None
        self.stride = stride

        if stride > 1 or inplanes != planes * Bottleneck.expansion:
            # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1
            #             self.downsample = nn.Sequential(OrderedDict([
            #                 ("-1", nn.AvgPool2D(stride)),
            #                 ("0", nn.Conv2D(inplanes, planes * self.expansion, 1, stride=1, bias_attr=False)),
            #                 ("1", nn.BatchNorm2D(planes * self.expansion))
            #             ]))
            self.downsample = nn.Sequential(
                ("-1", nn.AvgPool2D(stride)),
                ("0",
                 nn.Conv2D(inplanes,
                           planes * self.expansion,
                           1,
                           stride=1,
                           bias_attr=False)),
                ("1", nn.BatchNorm2D(planes * self.expansion)))
Example #14
 def __init__(self,
              in_features,
              out_features,
              kernel_size=3,
              padding=1,
              groups=1):
     super(DownBlock2d, self).__init__()
     self.conv = nn.Conv2D(in_channels=in_features,
                           out_channels=out_features,
                           kernel_size=kernel_size,
                           padding=padding,
                           groups=groups)
     self.norm = nn.BatchNorm2D(out_features)
     self.pool = nn.AvgPool2D(kernel_size=(2, 2))
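A hedged sketch of the forward pass implied by the layer names (conv, norm, ReLU, then 2x2 average-pool downsampling):

  # hedged sketch; assumes: import paddle.nn.functional as F
  def forward(self, x):
      out = self.conv(x)
      out = self.norm(out)
      out = F.relu(out)
      return self.pool(out)   # halves spatial resolution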
Example #15
 def __init__(self, in_channels, key_channels, value_channels, scale=1, ds=1):
     super(_PAMBlock, self).__init__()
     self.scale = scale
     self.ds = ds
     self.pool = nn.AvgPool2D(self.ds)
     self.in_channels = in_channels
     self.key_channels = key_channels
     self.value_channels = value_channels
     self.f_key = nn.Sequential(
         nn.Conv2D(in_channels=self.in_channels, out_channels=self.key_channels, kernel_size=1, stride=1, padding=0),
         nn.BatchNorm2D(self.key_channels)
     )
     self.f_query = nn.Sequential(
         nn.Conv2D(in_channels=self.in_channels, out_channels=self.key_channels, kernel_size=1, stride=1, padding=0),
         nn.BatchNorm2D(self.key_channels)
     )
     self.f_value = nn.Conv2D(in_channels=self.in_channels, out_channels=self.value_channels, kernel_size=1, stride=1, padding=0)
Example #16
    def __init__(self, in_channels, c7, name=None):
        super(Fid_inceptionC, self).__init__()
        self.branch1x1 = ConvBNLayer(in_channels,
                                     192,
                                     1,
                                     name=name + '.branch1x1')

        self.branch7x7_1 = ConvBNLayer(in_channels,
                                       c7,
                                       1,
                                       name=name + '.branch7x7_1')
        self.branch7x7_2 = ConvBNLayer(c7,
                                       c7, (1, 7),
                                       padding=(0, 3),
                                       name=name + '.branch7x7_2')
        self.branch7x7_3 = ConvBNLayer(c7,
                                       192, (7, 1),
                                       padding=(3, 0),
                                       name=name + '.branch7x7_3')

        self.branch7x7dbl_1 = ConvBNLayer(in_channels,
                                          c7,
                                          1,
                                          name=name + '.branch7x7dbl_1')
        self.branch7x7dbl_2 = ConvBNLayer(c7,
                                          c7, (7, 1),
                                          padding=(3, 0),
                                          name=name + '.branch7x7dbl_2')
        self.branch7x7dbl_3 = ConvBNLayer(c7,
                                          c7, (1, 7),
                                          padding=(0, 3),
                                          name=name + '.branch7x7dbl_3')
        self.branch7x7dbl_4 = ConvBNLayer(c7,
                                          c7, (7, 1),
                                          padding=(3, 0),
                                          name=name + '.branch7x7dbl_4')
        self.branch7x7dbl_5 = ConvBNLayer(c7,
                                          192, (1, 7),
                                          padding=(0, 3),
                                          name=name + '.branch7x7dbl_5')

        self.branch_pool0 = nn.AvgPool2D(kernel_size=3, stride=1, padding=1)
        self.branch_pool = ConvBNLayer(in_channels,
                                       192,
                                       1,
                                       name=name + '.branch_pool')
Example #17
    def __init__(self, out_dim):
        super().__init__()

        self.db_branch_keep = nn.Sequential(
            layers.DepthwiseConvBN(out_dim, out_dim, 3),
            nn.Conv2D(out_dim, out_dim, 1))

        self.db_branch_down = nn.Sequential(
            layers.ConvBN(out_dim, out_dim, 3, stride=2),
            nn.AvgPool2D(kernel_size=3, stride=2, padding=1))

        self.sb_branch_keep = nn.Sequential(
            layers.DepthwiseConvBN(out_dim, out_dim, 3),
            nn.Conv2D(out_dim, out_dim, 1), layers.Activation(act='sigmoid'))

        self.sb_branch_up = layers.ConvBN(out_dim, out_dim, 3)

        self.conv = layers.ConvBN(out_dim, out_dim, 3)
Example #18
 def __init__(self,
              in_channel,
              out_channel,
              sigma_mapping=None,
              kernel_size=7,
              stride=1,
              groups=1,
              reduce_ratio=1,
              dilation=1,
              padding=3):
     """
     Initialization
     """
     super(Involution2D, self).__init__()
     self.in_channel = in_channel
     self.out_channel = out_channel
     self.kernel_size = kernel_size
     self.stride = stride
     self.groups = groups
     self.reduce_ratio = reduce_ratio
     self.dilation = dilation
     self.padding = padding
     self.sigma_mapping = nn.Sequential(
         nn.BatchNorm2D(num_features=self.out_channel // self.reduce_ratio),
         nn.ReLU())
     self.initial_mapping = nn.Conv2D(in_channels=self.in_channel,
                                      out_channels=self.out_channel,
                                      kernel_size=1,
                                      stride=1,
                                      padding=0)
     self.o_mapping = nn.AvgPool2D(kernel_size=self.stride,
                                   stride=self.stride)
     self.reduce_mapping = nn.Conv2D(in_channels=self.in_channel,
                                     out_channels=self.out_channel //
                                     self.reduce_ratio,
                                     kernel_size=1,
                                     stride=1,
                                     padding=0)
     self.span_mapping = nn.Conv2D(
         in_channels=self.out_channel // self.reduce_ratio,
         out_channels=self.kernel_size * self.kernel_size * self.groups,
         kernel_size=1,
         stride=1,
         padding=0)
Example #19
    def __init__(self, in_channels, name=None):
        super(Fid_inceptionE_1, self).__init__()
        self.branch1x1 = ConvBNLayer(in_channels,
                                     320,
                                     1,
                                     name=name + '.branch1x1')

        self.branch3x3_1 = ConvBNLayer(in_channels,
                                       384,
                                       1,
                                       name=name + '.branch3x3_1')
        self.branch3x3_2a = ConvBNLayer(384,
                                        384, (1, 3),
                                        padding=(0, 1),
                                        name=name + '.branch3x3_2a')
        self.branch3x3_2b = ConvBNLayer(384,
                                        384, (3, 1),
                                        padding=(1, 0),
                                        name=name + '.branch3x3_2b')

        self.branch3x3dbl_1 = ConvBNLayer(in_channels,
                                          448,
                                          1,
                                          name=name + '.branch3x3dbl_1')
        self.branch3x3dbl_2 = ConvBNLayer(448,
                                          384,
                                          3,
                                          padding=1,
                                          name=name + '.branch3x3dbl_2')
        self.branch3x3dbl_3a = ConvBNLayer(384,
                                           384, (1, 3),
                                           padding=(0, 1),
                                           name=name + '.branch3x3dbl_3a')
        self.branch3x3dbl_3b = ConvBNLayer(384,
                                           384, (3, 1),
                                           padding=(1, 0),
                                           name=name + '.branch3x3dbl_3b')

        self.branch_pool0 = nn.AvgPool2D(kernel_size=3, stride=1, padding=1)
        self.branch_pool = ConvBNLayer(in_channels,
                                       192,
                                       1,
                                       name=name + '.branch_pool')
Example #20
    def __init__(self,
                 ni: int,
                 nf: int = None,
                 scale: int = 2,
                 blur: bool = False,
                 norm_type='Weight',
                 leaky: float = None):
        super().__init__()
        nf = ifnone(nf, ni)
        self.conv = conv_layer(ni,
                               nf * (scale**2),
                               ks=1,
                               norm_type=norm_type,
                               use_activ=False)

        self.shuf = PixelShuffle(scale)

        self.pad = ReplicationPad2d([1, 0, 1, 0])
        self.blur = nn.AvgPool2D(2, stride=1)
        self.relu = relu(True, leaky=leaky)
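A hedged sketch of how these pieces usually compose (fastai-style PixelShuffle_ICNR assumed; the replication pad plus stride-1 average pool act as a blur that suppresses checkerboard artifacts):

    # hedged sketch of the omitted forward pass
    def forward(self, x):
        x = self.shuf(self.relu(self.conv(x)))
        # assumption: the blur path is taken only when blur=True upstream
        return self.blur(self.pad(x))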
Example #21
    def __init__(self,
                 ni: int,
                 nf: int = None,
                 scale: int = 2,
                 blur: bool = False,
                 leaky: float = None,
                 **kwargs):
        super().__init__()
        nf = ifnone(nf, ni)
        self.conv = custom_conv_layer(ni,
                                      nf * (scale**2),
                                      ks=1,
                                      use_activ=False,
                                      **kwargs)

        self.shuf = PixelShuffle(scale)

        self.pad = ReplicationPad2d([1, 0, 1, 0])
        self.blur = nn.AvgPool2D(2, stride=1)
        self.relu = nn.LeakyReLU(
            leaky) if leaky is not None else nn.ReLU()  #relu(True, leaky=leaky)
Example #22
    def pyramidal_make_layer(self, block, block_depth, stride=1):
        downsample = None
        if stride != 1:  # or self.inplanes != int(round(featuremap_dim_1st)) * block.outchannel_ratio:
            downsample = nn.AvgPool2D((2, 2), stride=(2, 2), ceil_mode=True)

        layers = []
        self.featuremap_dim = self.featuremap_dim + self.addrate
        layers.append(
            block(self.input_featuremap_dim, int(round(self.featuremap_dim)),
                  stride, downsample))
        for i in range(1, block_depth):
            temp_featuremap_dim = self.featuremap_dim + self.addrate
            layers.append(
                block(
                    int(round(self.featuremap_dim)) * block.outchannel_ratio,
                    int(round(temp_featuremap_dim)), 1))
            self.featuremap_dim = temp_featuremap_dim
        self.input_featuremap_dim = int(round(
            self.featuremap_dim)) * block.outchannel_ratio

        return nn.Sequential(*layers)
Example #23
    def __init__(self, n_blocks, in_channels, ch_list, gr, grmul, n_layers):
        super().__init__()
        self.skip_connection_channels = []
        self.shortcut_layers = []
        self.blks = nn.LayerList()
        ch = in_channels
        for i in range(n_blocks):
            blk = HarDBlock(ch, gr[i], grmul, n_layers[i])
            ch = blk.get_out_ch()
            self.skip_connection_channels.append(ch)
            self.blks.append(blk)
            if i < n_blocks - 1:
                self.shortcut_layers.append(len(self.blks) - 1)
            self.blks.append(
                layers.ConvBNReLU(
                    ch, ch_list[i], kernel_size=1, bias_attr=False))

            ch = ch_list[i]
            if i < n_blocks - 1:
                self.blks.append(nn.AvgPool2D(kernel_size=2, stride=2))
        self.out_channels = ch
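A hedged sketch of the encoder forward, collecting the features recorded in `shortcut_layers` as skip connections:

    # hedged sketch of the omitted forward pass
    def forward(self, x):
        skip_connections = []
        for i, blk in enumerate(self.blks):
            x = blk(x)
            if i in self.shortcut_layers:
                skip_connections.append(x)
        return x, skip_connections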
Example #24
    def __init__(self, classes_num=1000, **kwargs):
        super().__init__()

        def conv_bn(inp, oup, stride):
            return nn.Sequential(
                nn.Conv2D(
                    inp, oup, 3, stride, 1, bias_attr=False),
                nn.BatchNorm2D(oup),
                nn.ReLU())

        def conv_dw(inp, oup, stride):
            return nn.Sequential(
                nn.Conv2D(
                    inp, inp, 3, stride, 1, groups=inp, bias_attr=False),
                nn.BatchNorm2D(inp),
                nn.ReLU(),
                nn.Conv2D(
                    inp, oup, 1, 1, 0, bias_attr=False),
                nn.BatchNorm2D(oup),
                nn.ReLU(), )

        self.model = nn.Sequential(
            conv_bn(3, 32, 2),
            conv_dw(32, 64, 1),
            conv_dw(64, 128, 2),
            conv_dw(128, 128, 1),
            conv_dw(128, 256, 2),
            conv_dw(256, 256, 1),
            conv_dw(256, 512, 2),
            conv_dw(512, 512, 1),
            conv_dw(512, 512, 1),
            conv_dw(512, 512, 1),
            conv_dw(512, 512, 1),
            conv_dw(512, 512, 1),
            conv_dw(512, 1024, 2),
            conv_dw(1024, 1024, 1),
            nn.AvgPool2D(7), )
        self.fc = nn.Linear(1024, classes_num)

        self.init_params()
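A hedged usage sketch (MobileNet-v1-style; `init_params` is defined outside the excerpt):

    # hedged sketch of the omitted forward pass
    def forward(self, x):
        x = self.model(x)          # -> [N, 1024, 1, 1] for a 224x224 input
        x = paddle.flatten(x, 1)
        return self.fc(x)

    # usage: logits = net(paddle.randn([1, 3, 224, 224]))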
Example #25
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 groups=1,
                 is_vd_mode=False,
                 act=None):
        super(ConvBNLayer, self).__init__()

        self.is_vd_mode = is_vd_mode
        self._pool2d_avg = nn.AvgPool2D(kernel_size=2,
                                        stride=2,
                                        padding=0,
                                        ceil_mode=True)
        self._conv = nn.Conv2D(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=(kernel_size - 1) // 2,
                               groups=groups,
                               bias_attr=False)
        self._batch_norm = nn.BatchNorm(out_channels, act=act)
Example #26
    def __init__(self,
                 in_features,
                 out_features,
                 kernel_size=3,
                 padding=1,
                 groups=1):
        super(MobileDownBlock2d, self).__init__()
        self.conv = nn.Conv2D(in_channels=in_features,
                              out_channels=in_features,
                              kernel_size=kernel_size,
                              padding=padding,
                              groups=in_features,
                              bias_attr=False)
        self.norm = SyncBatchNorm(in_features)
        self.pool = nn.AvgPool2D(kernel_size=(2, 2))

        self.conv1 = nn.Conv2D(in_features,
                               out_features,
                               kernel_size=1,
                               padding=0,
                               stride=1,
                               bias_attr=False)
        self.norm1 = SyncBatchNorm(out_features)
Example #27
    def __init__(self, in_channels, pool_features, name=None):
        super(Fid_inceptionA, self).__init__()
        self.branch1x1 = ConvBNLayer(in_channels,
                                     64,
                                     1,
                                     name=name + '.branch1x1')

        self.branch5x5_1 = ConvBNLayer(in_channels,
                                       48,
                                       1,
                                       name=name + '.branch5x5_1')
        self.branch5x5_2 = ConvBNLayer(48,
                                       64,
                                       5,
                                       padding=2,
                                       name=name + '.branch5x5_2')

        self.branch3x3dbl_1 = ConvBNLayer(in_channels,
                                          64,
                                          1,
                                          name=name + '.branch3x3dbl_1')
        self.branch3x3dbl_2 = ConvBNLayer(64,
                                          96,
                                          3,
                                          padding=1,
                                          name=name + '.branch3x3dbl_2')
        self.branch3x3dbl_3 = ConvBNLayer(96,
                                          96,
                                          3,
                                          padding=1,
                                          name=name + '.branch3x3dbl_3')

        self.branch_pool0 = nn.AvgPool2D(kernel_size=3, stride=1, padding=1)
        self.branch_pool = ConvBNLayer(in_channels,
                                       pool_features,
                                       1,
                                       name=name + '.branch_pool')
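A hedged sketch of the branch-and-concatenate forward shared by these FID Inception blocks:

    # hedged sketch of the omitted forward pass
    def forward(self, x):
        branch1x1 = self.branch1x1(x)
        branch5x5 = self.branch5x5_2(self.branch5x5_1(x))
        branch3x3dbl = self.branch3x3dbl_3(
            self.branch3x3dbl_2(self.branch3x3dbl_1(x)))
        branch_pool = self.branch_pool(self.branch_pool0(x))
        return paddle.concat(
            [branch1x1, branch5x5, branch3x3dbl, branch_pool], axis=1)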
Example #28
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 dilation=1,
                 groups=1,
                 is_vd_mode=False,
                 act=None,
                 data_format='NCHW'):
        super(ConvBNLayer, self).__init__()
        if dilation != 1 and kernel_size != 3:
            raise RuntimeError(
                "When the dilation isn't 1, the kernel_size should be 3.")

        self.is_vd_mode = is_vd_mode
        self._pool2d_avg = nn.AvgPool2D(kernel_size=2,
                                        stride=2,
                                        padding=0,
                                        ceil_mode=True,
                                        data_format=data_format)
        self._conv = nn.Conv2D(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=(kernel_size - 1) // 2 \
                if dilation == 1 else dilation,
            dilation=dilation,
            groups=groups,
            bias_attr=False,
            data_format=data_format)

        self._batch_norm = layers.SyncBatchNorm(out_channels,
                                                data_format=data_format)
        self._act_op = layers.Activation(act=act)
Example #29
 def __init__(self,
              in_channels,
              out_channels,
              branches=4,
              kernel_size_maximum=9,
              shortcut=True):
     super().__init__()
     if out_channels < in_channels:
          raise RuntimeError(
              "The out_channels for DownSampler should be bigger than "
              "in_channels, but got in_channels={}, out_channels={}".format(
                  in_channels, out_channels))
     self.eesp = EESP(in_channels,
                      out_channels - in_channels,
                      stride=2,
                      branches=branches,
                      kernel_size_maximum=kernel_size_maximum,
                      down_method='avg')
     self.avg = nn.AvgPool2D(kernel_size=3, padding=1, stride=2)
     if shortcut:
         self.shortcut_layer = nn.Sequential(
             layers.ConvBNPReLU(3, 3, 3, stride=1, bias_attr=False),
             layers.ConvBN(3, out_channels, 1, stride=1, bias_attr=False),
         )
     self._act = nn.PReLU()
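A hedged sketch of the forward pass (ESPNetv2-style assumed: concatenate the pooled and EESP paths, optionally reinforce with a shortcut computed from the raw input image):

 # hedged sketch; assumes: import paddle; import paddle.nn.functional as F
 def forward(self, x, inputs=None):
     output = paddle.concat([self.avg(x), self.eesp(x)], axis=1)
     if inputs is not None:
         # assumption: pool the raw image until it matches the current resolution
         while inputs.shape[2] != output.shape[2]:
             inputs = F.avg_pool2d(inputs, kernel_size=3, stride=2, padding=1)
         output = output + self.shortcut_layer(inputs)
     return self._act(output)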
Example #30
    def __init__(self,
                 ch_in,
                 ch_out,
                 stride,
                 shortcut,
                 variant='b',
                 groups=1,
                 base_width=4,
                 lr=1.0,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=True,
                 dcn_v2=False,
                 std_senet=False):
        super(BottleNeck, self).__init__()
        if variant == 'a':
            stride1, stride2 = stride, 1
        else:
            stride1, stride2 = 1, stride

        # ResNeXt
        width = int(ch_out * (base_width / 64.)) * groups

        self.shortcut = shortcut
        if not shortcut:
            if variant == 'd' and stride == 2:
                self.short = nn.Sequential()
                self.short.add_sublayer(
                    'pool',
                    nn.AvgPool2D(kernel_size=2,
                                 stride=2,
                                 padding=0,
                                 ceil_mode=True))
                self.short.add_sublayer(
                    'conv',
                    ConvNormLayer(ch_in=ch_in,
                                  ch_out=ch_out * self.expansion,
                                  filter_size=1,
                                  stride=1,
                                  norm_type=norm_type,
                                  norm_decay=norm_decay,
                                  freeze_norm=freeze_norm,
                                  lr=lr))
            else:
                self.short = ConvNormLayer(ch_in=ch_in,
                                           ch_out=ch_out * self.expansion,
                                           filter_size=1,
                                           stride=stride,
                                           norm_type=norm_type,
                                           norm_decay=norm_decay,
                                           freeze_norm=freeze_norm,
                                           lr=lr)

        self.branch2a = ConvNormLayer(ch_in=ch_in,
                                      ch_out=width,
                                      filter_size=1,
                                      stride=stride1,
                                      groups=1,
                                      act='relu',
                                      norm_type=norm_type,
                                      norm_decay=norm_decay,
                                      freeze_norm=freeze_norm,
                                      lr=lr)

        self.branch2b = ConvNormLayer(ch_in=width,
                                      ch_out=width,
                                      filter_size=3,
                                      stride=stride2,
                                      groups=groups,
                                      act='relu',
                                      norm_type=norm_type,
                                      norm_decay=norm_decay,
                                      freeze_norm=freeze_norm,
                                      lr=lr,
                                      dcn_v2=dcn_v2)

        self.branch2c = ConvNormLayer(ch_in=width,
                                      ch_out=ch_out * self.expansion,
                                      filter_size=1,
                                      stride=1,
                                      groups=1,
                                      norm_type=norm_type,
                                      norm_decay=norm_decay,
                                      freeze_norm=freeze_norm,
                                      lr=lr)

        self.std_senet = std_senet
        if self.std_senet:
            self.se = SELayer(ch_out * self.expansion)
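A hedged sketch of the residual forward implied by the branch names (ResNet-style; `expansion` is a class attribute defined outside the excerpt):

    # hedged sketch; assumes: import paddle.nn.functional as F
    def forward(self, inputs):
        out = self.branch2c(self.branch2b(self.branch2a(inputs)))
        if self.std_senet:
            out = self.se(out)
        short = inputs if self.shortcut else self.short(inputs)
        return F.relu(out + short)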