Example 1
 def __init__(self, cin, cout, stride=1, dilation=1):
     super(BasicBlock, self).__init__()
     self.conv_bn_act = nn.Conv2dBnAct(cin,
                                       cout,
                                       kernel_size=3,
                                       stride=stride,
                                       pad_mode='pad',
                                       padding=dilation,
                                       has_bias=False,
                                       dilation=dilation,
                                       has_bn=True,
                                       momentum=BN_MOMENTUM,
                                       activation='relu',
                                       after_fake=False)
     self.conv_bn = nn.Conv2dBnAct(cout,
                                   cout,
                                   kernel_size=3,
                                   stride=1,
                                   pad_mode='same',
                                   has_bias=False,
                                   dilation=dilation,
                                   has_bn=True,
                                   momentum=BN_MOMENTUM,
                                   activation=None)
     self.relu = ops.ReLU()
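
For context: nn.Conv2dBnAct fuses the convolution, an optional BatchNorm (has_bn=True) and an optional activation into a single cell, which is why BasicBlock needs no separate BatchNorm layers. A minimal standalone check (assuming a MindSpore release that still provides nn.Conv2dBnAct; the cell is deprecated in recent versions):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # conv 3x3 + BatchNorm + ReLU as one cell
    layer = nn.Conv2dBnAct(3, 16, kernel_size=3, stride=1, pad_mode='same',
                           has_bn=True, activation='relu')
    x = Tensor(np.ones((1, 3, 32, 32), np.float32))
    print(layer(x).shape)  # (1, 16, 32, 32)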
Example 2
 def __init__(self, cin, cout, up_f=2, enable_dcn=False):
     super(IDAUp, self).__init__()
     self.enable_dcn = enable_dcn
     if enable_dcn:
         self.proj = DeformConv(cin, cout)
         self.node = DeformConv(cout, cout)
     else:
         self.proj = nn.Conv2dBnAct(cin,
                                    cout,
                                    kernel_size=1,
                                    stride=1,
                                    pad_mode='same',
                                    has_bias=False,
                                    has_bn=True,
                                    momentum=BN_MOMENTUM,
                                    activation='relu',
                                    after_fake=False)
         self.node = nn.Conv2dBnAct(2 * cout,
                                    cout,
                                    kernel_size=3,
                                    stride=1,
                                    pad_mode='same',
                                    has_bias=False,
                                    has_bn=True,
                                    momentum=BN_MOMENTUM,
                                    activation='relu',
                                    after_fake=False)
     self.up = nn.Conv2dTranspose(cout,
                                  cout,
                                  up_f * 2,
                                  stride=up_f,
                                  pad_mode='pad',
                                  padding=up_f // 2)
     self.concat = ops.Concat(axis=1)
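
The transposed convolution here is sized so the feature map grows by exactly up_f: with kernel_size = 2 * up_f, stride = up_f and padding = up_f // 2, the output height for even up_f is (H - 1) * up_f - 2 * (up_f // 2) + 2 * up_f = H * up_f. A quick standalone check for the default up_f = 2 (a sketch, not code from the repository):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # kernel_size=4, stride=2, padding=1: doubles height and width
    up = nn.Conv2dTranspose(8, 8, 4, stride=2, pad_mode='pad', padding=1)
    x = Tensor(np.ones((1, 8, 16, 16), np.float32))
    print(up(x).shape)  # (1, 8, 32, 32)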
Example 3
    def __init__(self, num_class=10, channel=1):
        super(LeNet5, self).__init__()
        self.type = "fusion"
        self.num_class = num_class

        # change `nn.Conv2d` to `nn.Conv2dBnAct`
        self.conv1 = nn.Conv2dBnAct(channel,
                                    6,
                                    5,
                                    pad_mode='valid',
                                    has_bn=True,
                                    activation='relu')
        self.conv2 = nn.Conv2dBnAct(6,
                                    16,
                                    5,
                                    pad_mode='valid',
                                    has_bn=True,
                                    activation='relu')
        # change `nn.Dense` to `nn.DenseBnAct`
        self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu')
        self.fc2 = nn.DenseBnAct(120, 84, activation='relu')
        self.fc3 = nn.DenseBnAct(84, self.num_class)

        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()
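
The 16 * 5 * 5 input size of fc1 follows from a 32x32 input: each 5x5 valid convolution trims 4 pixels and each 2x2 max-pool halves the size. A standalone shape trace with the same layer settings (a sketch, assuming a 32x32 input as in the classic LeNet-5 setup):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    conv1 = nn.Conv2dBnAct(1, 6, 5, pad_mode='valid', has_bn=True, activation='relu')
    conv2 = nn.Conv2dBnAct(6, 16, 5, pad_mode='valid', has_bn=True, activation='relu')
    pool = nn.MaxPool2d(kernel_size=2, stride=2)

    x = Tensor(np.ones((1, 1, 32, 32), np.float32))
    x = pool(conv1(x))  # 32 -> 28 -> 14
    x = pool(conv2(x))  # 14 -> 10 -> 5
    print(x.shape)      # (1, 16, 5, 5); flattened: 16 * 5 * 5 = 400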
Example 4
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        self.stride = stride
        assert stride in [1, 2]

        hidden_dim = int(inp * expand_ratio)
        self.use_res_connect = self.stride == 1 and inp == oup
        if expand_ratio == 1:
            self.conv = nn.SequentialCell([
                nn.Conv2dBnAct(hidden_dim,
                               hidden_dim,
                               3,
                               stride,
                               group=hidden_dim,
                               batchnorm=True,
                               activation='relu6'),
                nn.Conv2dBnAct(hidden_dim, oup, 1, 1,
                               batchnorm=True)
            ])
        else:
            self.conv = nn.SequentialCell([
                nn.Conv2dBnAct(inp, hidden_dim, 1, 1,
                               batchnorm=True,
                               activation='relu6'),
                nn.Conv2dBnAct(hidden_dim,
                               hidden_dim,
                               3,
                               stride,
                               group=hidden_dim,
                               batchnorm=True,
                               activation='relu6'),
                nn.Conv2dBnAct(hidden_dim, oup, 1, 1,
                               batchnorm=True)
            ])
        self.add = P.TensorAdd()
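
Two version notes on this example: batchnorm= is the parameter name from early MindSpore releases (later renamed has_bn=), and P.TensorAdd was later renamed ops.Add. The shortcut is applied only when use_res_connect holds; a construct method along these lines would complete the cell (a sketch of the usual MobileNetV2 pattern, not necessarily this repository's exact code):

    def construct(self, x):
        if self.use_res_connect:
            # identity shortcut: only valid for stride 1 and matching channels
            return self.add(x, self.conv(x))
        return self.conv(x)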
Example 5
    def __init__(self, in_channel, out_channel, stride=1):
        super(ResidualBlock, self).__init__()

        channel = out_channel // self.expansion
        self.conv1 = ConvBNReLU(in_channel, channel, kernel_size=1, stride=1)
        self.conv2 = ConvBNReLU(channel, channel, kernel_size=3, stride=stride)
        self.conv3 = nn.Conv2dBnAct(channel,
                                    out_channel,
                                    kernel_size=1,
                                    stride=1,
                                    pad_mode='same',
                                    padding=0,
                                    has_bn=True,
                                    activation='relu')

        self.down_sample = False
        if stride != 1 or in_channel != out_channel:
            self.down_sample = True
        self.down_sample_layer = None

        if self.down_sample:
            self.down_sample_layer = nn.Conv2dBnAct(in_channel,
                                                    out_channel,
                                                    kernel_size=1,
                                                    stride=stride,
                                                    pad_mode='same',
                                                    padding=0,
                                                    has_bn=True,
                                                    activation='relu')
        self.add = P.TensorAdd()
        self.relu = P.ReLU()
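
self.expansion is a class attribute defined outside the snippet (4 in the standard ResNet bottleneck). The forward pass adds the shortcut, projected by down_sample_layer when shapes change, to the conv branch (a sketch of the usual pattern, not necessarily this file's exact code):

    def construct(self, x):
        identity = x
        out = self.conv3(self.conv2(self.conv1(x)))
        if self.down_sample:
            # 1x1 projection aligns shape when stride or channel count changes
            identity = self.down_sample_layer(identity)
        return self.relu(self.add(out, identity))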
Example 6
 def __init__(self, num_class=10):
     super(LeNet5, self).__init__()
     self.num_class = num_class
     self.conv1 = nn.Conv2dBnAct(1, 6, kernel_size=5, has_bn=True, activation='relu', pad_mode="valid")
     self.conv2 = nn.Conv2dBnAct(6, 16, kernel_size=5, activation='relu', pad_mode="valid")
     self.fc1 = nn.DenseBnAct(16 * 5 * 5, 120, activation='relu')
     self.fc2 = nn.DenseBnAct(120, 84, activation='relu')
     self.fc3 = nn.DenseBnAct(84, self.num_class)
     self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
     self.flatten = nn.Flatten()
Example 7
 def __init__(self, nclass, activation='prelu', drop_out=.1, phase='train'):
     super(EPRNet, self).__init__()
     self.phase = phase
     self.shape = P.Shape()
     self.stem = nn.Conv2dBnAct(3,
                                16,
                                3,
                                2,
                                'pad',
                                1,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.layer1 = nn.SequentialCell([
         _EPRModule(16, 32, atrous_rates=(1, 2, 4), activation=activation),
         _EPRModule(32,
                    32,
                    atrous_rates=(1, 2, 4),
                    activation=activation,
                    down_sample=True)
     ])
     self.layer2 = nn.SequentialCell([
         _EPRModule(32, 64, atrous_rates=(3, 6, 9), activation=activation),
         _EPRModule(64, 64, atrous_rates=(3, 6, 9), activation=activation),
         _EPRModule(64, 64, atrous_rates=(3, 6, 9), activation=activation),
         _EPRModule(64,
                    64,
                    atrous_rates=(3, 6, 9),
                    activation=activation,
                    down_sample=True)
     ])
     self.layer3 = nn.SequentialCell([
         _EPRModule(64,
                    128,
                    atrous_rates=(7, 13, 19),
                    activation=activation),
         _EPRModule(128,
                    128,
                    atrous_rates=(13, 25, 37),
                    activation=activation)
     ])
     self.stern_conv1 = nn.Conv2dBnAct(128,
                                       128,
                                       3,
                                       1,
                                       'pad',
                                       1,
                                       weight_init='xavier_uniform',
                                       has_bn=True,
                                       activation=activation)
     # caution: in MindSpore releases before 2.0, nn.Dropout takes keep_prob,
     # so a drop rate of 0.1 passed here would keep only 10% of activations
     self.stern_drop = nn.Dropout(drop_out) if drop_out else None
     self.stern_conv2 = nn.Conv2d(128,
                                  nclass,
                                  1,
                                  weight_init='xavier_uniform')
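
In the stem and stern_conv1 calls, the positional arguments map to (in_channels, out_channels, kernel_size, stride, pad_mode, padding). Spelled out with keywords, the stem is equivalent to (same call, keywords added for readability; 'prelu' is the default activation argument):

    nn.Conv2dBnAct(3, 16, kernel_size=3, stride=2, pad_mode='pad', padding=1,
                   weight_init='xavier_uniform', has_bn=True, activation='prelu')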
Example 8
 def __init__(self,
              in_channels,
              out_channels,
              atrous_rates,
              activation='prelu'):
     super(_MPUnit, self).__init__()
     self.mpu0 = nn.Conv2dBnAct(in_channels,
                                out_channels,
                                1,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.mpu1 = nn.Conv2dBnAct(in_channels,
                                out_channels,
                                3,
                                1,
                                'pad',
                                padding=atrous_rates[0],
                                dilation=atrous_rates[0],
                                group=in_channels,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.mpu2 = nn.Conv2dBnAct(in_channels,
                                out_channels,
                                3,
                                1,
                                'pad',
                                padding=atrous_rates[1],
                                dilation=atrous_rates[1],
                                group=in_channels,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.mpu3 = nn.Conv2dBnAct(in_channels,
                                out_channels,
                                3,
                                1,
                                'pad',
                                padding=atrous_rates[2],
                                dilation=atrous_rates[2],
                                group=in_channels,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.proj = nn.Conv2dBnAct(4 * out_channels,
                                out_channels,
                                1,
                                weight_init='xavier_uniform',
                                has_bn=True,
                                activation=activation)
     self.concat = P.Concat(axis=1)
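
The three dilated branches use group=in_channels, i.e. depthwise convolutions with different atrous rates; their outputs plus the 1x1 branch are concatenated on the channel axis and fused by proj, which is why proj takes 4 * out_channels input channels. A construct in this spirit (a sketch, not necessarily this file's exact code):

 def construct(self, x):
     # four parallel branches, fused by a 1x1 projection
     feats = self.concat((self.mpu0(x), self.mpu1(x), self.mpu2(x), self.mpu3(x)))
     return self.proj(feats)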
Example 9
    def __init__(self, inp, oup, stride, expand_ratio):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        hidden_dim = int(round(inp * expand_ratio))
        self.use_res_connect = stride == 1 and inp == oup

        layers = []
        if expand_ratio != 1:
            layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1))
        layers.extend([
            # dw
            ConvBNReLU(hidden_dim,
                       hidden_dim,
                       stride=stride,
                       groups=hidden_dim),
            # pw-linear
            nn.Conv2dBnAct(hidden_dim,
                           oup,
                           kernel_size=1,
                           stride=1,
                           pad_mode='pad',
                           padding=0,
                           group=1,
                           has_bn=True)
        ])
        self.conv = nn.SequentialCell(layers)
        self.add = P.TensorAdd()
Example 10
    def __init__(self,
                 in_channels,
                 out_channels,
                 atrous_rates,
                 activation='prelu',
                 down_sample=False):
        super(_EPRModule, self).__init__()
        stride = 2 if down_sample else 1

        self.pyramid = _MPUnit(in_channels, out_channels, atrous_rates,
                               activation)
        self.compact = nn.Conv2dBnAct(out_channels,
                                      out_channels,
                                      3,
                                      stride,
                                      'pad',
                                      1,
                                      weight_init='xavier_uniform',
                                      has_bn=True,
                                      activation=activation)
        if (out_channels != in_channels) or down_sample:
            self.skip = nn.Conv2d(in_channels,
                                  out_channels,
                                  1,
                                  stride=stride,
                                  weight_init='xavier_uniform')
            self.skip_bn = nn.BatchNorm2d(out_channels)
        else:
            self.skip = None
        self.act = nn.ReLU()
        self.add = P.Add()
Example 11
def _conv_bn(in_channel, out_channel, ksize, stride=1):
    """Get a conv2d batchnorm and relu layer."""
    return nn.SequentialCell([
        nn.Conv2dBnAct(in_channel,
                       out_channel,
                       kernel_size=ksize,
                       stride=stride,
                       batchnorm=True)  # older parameter name; later MindSpore releases use has_bn
    ])
Example 12
    def __init__(self,
                 levels,
                 channels,
                 block=BasicBlock,
                 residual_root=False):
        super(DLA34, self).__init__()
        self.channels = channels
        self.base_layer = nn.Conv2dBnAct(3,
                                         channels[0],
                                         kernel_size=7,
                                         stride=1,
                                         pad_mode='same',
                                         has_bias=False,
                                         has_bn=True,
                                         momentum=BN_MOMENTUM,
                                         activation='relu',
                                         after_fake=False)

        self.level0 = self._make_conv_level(channels[0], channels[0],
                                            levels[0])
        self.level1 = self._make_conv_level(channels[0],
                                            channels[1],
                                            levels[1],
                                            stride=2)
        self.level2 = Tree(levels[2],
                           block,
                           channels[1],
                           channels[2],
                           2,
                           level_root=False,
                           root_residual=residual_root)
        self.level3 = Tree(levels[3],
                           block,
                           channels[2],
                           channels[3],
                           2,
                           level_root=True,
                           root_residual=residual_root)
        self.level4 = Tree(levels[4],
                           block,
                           channels[3],
                           channels[4],
                           2,
                           level_root=True,
                           root_residual=residual_root)
        self.level5 = Tree(levels[5],
                           block,
                           channels[4],
                           channels[5],
                           2,
                           level_root=True,
                           root_residual=residual_root)
        self.dla_fn = [
            self.level0, self.level1, self.level2, self.level3, self.level4,
            self.level5
        ]
Example 13
 def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
     super(ConvBNReLU, self).__init__()
     padding = (kernel_size - 1) // 2
     self.conv = nn.Conv2dBnAct(in_planes, out_planes, kernel_size,
                                stride=stride,
                                pad_mode='pad',
                                padding=padding,
                                group=groups,
                                has_bn=True,
                                activation='relu')
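
With padding = (kernel_size - 1) // 2 and pad_mode='pad', the output keeps the input's spatial size at stride 1 (the 'same'-padding effect for odd kernels) and halves it at stride 2. A quick standalone check of the underlying call (a sketch):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # same settings as ConvBNReLU(3, 8, kernel_size=3, stride=1)
    conv = nn.Conv2dBnAct(3, 8, 3, stride=1, pad_mode='pad', padding=1,
                          group=1, has_bn=True, activation='relu')
    x = Tensor(np.ones((1, 3, 32, 32), np.float32))
    print(conv(x).shape)  # (1, 8, 32, 32): 32 + 2*1 - 3 + 1 = 32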
Example 14
    def __init__(self, in_channels, out_chls, out_channels):
        super(YoloBlock, self).__init__()
        out_chls_2 = out_chls*2

        self.conv0 = _conv_bn_relu(in_channels, out_chls, ksize=1)
        self.conv1 = _conv_bn_relu(out_chls, out_chls_2, ksize=3)

        self.conv2 = _conv_bn_relu(out_chls_2, out_chls, ksize=1)
        self.conv3 = _conv_bn_relu(out_chls, out_chls_2, ksize=3)

        self.conv4 = _conv_bn_relu(out_chls_2, out_chls, ksize=1)
        self.conv5 = _conv_bn_relu(out_chls, out_chls_2, ksize=3)

        self.conv6 = nn.Conv2dBnAct(out_chls_2, out_channels, kernel_size=1, stride=1,
                                    has_bias=True, has_bn=False, activation=None, after_fake=False)
Example 15
def conv_block(in_channels, out_channels, kernel_size, stride, dilation=1):
    """Get a conv2d batchnorm and relu layer"""
    pad_mode = 'same'
    padding = 0

    return nn.Conv2dBnAct(in_channels,
                          out_channels,
                          kernel_size,
                          stride=stride,
                          pad_mode=pad_mode,
                          padding=padding,
                          dilation=dilation,
                          has_bn=True,
                          momentum=0.9,
                          activation='relu')
Example 16
 def _make_conv_level(self, cin, cout, convs, stride=1, dilation=1):
     modules = []
     for i in range(convs):
         modules.append(
             nn.Conv2dBnAct(cin,
                            cout,
                            kernel_size=3,
                            stride=stride if i == 0 else 1,
                            pad_mode='pad',
                            padding=dilation,
                            has_bias=False,
                            dilation=dilation,
                            has_bn=True,
                            momentum=BN_MOMENTUM,
                            activation='relu',
                            after_fake=False))
         cin = cout
     return nn.SequentialCell(modules)
Example 17
def _conv_bn_relu(in_channel,
                  out_channel,
                  ksize,
                  stride=1,
                  padding=0,
                  dilation=1,
                  alpha=0.1,
                  momentum=0.9,
                  eps=1e-5,
                  pad_mode="same"):
    """Get a conv2d batchnorm and relu layer"""
    return nn.Conv2dBnAct(in_channel, out_channel, ksize,
                          stride=stride,
                          pad_mode=pad_mode,
                          padding=padding,
                          dilation=dilation,
                          has_bn=True,
                          momentum=momentum,
                          eps=eps,
                          activation='leakyrelu',
                          alpha=alpha)
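
Here activation='leakyrelu' together with alpha sets the negative slope of the fused LeakyReLU, while momentum and eps are forwarded to the BatchNorm. Usage sketch (shapes are illustrative):

    import numpy as np
    from mindspore import Tensor

    block = _conv_bn_relu(32, 64, ksize=3, alpha=0.2)
    x = Tensor(np.ones((1, 32, 16, 16), np.float32))
    print(block(x).shape)  # (1, 64, 16, 16): 'same' padding keeps spatial size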
Example 18
 def __init__(self,
              in_channels,
              out_channels,
              kernel_size,
              stride=1,
              pad_mode='valid',
              padding=0,
              has_bias=False,
              weight_init="XavierUniform",
              bias_init='zeros'):
     super(Conv2d, self).__init__()
     self.conv = nn.Conv2dBnAct(in_channels,
                                out_channels,
                                kernel_size,
                                stride=stride,
                                pad_mode=pad_mode,
                                padding=padding,
                                weight_init=weight_init,
                                bias_init=bias_init,
                                has_bias=has_bias,
                                has_bn=True,
                                activation="relu")
Example 19
 def __init__(self,
              levels,
              block,
              in_channels,
              out_channels,
              stride=1,
              level_root=False,
              root_dim=0,
              root_kernel_size=1,
              dilation=1,
              root_residual=False):
     super(Tree, self).__init__()
     self.levels = levels
     if root_dim == 0:
         root_dim = 2 * out_channels
     if level_root:
         root_dim += in_channels
     if self.levels == 1:
         self.tree1 = block(in_channels,
                            out_channels,
                            stride,
                            dilation=dilation)
         self.tree2 = block(out_channels,
                            out_channels,
                            1,
                            dilation=dilation)
     else:
         self.tree1 = Tree(levels - 1,
                           block,
                           in_channels,
                           out_channels,
                           stride,
                           root_dim=0,
                           root_kernel_size=root_kernel_size,
                           dilation=dilation,
                           root_residual=root_residual)
         self.tree2 = Tree(levels - 1,
                           block,
                           out_channels,
                           out_channels,
                           root_dim=root_dim + out_channels,
                           root_kernel_size=root_kernel_size,
                           dilation=dilation,
                           root_residual=root_residual)
     self.root = Root(root_dim, out_channels, root_kernel_size,
                      root_residual)
     self.level_root = level_root
     self.root_dim = root_dim
     self.downsample = None
     self.project = None
     if stride > 1:
         self.downsample = nn.MaxPool2d(stride, stride=stride)
     if in_channels != out_channels:
         self.project = nn.Conv2dBnAct(in_channels,
                                       out_channels,
                                       kernel_size=1,
                                       stride=1,
                                       pad_mode='same',
                                       has_bias=False,
                                       has_bn=True,
                                       momentum=BN_MOMENTUM,
                                       activation=None,
                                       after_fake=False)