Example #1
 def __init__(self,
              inp,
              hidden_dim,
              oup,
              kernel_size,
              stride,
              use_se=False,
              use_hs=False):
     """Init InvertedResidualSE."""
     super(InvertedResidualSE, self).__init__()
     self.identity = stride == 1 and inp == oup
     self.ir_block = Sequential(
         # pw
         ops.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
         ops.BatchNorm2d(hidden_dim),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # dw
         ops.Conv2d(hidden_dim,
                    hidden_dim,
                    kernel_size,
                    stride, (kernel_size - 1) // 2,
                    groups=hidden_dim,
                    bias=False),
         ops.BatchNorm2d(hidden_dim),
         # Squeeze-and-Excite
         SELayer(hidden_dim) if use_se else ops.Identity(),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # pw-linear
         ops.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
         ops.BatchNorm2d(oup),
     )
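
For comparison, a minimal plain-PyTorch sketch of the same pointwise-expand / depthwise / pointwise-project layout (standard torch.nn only; the SE and h-swish branches are left out for brevity, so this is illustrative rather than Vega's API):

import torch.nn as nn

def inverted_residual(inp, hidden_dim, oup, kernel_size, stride):
    # pw expand -> dw -> pw-linear, as in MobileNetV2/V3
    return nn.Sequential(
        nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
        nn.BatchNorm2d(hidden_dim),
        nn.ReLU(inplace=True),
        nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride,
                  (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
        nn.BatchNorm2d(hidden_dim),
        nn.ReLU(inplace=True),
        nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
    )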
Example #2
    def __init__(self, encoding):
        super(DNetBackbone, self).__init__()
        op_names = [
            "conv3", "conv1", "conv3_grp2", "conv3_grp4", "conv3_base1",
            "conv3_base32", "conv3_sep"
        ]

        # code with kangning
        block_str, num_channel, macro_str = encoding.split('_')
        curr_channel, index = int(num_channel), 0

        _big_model = "*" in block_str
        if _big_model:
            block_encoding_list = block_str.split('*')

        # stem
        layers = [
            create_op('conv3', 3, curr_channel // 2, stride=2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel // 2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel, stride=2),
            ops.Relu()
        ]

        # body
        if not _big_model:
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1

                channel_increase = int(macro_str[index])
                block = EncodedBlock(block_str, curr_channel, op_names, stride,
                                     channel_increase)
                layers.append(block)
                curr_channel *= channel_increase
                index += 1
        else:
            block_encoding_index = 0
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1
                    block_encoding_index += 1
                channel_increase = int(macro_str[index])
                block_encoding = block_encoding_list[block_encoding_index]
                block = EncodedBlock(block_encoding, curr_channel, op_names,
                                     stride, channel_increase)
                layers.append(block)
                curr_channel *= channel_increase
                index += 1
        layers.append(ops.AdaptiveAvgPool2d((1, 1)))
        self.layers = Sequential(*layers)
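
The body loop is easiest to follow on a concrete macro string: '-' marks a stride-2 block and consumes one extra character, and each digit multiplies the channel count for the block it labels. A standalone trace of the same rule (the encoding below is made up for illustration):

def parse_macro(macro_str, curr_channel):
    """Yield (stride, in_channels, channel_increase) per block."""
    index, blocks = 0, []
    while index < len(macro_str):
        stride = 1
        if macro_str[index] == '-':
            stride = 2
            index += 1
        channel_increase = int(macro_str[index])
        blocks.append((stride, curr_channel, channel_increase))
        curr_channel *= channel_increase
        index += 1
    return blocks

print(parse_macro('1-21', 64))  # [(1, 64, 1), (2, 64, 2), (1, 128, 1)]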
Example #3
 def __init__(self, in_planes, planes, inner_plane, stride=1):
     """Create BottleConv layer."""
     super(PruneBasicConv, self).__init__()
     self.conv1 = ops.Conv2d(
         in_planes, inner_plane, kernel_size=3, stride=stride, padding=1, bias=False)
     self.bn1 = ops.BatchNorm2d(inner_plane)
     self.relu = ops.Relu()
     self.conv2 = ops.Conv2d(inner_plane, planes, kernel_size=3, stride=1, padding=1, bias=False)
     self.bn2 = ops.BatchNorm2d(planes)
     self.relu2 = ops.Relu()
Example #4
    def __init__(self,
                 inchannel,
                 outchannel,
                 expansion,
                 groups,
                 base_width,
                 stride=1,
                 norm_layer={"norm_type": 'BN'},
                 Conv2d='Conv2d'):
        """Create BottleConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param expansion: expansion
        :type expansion: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BottleConv, self).__init__()
        outchannel = int(outchannel * (base_width / 64.)) * groups
        self.conv1 = build_conv_layer(in_channels=inchannel,
                                      out_channels=outchannel,
                                      kernel_size=1,
                                      stride=1,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch1 = build_norm_layer(features=outchannel, **norm_layer)
        self.relu1 = ops.Relu(inplace=True)
        self.conv2 = build_conv_layer(in_channels=outchannel,
                                      out_channels=outchannel,
                                      kernel_size=3,
                                      stride=stride,
                                      padding=1,
                                      groups=groups,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch2 = build_norm_layer(features=outchannel, **norm_layer)
        self.relu2 = ops.Relu(inplace=True)
        self.conv3 = build_conv_layer(in_channels=outchannel,
                                      out_channels=outchannel * expansion,
                                      kernel_size=1,
                                      stride=1,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch3 = build_norm_layer(features=outchannel * expansion,
                                       **norm_layer)
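
The width rescaling on the first line of the body follows the ResNeXt convention: the bottleneck width scales with base_width / 64 and is then multiplied by the group count. A quick check of the arithmetic:

def bottleneck_width(outchannel, base_width, groups):
    return int(outchannel * (base_width / 64.)) * groups

assert bottleneck_width(256, base_width=4, groups=32) == 512  # ResNeXt-50 32x4d
assert bottleneck_width(256, base_width=64, groups=1) == 256  # plain ResNet bottleneck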
Example #5
    def __init__(self, encoding, n_class=1000):
        super(DNet, self).__init__()
        op_names = ["conv3", "conv1", "conv3_grp2", "conv3_grp4", "conv3_base1", "conv3_base32", "conv3_sep"]
        block_str, num_channel, macro_str = encoding.split('_')
        curr_channel, index = int(num_channel), 0
        _big_model = "*" in block_str
        if _big_model:
            block_encoding_list = block_str.split('*')
        # stem
        self.layers = Sequential(
            create_op('conv3', 3, curr_channel // 2, stride=2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel // 2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel, stride=2),
            ops.Relu()
        )

        # body
        if not _big_model:
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1

                channel_increase = int(macro_str[index])
                block = EncodedBlock(block_str, curr_channel, op_names, stride, channel_increase)
                self.layers.append(block)
                curr_channel *= channel_increase
                index += 1
        else:
            block_encoding_index = 0
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1
                    block_encoding_index += 1
                channel_increase = int(macro_str[index])
                block_encoding = block_encoding_list[block_encoding_index]
                block = EncodedBlock(block_encoding, curr_channel, op_names, stride, channel_increase)
                self.layers.append(block)
                curr_channel *= channel_increase
                index += 1
        self.layers.append(ops.AdaptiveAvgPool2d((1, 1)))
        self.view = ops.View()
        self.fc = ops.Linear(in_features=curr_channel, out_features=n_class)
Example #6
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1,
                 norm_layer={"norm_type": 'BN'},
                 Conv2d='Conv2d'):
        """Create BottleneckBlock layers.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BottleneckBlock, self).__init__()
        bottle_conv = BottleConv(inchannel=inchannel,
                                 outchannel=outchannel,
                                 expansion=self.expansion,
                                 stride=stride,
                                 groups=groups,
                                 base_width=base_width,
                                 norm_layer=norm_layer,
                                 Conv2d=Conv2d)
        shortcut = ShortCut(inchannel=inchannel,
                            outchannel=outchannel,
                            expansion=self.expansion,
                            stride=stride,
                            norm_layer=norm_layer)
        self.block = Add(bottle_conv, shortcut)
        self.relu = ops.Relu()
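
Add here acts as a two-branch combiner: both submodules receive the same input and their outputs are summed, giving the usual residual connection. A minimal sketch of that behavior in plain PyTorch (Add itself is a Vega module; this version is only illustrative):

import torch.nn as nn

class AddBranches(nn.Module):
    """Sum the outputs of several modules applied to the same input."""

    def __init__(self, *branches):
        super().__init__()
        self.branches = nn.ModuleList(branches)

    def forward(self, x):
        out = self.branches[0](x)
        for branch in self.branches[1:]:
            out = out + branch(x)
        return out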
Example #7
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1):
        """Create BasicConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BasicConv, self).__init__()
        self.conv = ops.Conv2d(in_channels=inchannel,
                               out_channels=outchannel,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=groups,
                               bias=False)
        self.batch = ops.BatchNorm2d(num_features=outchannel)
        self.relu = ops.Relu(inplace=True)
        self.conv2 = ops.Conv2d(in_channels=outchannel,
                                out_channels=outchannel,
                                kernel_size=3,
                                padding=1,
                                groups=groups,
                                bias=False)
        self.batch2 = ops.BatchNorm2d(num_features=outchannel)
Example #8
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1):
        """Create BasicBlock layers.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BasicBlock, self).__init__()
        base_conv = BasicConv(inchannel=inchannel,
                              outchannel=outchannel,
                              stride=stride,
                              groups=groups,
                              base_width=base_width)
        shortcut = ShortCut(inchannel=inchannel,
                            outchannel=outchannel,
                            expansion=self.expansion,
                            stride=stride)
        self.block = Add(base_conv, shortcut)
        self.relu = ops.Relu()
Example #9
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation,
              affine=True):
     """Construct SepConv class."""
     super(DilConv, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv1 = ops.Conv2d(C_in,
                             C_in,
                             kernel_size=kernel_size,
                             stride=stride,
                             padding=padding,
                             dilation=dilation,
                             groups=C_in,
                             bias=False)
     self.conv2 = ops.Conv2d(C_in,
                             C_out,
                             kernel_size=1,
                             padding=0,
                             bias=False)
     self.batch = ops.BatchNorm2d(C_out, affine=affine)
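
DilConv passes the caller's padding straight through, so for a size-preserving output the caller must supply padding = dilation * (kernel_size - 1) // 2. A quick shape check in plain PyTorch:

import torch
import torch.nn as nn

k, d = 3, 2
conv = nn.Conv2d(8, 8, k, stride=1, padding=d * (k - 1) // 2,
                 dilation=d, groups=8, bias=False)
x = torch.randn(1, 8, 32, 32)
assert conv(x).shape == x.shape  # spatial size preserved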
Example #10
 def call(self, x):
     """Call function."""
     out = self.conv1(x)
     out = self.conv2(out)
     out = self.bn(self.conv3(out))
     out += self.short_cut(x)
     return ops.Relu()(out)
Example #11
 def __init__(self,
              inplanes,
              planes,
              stride=1,
              dilation=1,
              downsample=None,
              style='pytorch',
              with_cp=False):
     """Init BasicBlock."""
     super(BasicBlock, self).__init__()
     self.expansion = 1
     self.norm1 = ops.BatchNorm2d(planes)
     self.norm2 = ops.BatchNorm2d(planes)
     self.conv1 = ops.Conv2d(inplanes,
                             planes,
                             3,
                             stride=stride,
                             padding=dilation,
                             dilation=dilation,
                             bias=False)
     self.conv2 = ops.Conv2d(planes, planes, 3, padding=1, bias=False)
     self.relu = ops.Relu(inplace=True)
     self.downsample = downsample
     self.inplanes = inplanes
     self.planes = planes
     self.stride = stride
     self.dilation = dilation
     self.style = style
     assert not with_cp
Example #12
    def __init__(self, inChannels, growRate, sh_groups, conv_groups, kSize=3):
        """Initialize Block.

        :param inChannels: channel number of input
        :type inChannels: int
        :param growRate: growth rate of block
        :type growRate: int
        :param sh_groups: group number of shuffle operation
        :type sh_groups: int
        :param conv_groups: group number of convolution operation
        :type conv_groups: int
        :param kSize: kernel size of convolution operation
        :type kSize: int
        """
        super(RDB_Conv, self).__init__()
        Cin = inChannels
        G = growRate
        self.shgroup = sh_groups
        self.congroup = conv_groups
        self.conv = Sequential(
            ops.Conv2d(Cin,
                       G,
                       kSize,
                       padding=(kSize - 1) // 2,
                       stride=1,
                       groups=self.congroup), ops.Relu())
Example #13
 def __init__(self, in_channels=1, out_channels=16, kernel_size=(3, 3)):
     super(TextConvBlock, self).__init__()
     self.conv1 = ops.Conv2d(in_channels, out_channels=out_channels, kernel_size=kernel_size)
     self.squeeze = ops.Squeeze(3)
     self.relu = ops.Relu()
     self.max_pool = ops.GlobalMaxPool1d()
     self.squeeze2 = ops.Squeeze(-1)
Example #14
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              affine=True,
              activation='relu',
              inplace=False):
     """Construct ConvBnAct class."""
     super(ConvBnAct, self).__init__()
     self.conv2d = ops.Conv2d(C_in,
                              C_out,
                              kernel_size,
                              stride,
                              padding,
                              bias=False)
     self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
     if activation == 'hswish':
         self.act = ops.Hswish(inplace=inplace)
     elif activation == 'hsigmoid':
         self.act = ops.Hsigmoid(inplace=inplace)
     elif activation == 'relu6':
         self.act = ops.Relu6(inplace=inplace)
     else:
         self.act = ops.Relu(inplace=inplace)
Example #15
 def __init__(self, inchannel, outchannel, innerchannel, stride=1):
     """Init PruneBasicBlock."""
     super(PruneBasicBlock, self).__init__()
     conv_block = PruneBasicConv(inchannel, outchannel, innerchannel, stride)
     shortcut = ShortCut(inchannel, outchannel, self.expansion, stride)
     self.block = Add(conv_block, shortcut)
     self.relu3 = ops.Relu()
Example #16
    def call(self, inputs):
        """Forward compute.

        :param inputs: input feature map
        :return: tuple of feature map
        """
        # assert len(inputs) == len(self.in_channels)
        laterals = [
            lateral_conv(inputs[i + self.start_level])
            for i, lateral_conv in enumerate(self.lateral_convs)
        ]
        used_backbone_levels = len(laterals)
        for i in range(used_backbone_levels - 1, 0, -1):
            laterals[i - 1] += ops.InterpolateScale(scale_factor=2,
                                                    mode='nearest')(
                                                        laterals[i])
        outs = [
            self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels)
        ]
        if self.num_outs > len(outs):
            if not self.add_extra_convs:
                for i in range(self.num_outs - used_backbone_levels):
                    outs.append(ops.MaxPool2d(1, stride=2)(outs[-1]))
            else:
                if self.extra_convs_on_inputs:
                    orig = inputs[self.backbone_end_level - 1]
                    outs.append(self.fpn_convs[used_backbone_levels](orig))
                else:
                    outs.append(self.fpn_convs[used_backbone_levels](outs[-1]))
                for i in range(used_backbone_levels + 1, self.num_outs):
                    if self.relu_before_extra_convs:
                        outs.append(self.fpn_convs[i](ops.Relu()(outs[-1])))
                    else:
                        outs.append(self.fpn_convs[i](outs[-1]))
        return tuple(outs)
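
The heart of the top-down pass is the lateral fusion loop: each coarser level is upsampled by 2x with nearest-neighbor interpolation and added to the next finer lateral. The same step in isolation, using plain PyTorch with illustrative shapes:

import torch
import torch.nn.functional as F

# three lateral maps at strides 8/16/32 of a 512px input
laterals = [torch.randn(1, 256, s, s) for s in (64, 32, 16)]
for i in range(len(laterals) - 1, 0, -1):
    laterals[i - 1] = laterals[i - 1] + F.interpolate(
        laterals[i], scale_factor=2, mode='nearest')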
Example #17
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation=1,
              affine=True,
              repeats=1):
     """Construct SepConv class."""
     super(SeparatedConv, self).__init__()
     for idx in range(repeats):
         self.add_module(
             '{}_conv1'.format(idx),
             ops.Conv2d(C_in,
                        C_in,
                        kernel_size=kernel_size,
                        stride=stride,
                        padding=padding,
                        dilation=dilation,
                        groups=C_in,
                        bias=False))
         self.add_module(
             '{}_conv2'.format(idx),
             ops.Conv2d(C_in, C_in, kernel_size=1, padding=0, bias=False))
         self.add_module('{}_batch'.format(idx),
                         ops.BatchNorm2d(C_in, affine=affine))
         self.add_module('{}_relu'.format(idx), ops.Relu(inplace=False))
Example #18
 def __init__(self, C, num_classes, input_size):
     """Init AuxiliaryHead."""
     super(AuxiliaryHead, self).__init__()
     stride = input_size - 5
     self.relu1 = ops.Relu(inplace=True)
     self.avgpool1 = ops.AvgPool2d(5,
                                   stride=stride,
                                   padding=0,
                                   count_include_pad=False)
     self.conv1 = ops.Conv2d(C, 128, 1, bias=False)
     self.batchnorm1 = ops.BatchNorm2d(128)
     self.relu2 = ops.Relu(inplace=True)
     self.conv2 = ops.Conv2d(128, 768, 2, bias=False)
     self.batchnorm2 = ops.BatchNorm2d(768)
     self.relu3 = ops.Relu(inplace=True)
     self.view = ops.View()
     self.classifier = ops.Linear(768, num_classes)
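
The stride choice is what makes this head input-size-agnostic: an AvgPool2d with kernel 5 and stride input_size - 5 reduces any input_size x input_size map to 2 x 2, which the following 2 x 2 convolution collapses to 1 x 1 before the classifier. Checking the pooling arithmetic:

def pooled_size(input_size, kernel=5):
    stride = input_size - kernel          # requires input_size > kernel
    return (input_size - kernel) // stride + 1

assert pooled_size(14) == 2  # e.g. a 14x14 feature map
assert pooled_size(8) == 2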
Example #19
 def __init__(self, channel, reduction=4):
     """Init SELayer."""
     super(SELayer, self).__init__()
     self.avg_pool = ops.AdaptiveAvgPool2d(1)
     hidden_dim = _make_divisible(channel // reduction, 8)
     self.fc = Sequential(ops.Linear(channel, hidden_dim),
                          ops.Relu(inplace=True),
                          ops.Linear(hidden_dim, channel), ops.Hsigmoid())
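
_make_divisible is not shown in this snippet; the usual MobileNet-style helper rounds a channel count to the nearest multiple of a divisor without dropping more than 10% below the original value. A common implementation, assumed here from the MobileNet reference code:

def _make_divisible(v, divisor, min_value=None):
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    if new_v < 0.9 * v:  # never round down by more than 10%
        new_v += divisor
    return new_v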
Example #20
 def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation=1, groups=1, bias=False):
     super(BN_Conv2d, self).__init__()
     self.seq = Sequential(
         ops.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                    padding=padding, dilation=dilation, groups=groups, bias=bias),
         ops.BatchNorm2d(out_channels),
         ops.Relu()
     )
Example #21
    def __init__(self, InChannel, OutChannel, growRate, nConvLayers, kSize=3):
        """Initialize Block.

        :param InChannel: channel number of input
        :type InChannel: int
        :param OutChannel: channel number of output
        :type OutChannel: int
        :param growRate: growth rate of block
        :type growRate: int
        :param nConvLayers: the number of convlution layer
        :type nConvLayers: int
        :param kSize: kernel size of convolution operation
        :type kSize: int
        """
        super(Cont_RDB, self).__init__()
        self.InChan = InChannel
        self.OutChan = OutChannel
        self.G = growRate
        self.C = nConvLayers
        if self.InChan != self.G:
            self.InConv = ops.Conv2d(self.InChan,
                                     self.G,
                                     1,
                                     padding=0,
                                     stride=1)
        if self.OutChan != self.G and self.OutChan != self.InChan:
            self.OutConv = ops.Conv2d(self.InChan,
                                      self.OutChan,
                                      1,
                                      padding=0,
                                      stride=1)
        self.pool = ops.AvgPool2d(2, 2)
        self.shup = ops.PixelShuffle(2)
        self.Convs = ops.MoudleList()  # sic: 'MoudleList' is the spelling in the ops API
        self.ShrinkConv = ops.MoudleList()
        for i in range(self.C):
            self.Convs.append(
                Sequential(
                    ops.Conv2d(self.G,
                               self.G,
                               kSize,
                               padding=(kSize - 1) // 2,
                               stride=1), ops.Relu()))
            if i < (self.C - 1):
                self.ShrinkConv.append(
                    ops.Conv2d((2 + i) * self.G,
                               self.G,
                               1,
                               padding=0,
                               stride=1))
            else:
                self.ShrinkConv.append(
                    ops.Conv2d(int((2 + i) * self.G / 4),
                               self.OutChan,
                               1,
                               padding=0,
                               stride=1))
Example #22
 def call(self, x):
     """Call function."""
     out = self.conv1(x)
     out = self.conv2(out)
     out = self.bn(self.conv3(out))
     if self.short_cut is not None:
         out += self.short_cut(x)
     else:
         out += x
     return ops.Relu(inplace=True)(out)
Example #23
 def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True, use_relu6=False):
     """Construct ConvBnRelu class."""
     super(ConvBnRelu, self).__init__()
     self.conv2d = ops.Conv2d(
         C_in, C_out, kernel_size, stride=stride, padding=padding, bias=False)
     self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
     if use_relu6:
         self.relu = ops.Relu6(inplace=False)
     else:
         self.relu = ops.Relu(inplace=False)
Example #24
 def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True):
     """Init ReLUConvBN."""
     super(ReLUConvBN, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv = ops.Conv2d(C_in,
                            C_out,
                            kernel_size,
                            stride=stride,
                            padding=padding,
                            bias=False)
     self.bn = ops.BatchNorm2d(C_out, affine=affine)
Example #25
 def _make_stem_layer(self):
     """Make stem layer."""
     self.conv1 = ops.Conv2d(3,
                             self.inplanes,
                             kernel_size=7,
                             stride=2,
                             padding=3,
                             bias=False)
     self.norm1 = ops.BatchNorm2d(64)  # assumes self.inplanes == 64
     self.relu = ops.Relu(inplace=True)
     self.maxpool = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
Example #26
    def __init__(self, init_plane):
        """Create SmallInputInitialBlock layer.

        :param init_plane: input channel.
        :type init_plane: int
        """
        super(SmallInputInitialBlock, self).__init__()
        self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn = ops.BatchNorm2d(num_features=init_plane)
        self.relu = ops.Relu()
Example #27
 def _blocks(self, out_channels, desc_blocks):
     blocks = ModuleList()
     in_channels = 32
     for i in range(desc_blocks):
         blocks.append(Sequential(
             ops.Conv2d(in_channels, out_channels, padding=1, kernel_size=3),
             ops.BatchNorm2d(out_channels),
             ops.Relu(inplace=True),
         ))
         in_channels = out_channels
     return blocks
Example #28
    def __init__(self, C_in, C_out, affine=True):
        """Construct FactorizedReduce class.

        :param C_in: input channel
        :param C_out: output channel
        :param affine: whether to use affine in BN
        """
        super(FactorizedReduce, self).__init__()
        assert C_out % 2 == 0
        self.relu = ops.Relu(inplace=False)
        self.conv_1 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
        self.conv_2 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
        self.bn = ops.BatchNorm2d(C_out, affine=affine)
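
The forward pass is not shown, but the two stride-2 1x1 convolutions are conventionally applied to inputs offset by one pixel and concatenated on the channel axis, so together they cover every spatial position (the DARTS trick). A sketch of that call, assuming the attributes above and using torch.cat for illustration:

import torch

def call(self, x):
    x = self.relu(x)
    # conv_2 sees the input shifted by one pixel, so the two stride-2
    # branches sample complementary spatial positions
    out = torch.cat([self.conv_1(x), self.conv_2(x[:, :, 1:, 1:])], dim=1)
    return self.bn(out)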
Example #29
    def call(self, x, **kwargs):
        """call."""
        outs = [x]
        current = x

        for module_layer in self.module_list:
            for i, layer in enumerate(module_layer):
                if i == 0:
                    outs.append(layer(current))
                else:
                    outs = layer(outs)
            current = ops.Relu()(outs[-1])

        return current
Example #30
def _transsorm_op(init_layer):
    """Transform the torch op to Vega op."""
    if isinstance(init_layer, nn.Conv2d):
        in_channels = init_layer.in_channels
        out_channels = init_layer.out_channels
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        # bias = init_layer.bias
        new_layer = ops.Conv2d(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=padding)
    elif isinstance(init_layer, nn.BatchNorm2d):
        num_features = init_layer.num_features
        new_layer = ops.BatchNorm2d(num_features=num_features)
    elif isinstance(init_layer, nn.ReLU):
        new_layer = ops.Relu()
    elif isinstance(init_layer, nn.MaxPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.MaxPool2d(kernel_size=kernel_size,
                                  stride=stride,
                                  padding=padding)
    elif isinstance(init_layer, nn.AvgPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.AvgPool2d(kernel_size=kernel_size,
                                  stride=stride,
                                  padding=padding)
    elif isinstance(init_layer, nn.AdaptiveAvgPool2d):
        output_size = init_layer.output_size
        new_layer = ops.AdaptiveAvgPool2d(output_size=output_size)
    elif isinstance(init_layer, nn.Linear):
        in_features = init_layer.in_features
        out_features = init_layer.out_features
        # use_bias = init_layer.bias
        new_layer = ops.Linear(in_features=in_features,
                               out_features=out_features)
    elif isinstance(init_layer, nn.Dropout):
        prob = init_layer.p
        inplace = init_layer.inplace
        new_layer = ops.Dropout(prob=prob, inplace=inplace)
    else:
        raise ValueError("The op {} is not supported.".format(
            type(init_layer)))
    return new_layer
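
A usage sketch (note that _transsorm_op is the spelling used in the source): mapping each layer of a small torch stem through the converter, assuming Vega's ops module is importable:

import torch.nn as nn

torch_stem = nn.Sequential(
    nn.Conv2d(3, 16, kernel_size=3, padding=1),
    nn.BatchNorm2d(16),
    nn.ReLU(),
)
vega_layers = [_transsorm_op(layer) for layer in torch_stem]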