Example #1
 def __init__(self,
              inp,
              hidden_dim,
              oup,
              kernel_size,
              stride,
              use_se=False,
              use_hs=False,
              momentum=0.1):
     """Init InvertedResidualSE."""
     super(InvertedResidualSE, self).__init__()
     self.identity = stride == 1 and inp == oup
     self.ir_block = Sequential(
         # pw
         ops.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
         ops.BatchNorm2d(hidden_dim, momentum=momentum),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # dw
         ops.Conv2d(hidden_dim,
                    hidden_dim,
                    kernel_size,
                    stride, (kernel_size - 1) // 2,
                    groups=hidden_dim,
                    bias=False),
         ops.BatchNorm2d(hidden_dim, momentum=momentum),
         # Squeeze-and-Excite
         SELayer(hidden_dim) if use_se else Sequential(),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # pw-linear
         ops.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
         ops.BatchNorm2d(oup, momentum=momentum),
     )
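For readers without the Vega `ops` module, the following is a standalone PyTorch re-expression of the same pw → dw → pw-linear structure (hswish/SE branches omitted; `torch.nn` names are substitutes for the `ops.*` calls above, not the Vega API):

import torch.nn as nn

def inverted_residual(inp, hidden_dim, oup, kernel_size, stride, momentum=0.1):
    """Pointwise expand -> depthwise -> pointwise project, as in the block above."""
    return nn.Sequential(
        nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
        nn.BatchNorm2d(hidden_dim, momentum=momentum),
        nn.ReLU(inplace=True),
        nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride,
                  (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
        nn.BatchNorm2d(hidden_dim, momentum=momentum),
        nn.ReLU(inplace=True),
        nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup, momentum=momentum),
    )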
Example #2
    def __init__(self, encoding):
        super(DNetBackbone, self).__init__()
        op_names = ["conv3", "conv1", "conv3_grp2", "conv3_grp4", "conv3_base1", "conv3_base32", "conv3_sep"]

        # parse the encoding: <block>_<stem channels>_<macro>
        block_str, num_channel, macro_str = encoding.split('_')
        curr_channel, index = int(num_channel), 0

        _big_model = "*" in block_str
        if _big_model:
            block_encoding_list = block_str.split('*')

        # stem
        layers = [
            create_op('conv3', 3, curr_channel // 2, stride=2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel // 2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel, stride=2),
            ops.Relu()
        ]

        # body
        if not _big_model:
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1

                channel_increase = int(macro_str[index])
                block = EncodedBlock(block_str, curr_channel, op_names, stride, channel_increase)
                layers.append(block)
                curr_channel *= channel_increase
                index += 1
        else:
            block_encoding_index = 0
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1
                    block_encoding_index += 1
                channel_increase = int(macro_str[index])
                block_encoding = block_encoding_list[block_encoding_index]
                block = EncodedBlock(block_encoding, curr_channel, op_names, stride, channel_increase)
                layers.append(block)
                curr_channel *= channel_increase
                index += 1
        layers.append(ops.AdaptiveAvgPool2d((1, 1)))
        self.layers = Sequential(*layers)
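The parsing above implies an encoding of the form `<block>_<stem channels>_<macro>`: each digit in the macro string multiplies the current channel count, and a preceding '-' marks a stride-2 block. A pure-Python walk-through with a made-up encoding (the block spec here is hypothetical):

block_str, num_channel, macro_str = "block_64_-21-41".split('_')  # hypothetical encoding
curr_channel, index = int(num_channel), 0
while index < len(macro_str):
    stride = 1
    if macro_str[index] == '-':
        stride = 2
        index += 1
    channel_increase = int(macro_str[index])
    print(f"block: in={curr_channel}, stride={stride}, x{channel_increase}")
    curr_channel *= channel_increase
    index += 1
# block: in=64, stride=2, x2 / in=128, stride=1, x1 / in=128, stride=2, x4 / in=512, stride=1, x1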
Example #3
    def __init__(self, encoding, n_class=1000):
        super(DNet, self).__init__()
        op_names = ["conv3", "conv1", "conv3_grp2", "conv3_grp4", "conv3_base1", "conv3_base32", "conv3_sep"]
        block_str, num_channel, macro_str = encoding.split('_')
        curr_channel, index = int(num_channel), 0
        _big_model = "*" in block_str
        if _big_model:
            block_encoding_list = block_str.split('*')
        # stem
        self.layers = Sequential(
            create_op('conv3', 3, curr_channel // 2, stride=2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel // 2),
            ops.Relu(),
            create_op('conv3', curr_channel // 2, curr_channel, stride=2),
            ops.Relu()
        )

        # body
        if not _big_model:
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1

                channel_increase = int(macro_str[index])
                block = EncodedBlock(block_str, curr_channel, op_names, stride, channel_increase)
                self.layers.append(block)
                curr_channel *= channel_increase
                index += 1
        else:
            block_encoding_index = 0
            while index < len(macro_str):
                stride = 1
                if macro_str[index] == '-':
                    stride = 2
                    index += 1
                    block_encoding_index += 1
                channel_increase = int(macro_str[index])
                block_encoding = block_encoding_list[block_encoding_index]
                block = EncodedBlock(block_encoding, curr_channel, op_names, stride, channel_increase)
                self.layers.append(block)
                curr_channel *= channel_increase
                index += 1
        self.layers.append(ops.AdaptiveAvgPool2d((1, 1)))
        self.view = ops.View()
        self.fc = ops.Linear(in_features=curr_channel, out_features=n_class)
Example #4
    def __init__(self,
                 inchannel,
                 outchannel,
                 expansion,
                 groups,
                 base_width,
                 stride=1,
                 norm_layer={"norm_type": 'BN'},
                 Conv2d='Conv2d'):
        """Create BottleConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param expansion: expansion
        :type expansion: int
        :param stride: the number to jump, default 1
        :type stride: int
        """
        super(BottleConv, self).__init__()
        outchannel = int(outchannel * (base_width / 64.)) * groups
        self.conv1 = build_conv_layer(in_channels=inchannel,
                                      out_channels=outchannel,
                                      kernel_size=1,
                                      stride=1,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch1 = build_norm_layer(features=outchannel, **norm_layer)
        self.relu1 = ops.Relu(inplace=True)
        self.conv2 = build_conv_layer(in_channels=outchannel,
                                      out_channels=outchannel,
                                      kernel_size=3,
                                      stride=stride,
                                      padding=1,
                                      groups=groups,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch2 = build_norm_layer(features=outchannel, **norm_layer)
        self.relu2 = ops.Relu(inplace=True)
        self.conv3 = build_conv_layer(in_channels=outchannel,
                                      out_channels=outchannel * expansion,
                                      kernel_size=1,
                                      stride=1,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch3 = build_norm_layer(features=outchannel * expansion,
                                       **norm_layer)
Example #5
 def call(self, x):
     """Call function."""
     out = self.conv1(x)
     out = self.conv2(out)
     out = self.bn(self.conv3(out))
     out += self.short_cut(x)
     return ops.Relu()(out)
Example #6
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              bias=False,
              momentum=0.1,
              affine=True,
              activation='relu',
              inplace=True):
     """Construct ConvBnAct class."""
     super(ConvBnAct, self).__init__()
     self.conv2d = ops.Conv2d(C_in,
                              C_out,
                              kernel_size,
                              stride,
                              padding,
                              bias=bias)
     self.batch_norm2d = ops.BatchNorm2d(C_out,
                                         affine=affine,
                                         momentum=momentum)
     if activation == 'hswish':
         self.act = ops.Hswish(inplace=inplace)
     elif activation == 'hsigmoid':
         self.act = ops.Hsigmoid(inplace=inplace)
     elif activation == 'relu6':
         self.act = ops.Relu6(inplace=inplace)
     else:
         self.act = ops.Relu(inplace=inplace)
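The if/elif ladder dispatches on the activation name and silently falls back to ReLU; a dict lookup is an equivalent, stricter pattern. A sketch against the same assumed `ops` module:

_ACTIVATIONS = {
    'hswish': ops.Hswish,
    'hsigmoid': ops.Hsigmoid,
    'relu6': ops.Relu6,
    'relu': ops.Relu,
}

# inside __init__, raising KeyError on unknown names instead of defaulting:
# self.act = _ACTIVATIONS[activation](inplace=inplace)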
Example #7
 def _make_stem_layer(self):
     """Make stem layer."""
     self.conv1 = ops.Conv2d(
         3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
      self.norm1 = ops.BatchNorm2d(self.inplanes)  # must match conv1's out_channels
     self.relu = ops.Relu(inplace=True)
     self.maxpool = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
Example #8
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation=1,
              affine=True,
              repeats=1):
     """Construct SepConv class."""
     super(SeparatedConv, self).__init__()
     for idx in range(repeats):
         self.add_module(
             '{}_conv1'.format(idx),
             ops.Conv2d(C_in,
                        C_in,
                        kernel_size=kernel_size,
                        stride=stride,
                        padding=padding,
                        dilation=dilation,
                        groups=C_in,
                        bias=False))
         self.add_module(
             '{}_conv2'.format(idx),
             ops.Conv2d(C_in, C_in, kernel_size=1, padding=0, bias=False))
         self.add_module('{}_batch'.format(idx),
                         ops.BatchNorm2d(C_in, affine=affine))
         self.add_module('{}_relu'.format(idx), ops.Relu(inplace=False))
Example #9
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation,
              affine=True):
     """Construct SepConv class."""
     super(DilConv, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv1 = ops.Conv2d(C_in,
                             C_in,
                             kernel_size=kernel_size,
                             stride=stride,
                             padding=padding,
                             dilation=dilation,
                             groups=C_in,
                             bias=False)
     self.conv2 = ops.Conv2d(C_in,
                             C_out,
                             kernel_size=1,
                             padding=0,
                             bias=False)
     self.batch = ops.BatchNorm2d(C_out, affine=affine)
Example #10
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1,
                 norm_layer={"norm_type": 'BN'},
                 Conv2d='Conv2d'):
        """Create BottleneckBlock layers.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BottleneckBlock, self).__init__()
        bottle_conv = BottleConv(inchannel=inchannel,
                                 outchannel=outchannel,
                                 expansion=self.expansion,
                                 stride=stride,
                                 groups=groups,
                                 base_width=base_width,
                                 norm_layer=norm_layer,
                                 Conv2d=Conv2d)
        shortcut = ShortCut(inchannel=inchannel,
                            outchannel=outchannel,
                            expansion=self.expansion,
                            stride=stride,
                            norm_layer=norm_layer)
        self.block = Add(bottle_conv, shortcut)
        self.relu = ops.Relu()
Example #11
 def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation=1, groups=1, bias=False):
     super(BN_Conv2d, self).__init__()
     self.seq = Sequential(
         ops.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                    padding=padding, dilation=dilation, groups=groups, bias=bias),
         ops.BatchNorm2d(out_channels),
         ops.Relu()
     )
Example #12
 def __init__(self, inchannel, outchannel, innerchannel, stride=1):
     """Init PruneBasicBlock."""
     super(PruneBasicBlock, self).__init__()
     conv_block = PruneBasicConv(inchannel, outchannel, innerchannel,
                                 stride)
     shortcut = ShortCut(inchannel, outchannel, self.expansion, stride)
     self.block = Add(conv_block, shortcut)
     self.relu3 = ops.Relu()
Example #13
 def __init__(self, C, num_classes, input_size):
     """Init AuxiliaryHead."""
     super(AuxiliaryHead, self).__init__()
     stride = input_size - 5
     self.relu1 = ops.Relu(inplace=True)
     self.avgpool1 = ops.AvgPool2d(5,
                                   stride=stride,
                                   padding=0,
                                   count_include_pad=False)
     self.conv1 = ops.Conv2d(C, 128, 1, bias=False)
     self.batchnorm1 = ops.BatchNorm2d(128)
     self.relu2 = ops.Relu(inplace=True)
     self.conv2 = ops.Conv2d(128, 768, 2, bias=False)
     self.batchnorm2 = ops.BatchNorm2d(768)
     self.relu3 = ops.Relu(inplace=True)
     self.view = ops.View()
     self.classifier = ops.Linear(768, num_classes)
Example #14
 def __init__(self, channel, reduction=4):
     """Init SELayer."""
     super(SELayer, self).__init__()
     self.avg_pool = ops.AdaptiveAvgPool2d(1)
     hidden_dim = _make_divisible(channel // reduction, 8)
     self.fc = Sequential(ops.Linear(channel, hidden_dim, use_bias=False),
                          ops.Relu(inplace=True),
                          ops.Linear(hidden_dim, channel, use_bias=False),
                          ops.Hsigmoid())
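`_make_divisible` is not defined in this snippet; the canonical MobileNet helper below is a likely stand-in. It rounds `v` to a multiple of `divisor` while never dropping more than 10% of the original value:

def _make_divisible(v, divisor, min_value=None):
    """Round v to a multiple of divisor, keeping at least 90% of it."""
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    if new_v < 0.9 * v:
        new_v += divisor  # rounding down lost more than 10%, so round up
    return new_v

# _make_divisible(64 // 4, 8) -> 16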
Example #15
 def __init__(self, in_channels=1, out_channels=16, kernel_size=(3, 3)):
     super(TextConvBlock, self).__init__()
     self.conv1 = ops.Conv2d(in_channels,
                             out_channels=out_channels,
                             kernel_size=kernel_size)
     self.squeeze = ops.Squeeze(3)
     self.relu = ops.Relu()
     self.max_pool = ops.GlobalMaxPool1d()
     self.squeeze2 = ops.Squeeze(-1)
Example #16
 def call(self, x):
     """Call function."""
     out = self.conv1(x)
     out = self.conv2(out)
     out = self.bn(self.conv3(out))
     if self.short_cut is not None:
         out += self.short_cut(x)
     else:
         out += x
     return ops.Relu(inplace=True)(out)
Example #17
 def __init__(self, in_planes, planes, inner_plane, stride=1):
     """Create BottleConv layer."""
     super(PruneBasicConv, self).__init__()
     self.conv1 = ops.Conv2d(in_planes,
                             inner_plane,
                             kernel_size=3,
                             stride=stride,
                             padding=1,
                             bias=False)
     self.bn1 = ops.BatchNorm2d(inner_plane)
     self.relu = ops.Relu()
     self.conv2 = ops.Conv2d(inner_plane,
                             planes,
                             kernel_size=3,
                             stride=1,
                             padding=1,
                             bias=False)
     self.bn2 = ops.BatchNorm2d(planes)
     self.relu2 = ops.Relu()
Example #18
 def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True):
     """Init ReLUConvBN."""
     super(ReLUConvBN, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv = ops.Conv2d(C_in,
                            C_out,
                            kernel_size,
                            stride=stride,
                            padding=padding,
                            bias=False)
     self.bn = ops.BatchNorm2d(C_out, affine=affine)
Example #19
    def call(self, x, **kwargs):
        """call."""
        outs = [x]
        current = x

        for module_layer in self.module_list:
            for i, layer in enumerate(module_layer):
                if i == 0:
                    outs.append(layer(current))
                else:
                    outs = layer(outs)
            current = ops.Relu()(outs[-1])

        return current
Example #20
def _transform_op(init_layer):
    """Transform the torch op to Vega op."""
    if isinstance(init_layer, nn.Conv2d):
        in_channels = init_layer.in_channels
        out_channels = init_layer.out_channels
        kernel_size = init_layer.kernel_size[0]
        stride = init_layer.stride
        padding = init_layer.padding
        # bias = init_layer.bias
        new_layer = ops.Conv2d(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=padding,
                               bias=False)
    elif isinstance(init_layer, nn.BatchNorm2d):
        num_features = init_layer.num_features
        new_layer = ops.BatchNorm2d(num_features=num_features)
    elif isinstance(init_layer, nn.ReLU):
        new_layer = ops.Relu()
    elif isinstance(init_layer, nn.MaxPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        # padding = init_layer.padding
        new_layer = ops.MaxPool2d(kernel_size=kernel_size, stride=stride)
    elif isinstance(init_layer, nn.AvgPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.AvgPool2d(kernel_size=kernel_size,
                                  stride=stride,
                                  padding=padding)
    elif isinstance(init_layer, P.ReduceMean):
        new_layer = ops.AdaptiveAvgPool2d()
    elif isinstance(init_layer, nn.Dense):
        in_features = init_layer.in_channels
        out_features = init_layer.out_channels
        # use_bias = init_layer.bias
        new_layer = ops.Linear(in_features=in_features,
                               out_features=out_features)
    elif isinstance(init_layer, nn.Dropout):
        prob = init_layer.p
        inplace = init_layer.inplace
        new_layer = ops.Dropout(prob=prob, inplace=inplace)
    elif isinstance(init_layer, nn.Flatten):
        new_layer = ops.View()
    else:
        raise ValueError("The op {} is not supported.".format(
            type(init_layer)))
    return new_layer
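The branches test MindSpore types (`nn.Conv2d`, `nn.Dense`, `P.ReduceMean`), so this mapper converts a MindSpore cell into the corresponding Vega op. A usage sketch, assuming MindSpore and Vega are both installed:

import mindspore.nn as nn

vega_conv = _transform_op(nn.Conv2d(3, 16, 3))   # -> ops.Conv2d(3, 16, ...)
vega_bn = _transform_op(nn.BatchNorm2d(16))      # -> ops.BatchNorm2d(16)
try:
    _transform_op(nn.Tanh())                     # not in the mapping
except ValueError as err:
    print(err)                                   # "The op ... is not supported."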
Example #21
 def _blocks(self, out_channels, desc_blocks):
     blocks = ModuleList()
     in_channels = 32
     for i in range(desc_blocks):
         blocks.append(
             Sequential(
                 ops.Conv2d(in_channels,
                            out_channels,
                            padding=1,
                            kernel_size=3),
                 ops.BatchNorm2d(out_channels),
                 ops.Relu(inplace=True),
             ))
         in_channels = out_channels
     return blocks
Example #22
    def __init__(self, init_plane):
        """Create SmallInputInitialBlock layer.

        :param init_plane: input channel.
        :type init_plane: int
        """
        super(SmallInputInitialBlock, self).__init__()
        self.conv = ops.Conv2d(in_channels=3,
                               out_channels=init_plane,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               bias=False)
        self.bn = ops.BatchNorm2d(num_features=init_plane)
        self.relu = ops.Relu()
Example #23
    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False):
        """Init Bottleneck."""
        super(Bottleneck, self).__init__()
        assert style in ['pytorch', 'caffe']
        self.inplanes = inplanes
        self.planes = planes
        self.stride = stride
        self.dilation = dilation
        self.style = style
        self.with_cp = with_cp
        self.norm1 = ops.BatchNorm2d(planes)
        self.norm2 = ops.BatchNorm2d(planes)
        self.norm3 = ops.BatchNorm2d(planes * self.expansion)

        self.conv1 = ops.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.with_modulated_dcn = False
        self.conv2 = ops.Conv2d(
            planes,
            planes,
            kernel_size=3,
            stride=stride,
            padding=dilation,
            dilation=dilation,
            bias=False,
        )
        self.conv3 = ops.Conv2d(planes,
                                planes * self.expansion,
                                kernel_size=1,
                                bias=False)
        self.relu = ops.Relu(inplace=True)

        if stride > 1 or downsample is not None:
            conv_layer = ops.Conv2d(inplanes,
                                    planes * self.expansion,
                                    kernel_size=1,
                                    stride=stride,
                                    bias=False)
            norm_layer = ops.BatchNorm2d(planes * self.expansion)
            self.downsample = Sequential(conv_layer, norm_layer)
        else:
            self.downsample = None
Example #24
 def __init__(self,
              in_channels,
              out_channels,
              kernel_size,
              stride=1,
              padding=0,
              dilation=1,
              groups=1,
              bias='auto',
              activation='relu',
              inplace=True,
              activate_last=True):
     """Init Conv Module with Normalization."""
     super(ConvModule, self).__init__()
     self.activation = activation
     self.inplace = inplace
     self.activate_last = activate_last
     self.with_norm = True
      self.with_activation = activation is not None
      if bias == 'auto':
          bias = not self.with_norm
     self.with_bias = bias
     self.conv = ops.Conv2d(in_channels,
                            out_channels,
                            kernel_size,
                            stride=stride,
                            padding=padding,
                            dilation=dilation,
                            groups=groups,
                            bias=bias)
     self.in_channels = self.conv.in_channels
     self.out_channels = self.conv.out_channels
     self.kernel_size = self.conv.kernel_size
     self.stride = self.conv.stride
     self.padding = self.conv.padding
     self.dilation = self.conv.dilation
     self.transposed = self.conv.transposed
     self.output_padding = self.conv.output_padding
     self.groups = self.conv.groups
     if self.with_norm:
         norm_channels = out_channels if self.activate_last else in_channels
         self.norm = ops.BatchNorm2d(norm_channels)
      if self.with_activation:
         if self.activation not in ['relu']:
             raise ValueError('{} is currently not supported.'.format(
                 self.activation))
         if self.activation == 'relu':
             self.activate = ops.Relu(inplace=inplace)
Example #25
    def __init__(self, init_plane):
        """Create InitialBlock layer.

        :param init_plane: input channel.
        :type init_plane: int
        """
        super(InitialBlock, self).__init__()
        self.conv = ops.Conv2d(in_channels=3,
                               out_channels=init_plane,
                               kernel_size=7,
                               stride=2,
                               padding=3,
                               bias=False)
        self.batch = ops.BatchNorm2d(num_features=init_plane)
        self.relu = ops.Relu()
        self.maxpool2d = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
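For a 224x224 input this stem performs the standard ResNet reduction: the stride-2 7x7 conv with padding 3 maps 224 -> floor((224 + 6 - 7) / 2) + 1 = 112, and the stride-2 3x3 max-pool maps 112 -> floor((112 + 2 - 3) / 2) + 1 = 56, so the body receives init_plane x 56 x 56 features.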
Example #26
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              Conv2d='Conv2d',
              affine=True,
              use_relu6=False,
              norm_layer='BN',
              has_bn=True,
              has_relu=True,
              **kwargs):
     """Construct ConvBnRelu class."""
     super(ConvBnRelu, self).__init__()
     if Conv2d == 'Conv2d':
         self.conv2d = ops.Conv2d(C_in,
                                  C_out,
                                  kernel_size,
                                  stride=stride,
                                  padding=padding,
                                  bias=False)
     elif Conv2d == 'ConvWS2d':
         self.conv2d = ops.ConvWS2d(C_in,
                                    C_out,
                                    kernel_size,
                                    stride=stride,
                                    padding=padding,
                                    bias=False)
     if has_bn:
         if norm_layer == 'BN':
             self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
         elif norm_layer == 'GN':
             num_groups = kwargs.pop('num_groups')
             self.batch_norm2d = ops.GroupNorm(num_groups,
                                               C_out,
                                               affine=affine)
         elif norm_layer == 'Sync':
             self.batch_norm2d = ops.SyncBatchNorm(C_out, affine=affine)
     if has_relu:
         if use_relu6:
             self.relu = ops.Relu6(inplace=False)
         else:
             self.relu = ops.Relu(inplace=False)
Example #27
 def call(self, x):
     """Forward x."""
     out = x[self.collect_inds[0]]
     for i in range(1, len(self.collect_inds)):
         collect = x[self.collect_inds[i]]
         if ops.get_shape(out)[2] > ops.get_shape(collect)[2]:
             # upsample collect
             collect = ops.interpolate(collect,
                                       size=ops.get_shape(out)[2:],
                                       mode='bilinear',
                                       align_corners=True)
         elif ops.get_shape(collect)[2] > ops.get_shape(out)[2]:
             out = ops.interpolate(out,
                                   size=ops.get_shape(collect)[2:],
                                   mode='bilinear',
                                   align_corners=True)
         if self.agg_concat:
             out = ops.concat([out, collect])
         else:
             out += collect
     out = ops.Relu()(out)
     return out
Example #28
 def __init__(self,
              inplanes,
              planes,
              stride=1,
              dilation=1,
              downsample=None,
              style='pytorch',
              with_cp=False):
     """Init BasicBlock."""
     super(BasicBlock, self).__init__()
     self.expansion = 1
     self.norm1 = ops.BatchNorm2d(planes)
     self.norm2 = ops.BatchNorm2d(planes)
     self.conv1 = ops.Conv2d(inplanes,
                             planes,
                             3,
                             stride=stride,
                             padding=dilation,
                             dilation=dilation,
                             bias=False)
     self.conv2 = ops.Conv2d(planes, planes, 3, padding=1, bias=False)
     self.relu = ops.Relu(inplace=True)
     if stride > 1 or downsample is not None:
         conv_layer = ops.Conv2d(inplanes,
                                 planes * self.expansion,
                                 kernel_size=1,
                                 stride=stride,
                                 bias=False)
         norm_layer = ops.BatchNorm2d(planes)
         self.downsample = Sequential(conv_layer, norm_layer)
     else:
         self.downsample = None
     self.inplanes = inplanes
     self.planes = planes
     self.stride = stride
     self.dilation = dilation
     self.style = style
     assert not with_cp
Example #29
    def __init__(self, C_in, C_out, affine=True):
        """Construct FactorizedReduce class.

        :param C_in: input channel
        :param C_out: output channel
        :param affine: whether to use affine in BN
        """
        super(FactorizedReduce, self).__init__()
        assert C_out % 2 == 0
        self.relu = ops.Relu(inplace=False)
        self.conv_1 = ops.Conv2d(C_in,
                                 C_out // 2,
                                 1,
                                 stride=2,
                                 padding=0,
                                 bias=False)
        self.conv_2 = ops.Conv2d(C_in,
                                 C_out // 2,
                                 1,
                                 stride=2,
                                 padding=0,
                                 bias=False)
        self.bn = ops.BatchNorm2d(C_out, affine=affine)
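The forward pass is not shown; in the DARTS pattern this class follows, `conv_2` is applied to the input shifted by one pixel so the two stride-2 branches sample complementary spatial positions. A hedged sketch (not verbatim Vega code; assumes ops.concat joins on the channel axis):

    def call(self, x):
        """Sketch of the usual DARTS-style forward."""
        x = self.relu(x)
        # conv_2 sees a one-pixel-shifted view, so the two stride-2 branches
        # together cover all spatial positions
        out = ops.concat([self.conv_1(x), self.conv_2(x[:, :, 1:, 1:])])
        return self.bn(out)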
Example #30
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1,
                 norm_layer={"norm_type": 'BN'},
                 Conv2d='Conv2d'):
        """Create BasicConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param stride: convolution stride, default 1
        :type stride: int
        """
        super(BasicConv, self).__init__()
        self.conv = build_conv_layer(in_channels=inchannel,
                                     out_channels=outchannel,
                                     kernel_size=3,
                                     stride=stride,
                                     padding=1,
                                     groups=groups,
                                     bias=False,
                                     Conv2d=Conv2d)
        self.batch = build_norm_layer(features=outchannel, **norm_layer)
        self.relu = ops.Relu(inplace=True)
        self.conv2 = build_conv_layer(in_channels=outchannel,
                                      out_channels=outchannel,
                                      kernel_size=3,
                                      stride=1,
                                      padding=1,
                                      groups=groups,
                                      bias=False,
                                      Conv2d=Conv2d)
        self.batch2 = build_norm_layer(features=outchannel, **norm_layer)