Example 1
 def __init__(self,
              inplanes,
              planes,
              stride=1,
              dilation=1,
              downsample=None,
              style='pytorch',
              with_cp=False):
     """Init BasicBlock."""
     super(BasicBlock, self).__init__()
     self.expansion = 1
     self.norm1 = ops.BatchNorm2d(planes)
     self.norm2 = ops.BatchNorm2d(planes)
     self.conv1 = ops.Conv2d(inplanes,
                             planes,
                             3,
                             stride=stride,
                             padding=dilation,
                             dilation=dilation,
                             bias=False)
     self.conv2 = ops.Conv2d(planes, planes, 3, padding=1, bias=False)
     self.relu = ops.Relu(inplace=True)
     self.downsample = downsample
     self.inplanes = inplanes
     self.planes = planes
     self.stride = stride
     self.dilation = dilation
     self.style = style
     assert not with_cp
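The excerpt stops at the constructor. A minimal, hedged sketch of the standard ResNet BasicBlock forward that this layout implies (hypothetical, not part of the excerpt):

 def forward(self, x):
     # Sketch: conv-bn-relu, conv-bn, then add the (possibly downsampled) shortcut.
     identity = x if self.downsample is None else self.downsample(x)
     out = self.relu(self.norm1(self.conv1(x)))
     out = self.norm2(self.conv2(out))
     return self.relu(out + identity)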
Example 2
    def __init__(self, inp, oup, stride, kernel=3, expand_ratio=1):
        """Construct InvertedResidual class.

        :param inp: input channel
        :param oup: output channel
        :param stride: stride
        :param kernel: kernel
        :param expand_ratio: channel increase multiplier
        """
        super(InvertedConv, self).__init__()
        hidden_dim = round(inp * expand_ratio)
        conv = []
        if expand_ratio > 1:
            conv = [
                ops.Conv2d(in_channels=inp, out_channels=hidden_dim,
                           kernel_size=1, stride=1, padding=0, bias=False),
                ops.BatchNorm2d(num_features=hidden_dim),
                ops.Relu6(inplace=True)
            ]
        conv = conv + [
            ops.Conv2d(in_channels=hidden_dim, out_channels=hidden_dim, kernel_size=kernel,
                       stride=stride, padding=kernel // 2, groups=hidden_dim, bias=False, depthwise=True),
            ops.BatchNorm2d(num_features=hidden_dim),
            ops.Relu6(inplace=True),
            ops.Conv2d(in_channels=hidden_dim, out_channels=oup,
                       kernel_size=1, stride=1, padding=0, bias=False),
            ops.BatchNorm2d(num_features=oup)
        ]
        self.models = Sequential(*conv)
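The forward pass is not shown. Since the constructor stores no identity flag, a hedged sketch would simply apply the stack and leave any residual add to the caller:

    def forward(self, x):
        # Sketch: run the expand / depthwise / project stack end to end.
        return self.models(x)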
Example 3
 def __init__(self,
              inp,
              hidden_dim,
              oup,
              kernel_size,
              stride,
              use_se=False,
              use_hs=False):
     """Init InvertedResidualSE."""
     super(InvertedResidualSE, self).__init__()
     self.identity = stride == 1 and inp == oup
     self.ir_block = Sequential(
         # pw
         ops.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
         ops.BatchNorm2d(hidden_dim),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # dw
         ops.Conv2d(hidden_dim,
                    hidden_dim,
                    kernel_size,
                    stride, (kernel_size - 1) // 2,
                    groups=hidden_dim,
                    bias=False),
         ops.BatchNorm2d(hidden_dim),
         # Squeeze-and-Excite
         SELayer(hidden_dim) if use_se else ops.Identity(),
         ops.Hswish() if use_hs else ops.Relu(inplace=True),
         # pw-linear
         ops.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
         ops.BatchNorm2d(oup),
     )
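A hedged forward sketch, assuming the standard MobileNetV3 pattern that the precomputed self.identity flag suggests:

 def forward(self, x):
     # Sketch: add the input back only when stride == 1 and inp == oup.
     out = self.ir_block(x)
     return x + out if self.identity else out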
Example 4
 def __init__(self, in_chnls, cardinality, group_depth, stride):
     super(ResNeXt_Block, self).__init__()
     self.group_chnls = cardinality * group_depth
     self.conv1 = BN_Conv2d(in_chnls,
                            self.group_chnls,
                            1,
                            stride=1,
                            padding=0)
     self.conv2 = BN_Conv2d(self.group_chnls,
                            self.group_chnls,
                            3,
                            stride=stride,
                            padding=1,
                            groups=cardinality)
     self.conv3 = ops.Conv2d(self.group_chnls,
                             self.group_chnls * 2,
                             1,
                             stride=1,
                             padding=0)
     self.bn = ops.BatchNorm2d(self.group_chnls * 2)
     self.short_cut = Sequential(
         ops.Conv2d(in_chnls,
                    self.group_chnls * 2,
                    1,
                    stride,
                    0,
                    bias=False), ops.BatchNorm2d(self.group_chnls * 2))
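A hedged forward sketch of the usual ResNeXt pattern (BN_Conv2d bundles conv, BN, and ReLU, as Example 11 below shows):

 def forward(self, x):
     # Sketch: grouped bottleneck, then add the projected shortcut.
     out = self.bn(self.conv3(self.conv2(self.conv1(x))))
     out = out + self.short_cut(x)
     return ops.Relu()(out)  # assumes ops.Relu modules are plain callables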
Example 5
    def __init__(self,
                 inchannel,
                 outchannel,
                 groups=1,
                 base_width=64,
                 stride=1):
        """Create BasicConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param groups: number of convolution groups, default 1
        :type groups: int
        :param base_width: base channel width, default 64
        :type base_width: int
        :param stride: the number to jump, default 1
        :type stride: int
        """
        super(BasicConv, self).__init__()
        self.conv = ops.Conv2d(in_channels=inchannel,
                               out_channels=outchannel,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=groups,
                               bias=False)
        self.batch = ops.BatchNorm2d(num_features=outchannel)
        self.relu = ops.Relu(inplace=True)
        self.conv2 = ops.Conv2d(in_channels=outchannel,
                                out_channels=outchannel,
                                kernel_size=3,
                                padding=1,
                                groups=groups,
                                bias=False)
        self.batch2 = ops.BatchNorm2d(num_features=outchannel)
Example 6
 def __init__(self, in_planes, planes, inner_plane, stride=1):
     """Create BottleConv layer."""
     super(PruneBasicConv, self).__init__()
     self.conv1 = ops.Conv2d(
         in_planes, inner_plane, kernel_size=3, stride=stride, padding=1, bias=False)
     self.bn1 = ops.BatchNorm2d(inner_plane)
     self.relu = ops.Relu()
     self.conv2 = ops.Conv2d(inner_plane, planes, kernel_size=3, stride=1, padding=1, bias=False)
     self.bn2 = ops.BatchNorm2d(planes)
     self.relu2 = ops.Relu()
Example 7
    def __init__(self,
                 inplanes,
                 planes,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False):
        """Init Bottleneck."""
        super(Bottleneck, self).__init__()
        assert style in ['pytorch', 'caffe']
        self.expansion = 4
        self.inplanes = inplanes
        self.planes = planes
        self.stride = stride
        self.dilation = dilation
        self.style = style
        self.with_cp = with_cp
        if self.style == 'pytorch':
            self.conv1_stride = 1
            self.conv2_stride = stride
        else:
            self.conv1_stride = stride
            self.conv2_stride = 1

        self.norm1 = ops.BatchNorm2d(planes)
        self.norm2 = ops.BatchNorm2d(planes)
        self.norm3 = ops.BatchNorm2d(planes * self.expansion)
        self.conv1 = ops.Conv2d(inplanes,
                                planes,
                                kernel_size=1,
                                stride=self.conv1_stride,
                                bias=False)
        self.with_modulated_dcn = False
        self.conv2 = ops.Conv2d(
            planes,
            planes,
            kernel_size=3,
            stride=self.conv2_stride,
            padding=dilation,
            dilation=dilation,
            bias=False,
        )
        self.conv3 = ops.Conv2d(planes,
                                planes * self.expansion,
                                kernel_size=1,
                                bias=False)
        self.relu = ops.Relu(inplace=True)
        self.downsample = downsample
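The forward pass is omitted; a minimal sketch of the standard three-stage bottleneck with shortcut (hypothetical, not part of the excerpt):

    def forward(self, x):
        # Sketch: 1x1 reduce, 3x3, 1x1 expand, plus (possibly downsampled) shortcut.
        identity = x if self.downsample is None else self.downsample(x)
        out = self.relu(self.norm1(self.conv1(x)))
        out = self.relu(self.norm2(self.conv2(out)))
        out = self.norm3(self.conv3(out))
        return self.relu(out + identity)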
Example 8
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              affine=True,
              activation='relu',
              inplace=False):
     """Construct ConvBnAct class."""
     super(ConvBnAct, self).__init__()
     self.conv2d = ops.Conv2d(C_in,
                              C_out,
                              kernel_size,
                              stride,
                              padding,
                              bias=False)
     self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
     if activation == 'hswish':
         self.act = ops.Hswish(inplace=inplace)
     elif activation == 'hsigmoid':
         self.act = ops.Hsigmoid(inplace=inplace)
     elif activation == 'relu6':
         self.act = ops.Relu6(inplace=inplace)
     else:
         self.act = ops.Relu(inplace=inplace)
Example 9
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation=1,
              affine=True,
              repeats=1):
     """Construct SepConv class."""
     super(SeparatedConv, self).__init__()
     for idx in range(repeats):
         self.add_module(
             '{}_conv1'.format(idx),
             ops.Conv2d(C_in,
                        C_in,
                        kernel_size=kernel_size,
                        stride=stride,
                        padding=padding,
                        dilation=dilation,
                        groups=C_in,
                        bias=False))
         self.add_module(
             '{}_conv2'.format(idx),
             ops.Conv2d(C_in, C_in, kernel_size=1, padding=0, bias=False))
         self.add_module('{}_batch'.format(idx),
                         ops.BatchNorm2d(C_in, affine=affine))
         self.add_module('{}_relu'.format(idx), ops.Relu(inplace=False))
Example 10
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              dilation,
              affine=True):
     """Construct SepConv class."""
     super(DilConv, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv1 = ops.Conv2d(C_in,
                             C_in,
                             kernel_size=kernel_size,
                             stride=stride,
                             padding=padding,
                             dilation=dilation,
                             groups=C_in,
                             bias=False)
     self.conv2 = ops.Conv2d(C_in,
                             C_out,
                             kernel_size=1,
                             padding=0,
                             bias=False)
     self.batch = ops.BatchNorm2d(C_out, affine=affine)
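A hedged sketch of the corresponding forward pass (ReLU, dilated depthwise conv, 1x1 projection, BN):

 def forward(self, x):
     # Sketch: pre-activation, then depthwise-separable dilated conv.
     return self.batch(self.conv2(self.conv1(self.relu(x))))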
Example 11
 def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation=1, groups=1, bias=False):
     super(BN_Conv2d, self).__init__()
     self.seq = Sequential(
         ops.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                    padding=padding, dilation=dilation, groups=groups, bias=bias),
         ops.BatchNorm2d(out_channels),
         ops.Relu()
     )
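The matching forward pass (not shown) would simply be:

 def forward(self, x):
     # Sketch: conv -> BN -> ReLU in one call.
     return self.seq(x)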
Example 12
 def __init__(self, C, num_classes, input_size):
     """Init AuxiliaryHead."""
     super(AuxiliaryHead, self).__init__()
     stride = input_size - 5
     self.relu1 = ops.Relu(inplace=True)
     self.avgpool1 = ops.AvgPool2d(5,
                                   stride=stride,
                                   padding=0,
                                   count_include_pad=False)
     self.conv1 = ops.Conv2d(C, 128, 1, bias=False)
     self.batchnorm1 = ops.BatchNorm2d(128)
     self.relu2 = ops.Relu(inplace=True)
     self.conv2 = ops.Conv2d(128, 768, 2, bias=False)
     self.batchnorm2 = ops.BatchNorm2d(768)
     self.relu3 = ops.Relu(inplace=True)
     self.view = ops.View()
     self.classifier = ops.Linear(768, num_classes)
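A hedged forward sketch chaining the layers in the order they are declared:

 def forward(self, x):
     # Sketch: pool to a small map, lift to 768 channels, flatten, classify.
     x = self.avgpool1(self.relu1(x))
     x = self.relu2(self.batchnorm1(self.conv1(x)))
     x = self.relu3(self.batchnorm2(self.conv2(x)))
     return self.classifier(self.view(x))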
Example 13
 def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True, use_relu6=False):
     """Construct ConvBnRelu class."""
     super(ConvBnRelu, self).__init__()
     self.conv2d = ops.Conv2d(
         C_in, C_out, kernel_size, stride=stride, padding=padding, bias=False)
     self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
     if use_relu6:
         self.relu = ops.Relu6(inplace=False)
     else:
         self.relu = ops.Relu(inplace=False)
Example 14
 def _blocks(self, out_channels, desc_blocks):
     blocks = ModuleList()
     in_channels = 32
     for i in range(desc_blocks):
         blocks.append(Sequential(
             ops.Conv2d(in_channels, out_channels, padding=1, kernel_size=3),
             ops.BatchNorm2d(out_channels),
             ops.Relu(inplace=True),
         ))
         in_channels = out_channels
     return blocks
Example 15
 def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True):
     """Init ReLUConvBN."""
     super(ReLUConvBN, self).__init__()
     self.relu = ops.Relu(inplace=False)
     self.conv = ops.Conv2d(C_in,
                            C_out,
                            kernel_size,
                            stride=stride,
                            padding=padding,
                            bias=False)
     self.bn = ops.BatchNorm2d(C_out, affine=affine)
Example 16
 def _make_stem_layer(self):
     """Make stem layer."""
     self.conv1 = ops.Conv2d(3,
                             self.inplanes,
                             kernel_size=7,
                             stride=2,
                             padding=3,
                             bias=False)
      self.norm1 = ops.BatchNorm2d(self.inplanes)
     self.relu = ops.Relu(inplace=True)
     self.maxpool = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
Example 17
    def __init__(self, init_plane):
        """Create SmallInputInitialBlock layer.

        :param init_plane: input channel.
        :type init_plane: int
        """
        super(SmallInputInitialBlock, self).__init__()
        self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn = ops.BatchNorm2d(num_features=init_plane)
        self.relu = ops.Relu()
Example 18
    def __init__(self, init_plane):
        """Create InitialBlock layer.

        :param init_plane: input channel.
        :type init_plane: int
        """
        super(InitialBlock, self).__init__()
        self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.batch = ops.BatchNorm2d(num_features=init_plane)
        self.maxpool2d = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
Example 19
    def __init__(self,
                 inchannel,
                 outchannel,
                 expansion,
                 groups,
                 base_width,
                 stride=1):
        """Create BottleConv layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param expansion: expansion
        :type expansion: int
        :param groups: number of convolution groups
        :type groups: int
        :param base_width: base channel width
        :type base_width: int
        :param stride: the number to jump, default 1
        :type stride: int
        """
        super(BottleConv, self).__init__()
        outchannel = int(outchannel * (base_width / 64.)) * groups
        self.conv1 = ops.Conv2d(in_channels=inchannel,
                                out_channels=outchannel,
                                kernel_size=1,
                                stride=1,
                                bias=False)
        self.batch1 = ops.BatchNorm2d(num_features=outchannel)
        self.conv2 = ops.Conv2d(in_channels=outchannel,
                                out_channels=outchannel,
                                kernel_size=3,
                                stride=stride,
                                padding=1,
                                groups=groups,
                                bias=False)
        self.batch2 = ops.BatchNorm2d(num_features=outchannel)
        self.conv3 = ops.Conv2d(in_channels=outchannel,
                                out_channels=outchannel * expansion,
                                kernel_size=1,
                                stride=1,
                                bias=False)
        self.batch3 = ops.BatchNorm2d(num_features=outchannel * expansion)
        self.relu = ops.Relu()
Example 20
    def __init__(self, C_in, C_out, affine=True):
        """Construct FactorizedReduce class.

        :param C_in: input channel
        :param C_out: output channel
        :param affine: whether to use affine in BN
        """
        super(FactorizedReduce, self).__init__()
        assert C_out % 2 == 0
        self.relu = ops.Relu(inplace=False)
        self.conv_1 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
        self.conv_2 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
        self.bn = ops.BatchNorm2d(C_out, affine=affine)
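The forward pass is not shown; a sketch of the usual DARTS pattern, assuming a torch backend so torch.cat is available for the channel concatenation:

    def forward(self, x):
        # Sketch: two stride-2 1x1 convs, the second on a one-pixel-shifted
        # view, concatenated on channels so no spatial position is dropped.
        x = self.relu(x)
        out = torch.cat([self.conv_1(x), self.conv_2(x[:, :, 1:, 1:])], dim=1)
        return self.bn(out)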
Example 21
 def __init__(self, C, stride, ops_cands):
     """Init MixedOp."""
     super(MixedOp, self).__init__()
     if not isinstance(ops_cands, list):
         # train
         self.add_module(ops_cands, OPS[ops_cands](C, stride, True))
     else:
         # search
         for primitive in ops_cands:
             op = OPS[primitive](C, stride, False)
             if 'pool' in primitive:
                 op = Seq(op, ops.BatchNorm2d(C, affine=False))
             self.add_module(primitive, op)
Example 22
 def __init__(self,
              inplanes,
              planes,
              stride=1,
              dilation=1,
              downsample=None,
              style='pytorch',
              with_cp=False):
     """Init BasicBlock."""
     super(BasicBlock, self).__init__()
     self.expansion = 1
     self.norm1 = ops.BatchNorm2d(planes)
     self.norm2 = ops.BatchNorm2d(planes)
     self.conv1 = ops.Conv2d(inplanes,
                             planes,
                             3,
                             stride=stride,
                             padding=dilation,
                             dilation=dilation,
                             bias=False)
     self.conv2 = ops.Conv2d(planes, planes, 3, padding=1, bias=False)
     self.relu = ops.Relu(inplace=True)
     if stride > 1 or downsample is not None:
         conv_layer = ops.Conv2d(inplanes,
                                 planes * self.expansion,
                                 kernel_size=1,
                                 stride=stride,
                                 bias=False)
          norm_layer = ops.BatchNorm2d(planes * self.expansion)
         self.downsample = Sequential(conv_layer, norm_layer)
     else:
         self.downsample = None
     self.inplanes = inplanes
     self.planes = planes
     self.stride = stride
     self.dilation = dilation
     self.style = style
     assert not with_cp
Example 23
def _transform_op(init_layer):
    """Transform the torch op to Vega op."""
    if isinstance(init_layer, nn.Conv2d):
        in_channels = init_layer.in_channels
        out_channels = init_layer.out_channels
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        # bias = init_layer.bias
        new_layer = ops.Conv2d(in_channels=in_channels,
                               out_channels=out_channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=padding)
    elif isinstance(init_layer, nn.BatchNorm2d):
        num_features = init_layer.num_features
        new_layer = ops.BatchNorm2d(num_features=num_features)
    elif isinstance(init_layer, nn.ReLU):
        new_layer = ops.Relu()
    elif isinstance(init_layer, nn.MaxPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.MaxPool2d(kernel_size=kernel_size,
                                  stride=stride,
                                  padding=padding)
    elif isinstance(init_layer, nn.AvgPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.AvgPool2d(kernel_size=kernel_size,
                                  stride=stride,
                                  padding=padding)
    elif isinstance(init_layer, nn.AdaptiveAvgPool2d):
        output_size = init_layer.output_size
        new_layer = ops.AdaptiveAvgPool2d(output_size=output_size)
    elif isinstance(init_layer, nn.Linear):
        in_features = init_layer.in_features
        out_features = init_layer.out_features
        # use_bias = init_layer.bias
        new_layer = ops.Linear(in_features=in_features,
                               out_features=out_features)
    elif isinstance(init_layer, nn.Dropout):
        prob = init_layer.p
        inplace = init_layer.inplace
        new_layer = ops.Dropout(prob=prob, inplace=inplace)
    else:
        raise ValueError("The op {} is not supported.".format(
            type(init_layer)))
    return new_layer
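A hypothetical usage sketch, assuming torch.nn is in scope as nn (as the function body already requires); the layer values are illustrative only:

# Convert a torch conv to its Vega counterpart; note that bias and a few
# other attributes are intentionally not carried over by the function above.
torch_conv = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)
vega_conv = _transform_op(torch_conv)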
Example 24
 def __init__(self,
              in_channels,
              out_channels,
              kernel_size,
              stride=1,
              padding=0,
              dilation=1,
              groups=1,
              bias='auto',
              activation='relu',
              inplace=True,
              activate_last=True):
     """Init Conv Module with Normalization."""
     super(ConvModule, self).__init__()
     self.activation = activation
     self.inplace = inplace
     self.activate_last = activate_last
     self.with_norm = True
      self.with_activation = activation is not None
      if bias == 'auto':
          bias = not self.with_norm
     self.with_bias = bias
     self.conv = ops.Conv2d(in_channels,
                            out_channels,
                            kernel_size,
                            stride=stride,
                            padding=padding,
                            dilation=dilation,
                            groups=groups,
                            bias=bias)
     self.in_channels = self.conv.in_channels
     self.out_channels = self.conv.out_channels
     self.kernel_size = self.conv.kernel_size
     self.stride = self.conv.stride
     self.padding = self.conv.padding
     self.dilation = self.conv.dilation
     self.transposed = self.conv.transposed
     self.output_padding = self.conv.output_padding
     self.groups = self.conv.groups
     if self.with_norm:
         norm_channels = out_channels if self.activate_last else in_channels
         self.norm = ops.BatchNorm2d(norm_channels)
      if self.with_activation:
         if self.activation not in ['relu']:
             raise ValueError('{} is currently not supported.'.format(
                 self.activation))
         if self.activation == 'relu':
             self.activate = ops.Relu(inplace=inplace)
Example 25
    def __init__(self, inchannel, outchannel, expansion, stride=1):
        """Create ShortCut layer.

        :param inchannel: input channel.
        :type inchannel: int
        :param outchannel: output channel.
        :type outchannel: int
        :param expansion: expansion
        :type expansion: int
        :param stride: the number to jump, default 1
        :type stride: int
        """
        super(ShortCut, self).__init__()
        if stride != 1 or inchannel != outchannel * expansion:
            self.conv1 = ops.Conv2d(in_channels=inchannel, out_channels=outchannel * expansion, kernel_size=1,
                                    stride=stride, bias=False)
            self.batch = ops.BatchNorm2d(num_features=outchannel * expansion)
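Because the projection layers exist only when the shape changes, a hedged forward sketch would branch on their presence:

    def forward(self, x):
        # Sketch: project when stride or channel count changes, else identity.
        if hasattr(self, 'conv1'):
            return self.batch(self.conv1(x))
        return x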
Example 26
def build_norm_layer(features, norm_type='BN', **kwargs):
    """Build norm layers according to their type.

    :param features: input tensor.
    :param norm_type: type of norm layer.
    :param **kwargs: other optional parameters.
    """
    if norm_type == 'BN':
        return ops.BatchNorm2d(features, **kwargs)
    elif norm_type == 'GN':
        assert 'num_groups' in kwargs, 'num_groups is required for group normalization'
        num_groups = kwargs.pop('num_groups')
        return ops.GroupNorm(num_groups, features, **kwargs)
    elif norm_type == 'Sync':
        return ops.SyncBatchNorm(features, **kwargs)
    else:
        raise ValueError('norm type {} is not defined'.format(norm_type))
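A hypothetical usage sketch (the channel counts are illustrative):

bn = build_norm_layer(64)                                 # BatchNorm2d over 64 channels
gn = build_norm_layer(64, norm_type='GN', num_groups=8)   # GroupNorm with 8 groups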
Example 27
 def __init__(self,
              C_in,
              C_out,
              kernel_size,
              stride,
              padding,
              Conv2d='Conv2d',
              affine=True,
              use_relu6=False,
              norm_layer='BN',
              has_bn=True,
              has_relu=True,
              **kwargs):
     """Construct ConvBnRelu class."""
     super(ConvBnRelu, self).__init__()
     if Conv2d == 'Conv2d':
         self.conv2d = ops.Conv2d(C_in,
                                  C_out,
                                  kernel_size,
                                  stride=stride,
                                  padding=padding,
                                  bias=False)
     elif Conv2d == 'ConvWS2d':
         self.conv2d = ops.ConvWS2d(C_in,
                                    C_out,
                                    kernel_size,
                                    stride=stride,
                                    padding=padding,
                                    bias=False)
     if has_bn:
         if norm_layer == 'BN':
             self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine)
         elif norm_layer == 'GN':
             num_groups = kwargs.pop('num_groups')
             self.batch_norm2d = ops.GroupNorm(num_groups,
                                               C_out,
                                               affine=affine)
         elif norm_layer == 'Sync':
             self.batch_norm2d = ops.SyncBatchNorm(C_out, affine=affine)
     if has_relu:
         if use_relu6:
             self.relu = ops.Relu6(inplace=False)
         else:
             self.relu = ops.Relu(inplace=False)
Example 28
def make_res_layer(block,
                   inplanes,
                   planes,
                   blocks,
                   stride=1,
                   dilation=1,
                   style='pytorch',
                   with_cp=False):
    """Build resnet layer."""
    downsample = None
    if stride != 1 or inplanes != planes * block.expansion:
        conv_layer = ops.Conv2d(inplanes,
                                planes * block.expansion,
                                kernel_size=1,
                                stride=stride,
                                bias=False)
        norm_layer = ops.BatchNorm2d(planes * block.expansion)
        downsample = Sequential(conv_layer, norm_layer)
    layers = []
    layers.append(
        block(inplanes=inplanes,
              planes=planes,
              stride=stride,
              dilation=dilation,
              downsample=downsample,
              style=style,
              with_cp=with_cp))
    inplanes = planes * block.expansion
    for i in range(1, blocks):
        layers.append(
            block(inplanes=inplanes,
                  planes=planes,
                  stride=1,
                  dilation=dilation,
                  style=style,
                  with_cp=with_cp))
    return Sequential(*layers)
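A hypothetical usage sketch, pairing this helper with the Bottleneck block from Example 7 (the values are illustrative only):

# First ResNet-50 stage: 3 Bottleneck blocks. A 1x1 downsample is created
# because inplanes (64) != planes * block.expansion (256).
layer1 = make_res_layer(Bottleneck, inplanes=64, planes=64, blocks=3, stride=1)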
Example 29
 'avg_pool_3x3': lambda C, stride, affine, repeats=1: ops.AvgPool2d(
     3, stride=stride, padding=1, count_include_pad=False),
 'max_pool_3x3': lambda C, stride, affine, repeats=1: ops.MaxPool2d(
     3, stride=stride, padding=1),
 'global_average_pool': lambda C, stride, affine, repeats=1: Seq(GAPConv1x1(C, C)),
 'skip_connect': lambda C, stride, affine, repeats=1: ops.Identity() if stride == 1 else FactorizedReduce(
     C, C, affine=affine),
 'sep_conv_3x3': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 3, stride, 1, affine=affine),
 'sep_conv_5x5': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 5, stride, 2, affine=affine),
 'sep_conv_7x7': lambda C, stride, affine, repeats=1: SeparatedConv(C, C, 7, stride, 3, affine=affine),
 'dil_conv_3x3': lambda C, stride, affine, repeats=1: DilConv(C, C, 3, stride, 2, 2, affine=affine),
 'dil_conv_5x5': lambda C, stride, affine, repeats=1: DilConv(C, C, 5, stride, 4, 2, affine=affine),
 'conv_7x1_1x7': lambda C, stride, affine, repeats=1: Seq(
     ops.Relu(inplace=False),
     ops.Conv2d(C, C, (1, 7), stride=(1, stride), padding=(0, 3), bias=False),
     ops.Conv2d(C, C, (7, 1), stride=(stride, 1), padding=(3, 0), bias=False),
     ops.BatchNorm2d(C, affine=affine)),
 'conv1x1': lambda C, stride, affine, repeats=1: Seq(
     conv1X1(C, C, stride=stride),
     ops.BatchNorm2d(C, affine=affine),
     ops.Relu(inplace=False)),
 'conv3x3': lambda C, stride, affine, repeats=1: Seq(
     conv3x3(C, C, stride=stride),
     ops.BatchNorm2d(C, affine=affine),
     ops.Relu(inplace=False)),
 'conv5x5': lambda C, stride, affine, repeats=1: Seq(
     conv5x5(C, C, stride=stride),
     ops.BatchNorm2d(C, affine=affine),
     ops.Relu(inplace=False)),
 'conv7x7': lambda C, stride, affine, repeats=1: Seq(
     conv7x7(C, C, stride=stride),
     ops.BatchNorm2d(C, affine=affine),
     ops.Relu(inplace=False)),
Example 30
 'sep_conv_7x7':
 lambda C, stride, affine, repeats=1: SeparatedConv(
     C, C, 7, stride, 3, affine=affine),
 'dil_conv_3x3':
 lambda C, stride, affine, repeats=1: DilConv(
     C, C, 3, stride, 2, 2, affine=affine),
 'dil_conv_5x5':
 lambda C, stride, affine, repeats=1: DilConv(
     C, C, 5, stride, 4, 2, affine=affine),
 'conv_7x1_1x7':
 lambda C, stride, affine, repeats=1: Seq(
     ops.Relu(inplace=False),
     ops.Conv2d(
         C, C, (1, 7), stride=(1, stride), padding=(0, 3), bias=False),
     ops.Conv2d(
         C, C, (7, 1), stride=(stride, 1), padding=(3, 0), bias=False),
     ops.BatchNorm2d(C, affine=affine)),
 'conv1x1':
 lambda C, stride, affine, repeats=1: Seq(conv1X1(C, C, stride=stride),
                                          ops.BatchNorm2d(C, affine=affine),
                                          ops.Relu(inplace=False)),
 'conv3x3':
 lambda C, stride, affine, repeats=1: Seq(conv3x3(C, C, stride=stride),
                                          ops.BatchNorm2d(C, affine=affine),
                                          ops.Relu(inplace=False)),
 'conv5x5':
 lambda C, stride, affine, repeats=1: Seq(conv5x5(C, C, stride=stride),
                                          ops.BatchNorm2d(C, affine=affine),
                                          ops.Relu(inplace=False)),
 'conv7x7':
 lambda C, stride, affine, repeats=1: Seq(conv7x7(C, C, stride=stride),
                                           ops.BatchNorm2d(C, affine=affine),
                                           ops.Relu(inplace=False)),