def __init__(self, in_chnls, cardinality, group_depth, stride):
    """Init ResNeXt_Block."""
    super(ResNeXt_Block, self).__init__()
    self.group_chnls = cardinality * group_depth
    self.conv1 = BN_Conv2d(in_chnls, self.group_chnls, 1, stride=1, padding=0)
    # grouped 3x3 conv: `cardinality` parallel paths of width `group_depth`
    self.conv2 = BN_Conv2d(self.group_chnls, self.group_chnls, 3, stride=stride, padding=1, groups=cardinality)
    self.conv3 = ops.Conv2d(self.group_chnls, self.group_chnls * 2, 1, stride=1, padding=0)
    self.bn = ops.BatchNorm2d(self.group_chnls * 2)
    if stride != 1 or in_chnls != self.group_chnls * 2:
        # projection shortcut when spatial size or channel count changes
        self.short_cut = Sequential(
            ops.Conv2d(in_chnls, self.group_chnls * 2, 1, stride, bias=False),
            ops.BatchNorm2d(self.group_chnls * 2))
    else:
        self.short_cut = None
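# A minimal usage sketch (assumed values, not from the source): one ResNeXt
# block in the common 32x4d configuration. group_chnls = 32 * 4 = 128, so the
# block outputs 128 * 2 = 256 channels; the forward pass is not shown here.
block = ResNeXt_Block(in_chnls=64, cardinality=32, group_depth=4, stride=1)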
def __init__(self, inp, hidden_dim, oup, kernel_size, stride, use_se=False, use_hs=False, momentum=0.1):
    """Init InvertedResidualSE."""
    super(InvertedResidualSE, self).__init__()
    self.identity = stride == 1 and inp == oup
    self.ir_block = Sequential(
        # pw
        ops.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
        ops.BatchNorm2d(hidden_dim, momentum=momentum),
        ops.Hswish() if use_hs else ops.Relu(inplace=True),
        # dw
        ops.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2,
                   groups=hidden_dim, bias=False),
        ops.BatchNorm2d(hidden_dim, momentum=momentum),
        # Squeeze-and-Excite
        SELayer(hidden_dim) if use_se else Sequential(),
        ops.Hswish() if use_hs else ops.Relu(inplace=True),
        # pw-linear
        ops.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
        ops.BatchNorm2d(oup, momentum=momentum),
    )
def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation=1, affine=True, repeats=1):
    """Construct SeparatedConv class."""
    super(SeparatedConv, self).__init__()
    for idx in range(repeats):
        # depthwise conv followed by a pointwise 1x1 conv, then BN and ReLU
        self.add_module('{}_conv1'.format(idx),
                        ops.Conv2d(C_in, C_in, kernel_size=kernel_size, stride=stride, padding=padding,
                                   dilation=dilation, groups=C_in, bias=False))
        self.add_module('{}_conv2'.format(idx),
                        ops.Conv2d(C_in, C_in, kernel_size=1, padding=0, bias=False))
        self.add_module('{}_batch'.format(idx), ops.BatchNorm2d(C_in, affine=affine))
        self.add_module('{}_relu'.format(idx), ops.Relu(inplace=False))
def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation, affine=True):
    """Construct DilConv class."""
    super(DilConv, self).__init__()
    self.relu = ops.Relu(inplace=False)
    # dilated depthwise conv followed by a pointwise 1x1 projection to C_out
    self.conv1 = ops.Conv2d(C_in, C_in, kernel_size=kernel_size, stride=stride, padding=padding,
                            dilation=dilation, groups=C_in, bias=False)
    self.conv2 = ops.Conv2d(C_in, C_out, kernel_size=1, padding=0, bias=False)
    self.batch = ops.BatchNorm2d(C_out, affine=affine)
def __init__(self, **desc):
    """Initialize."""
    super(SimpleCnn, self).__init__()
    desc = Config(**desc)
    self.num_class = desc.num_class
    self.fp16 = desc.get('fp16', False)
    self.channels = desc.channels
    self.conv1 = ops.Conv2d(3, 32, padding=1, kernel_size=3)
    self.pool1 = ops.MaxPool2d(2, stride=2)
    self.blocks = self._blocks(self.channels, desc.blocks)
    self.pool2 = ops.MaxPool2d(2, stride=2)
    self.conv2 = ops.Conv2d(self.channels, 64, padding=1, kernel_size=3)
    self.global_conv = ops.Conv2d(64, 64, kernel_size=8, padding=0)
    self.view = ops.View()
    self.fc = ops.Linear(64, self.num_class)
def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, style='pytorch', with_cp=False):
    """Init Bottleneck."""
    super(Bottleneck, self).__init__()
    assert style in ['pytorch', 'caffe']
    self.inplanes = inplanes
    self.planes = planes
    self.stride = stride
    self.dilation = dilation
    self.style = style
    self.with_cp = with_cp
    self.norm1 = ops.BatchNorm2d(planes)
    self.norm2 = ops.BatchNorm2d(planes)
    self.norm3 = ops.BatchNorm2d(planes * self.expansion)
    self.conv1 = ops.Conv2d(inplanes, planes, kernel_size=1, bias=False)
    self.with_modulated_dcn = False
    self.conv2 = ops.Conv2d(planes, planes, kernel_size=3, stride=stride,
                            padding=dilation, dilation=dilation, bias=False)
    self.conv3 = ops.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
    self.relu = ops.Relu(inplace=True)
    if stride > 1 or downsample is not None:
        conv_layer = ops.Conv2d(inplanes, planes * self.expansion, kernel_size=1, stride=stride, bias=False)
        norm_layer = ops.BatchNorm2d(planes * self.expansion)
        self.downsample = Sequential(conv_layer, norm_layer)
    else:
        self.downsample = None
def call(self, x, **kwargs):
    """Forward compute of resnet for detection."""
    x = self.conv1(x)
    x = self.norm1(x)
    x = self.relu(x)
    x = self.maxpool(x)
    if self.parallel_code is None:
        outs = self.res_layers_seq(x)
    else:
        outs = []
        subset_lists = [0 for i in range(self.subset_limit)]
        parallel_numbers = self.parallel_code.split('-')
        for key, layer in enumerate(self.res_layers):
            size = x.size()[2:]
            x = layer(x)
            x_l_k = x
            for k in range(int(parallel_numbers[key])):
                # Note: this 1x1 conv is constructed inside the forward pass,
                # so its weights are re-initialized on every call.
                x_l_k = ops.Conv2d(self.channels[key + 1], self.channels[key], kernel_size=1).cuda()(x_l_k)
                x_l_k = ops.InterpolateScale(size=size, mode='nearest')(x_l_k)
                x_l_k = layer(subset_lists[k] + x_l_k)
                subset_lists[k] = x_l_k
                size = x_l_k.size()[2:]
            outs.append(x_l_k)
    return tuple(outs)
def build_conv_layer(in_channels, out_channels, kernel_size, bias, Conv2d, padding=0, groups=1, stride=1):
    """Build a conv layer according to its type.

    :param in_channels: number of input channels.
    :param out_channels: number of output channels.
    :param kernel_size: convolution kernel size.
    :param bias: whether to add a learnable bias.
    :param Conv2d: conv layer type, 'Conv2d' or 'ConvWS2d'.
    :param padding: zero-padding added to both sides of the input.
    :param groups: number of blocked connections from input to output channels.
    :param stride: stride of the convolution.
    """
    if Conv2d == 'Conv2d':
        return ops.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
                          stride=stride, padding=padding, groups=groups, bias=bias)
    elif Conv2d == 'ConvWS2d':
        return ops.ConvWS2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
                            stride=stride, padding=padding, groups=groups, bias=bias)
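# Hypothetical usage of the factory above (values assumed): the `Conv2d`
# argument is a string selecting a plain or weight-standardized convolution.
conv = build_conv_layer(64, 128, kernel_size=3, bias=False, Conv2d='Conv2d', padding=1)
ws_conv = build_conv_layer(64, 128, kernel_size=3, bias=False, Conv2d='ConvWS2d', padding=1)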
def __init__(self, C_in, C_out, kernel_size, stride, padding, bias=False, momentum=0.1, affine=True,
             activation='relu', inplace=True):
    """Construct ConvBnAct class."""
    super(ConvBnAct, self).__init__()
    self.conv2d = ops.Conv2d(C_in, C_out, kernel_size, stride, padding, bias=bias)
    self.batch_norm2d = ops.BatchNorm2d(C_out, affine=affine, momentum=momentum)
    if activation == 'hswish':
        self.act = ops.Hswish(inplace=inplace)
    elif activation == 'hsigmoid':
        self.act = ops.Hsigmoid(inplace=inplace)
    elif activation == 'relu6':
        self.act = ops.Relu6(inplace=inplace)
    else:
        self.act = ops.Relu(inplace=inplace)
def conv1X1(inchannel, outchannel, stride=1):
    """Create conv1X1 layer."""
    return ops.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride, bias=False)
def _make_stem_layer(self):
    """Make stem layer."""
    self.conv1 = ops.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
    # normalize over conv1's actual output width instead of a hard-coded 64
    self.norm1 = ops.BatchNorm2d(self.inplanes)
    self.relu = ops.Relu(inplace=True)
    self.maxpool = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)
def __init__(self, inchannel, outchannel, expansion, stride=1, norm_layer={"norm_type": 'BN'}):
    """Create ShortCut layer.

    :param inchannel: input channel.
    :type inchannel: int
    :param outchannel: output channel.
    :type outchannel: int
    :param expansion: expansion
    :type expansion: int
    :param stride: the number to jump, default 1
    :type stride: int
    """
    super(ShortCut, self).__init__()
    if stride != 1 or inchannel != outchannel * expansion:
        self.conv1 = ops.Conv2d(in_channels=inchannel, out_channels=outchannel * expansion,
                                kernel_size=1, stride=stride, bias=False)
        self.batch = build_norm_layer(features=outchannel * expansion, **norm_layer)
    else:
        self.identity = ops.Identity()
def __init__(self, C, num_classes, input_size):
    """Init AuxiliaryHead."""
    super(AuxiliaryHead, self).__init__()
    # 5x5 pooling with this stride maps an input_size x input_size feature
    # map to 2x2: (input_size - 5) / (input_size - 5) + 1 = 2
    stride = input_size - 5
    self.relu1 = ops.Relu(inplace=True)
    self.avgpool1 = ops.AvgPool2d(5, stride=stride, padding=0, count_include_pad=False)
    self.conv1 = ops.Conv2d(C, 128, 1, bias=False)
    self.batchnorm1 = ops.BatchNorm2d(128)
    self.relu2 = ops.Relu(inplace=True)
    self.conv2 = ops.Conv2d(128, 768, 2, bias=False)
    self.batchnorm2 = ops.BatchNorm2d(768)
    self.relu3 = ops.Relu(inplace=True)
    self.view = ops.View()
    self.classifier = ops.Linear(768, num_classes)
def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation=1, groups=1, bias=False):
    """Init BN_Conv2d: convolution followed by batch norm and ReLU."""
    super(BN_Conv2d, self).__init__()
    self.seq = Sequential(
        ops.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                   padding=padding, dilation=dilation, groups=groups, bias=bias),
        ops.BatchNorm2d(out_channels),
        ops.Relu()
    )
def __init__(self, in_channels=[64, 128, 256, 512], out_channels=256, code=None, weight_file=None,
             weights_prefix='head.backbone.1'):
    """Init ParallelFPN.

    :param in_channels: input channels of each lateral conv.
    :param out_channels: output channels of each FPN level.
    :param code: parallel code of the FPN.
    :param weight_file: pretrained weight file.
    :param weights_prefix: prefix of the weights to load.
    """
    super(ParallelFPN, self).__init__()
    self.code = code
    self.lateral_convs = ModuleList()
    self.fpn_convs = ModuleList()
    self.weight_file = weight_file
    self.weights_prefix = weights_prefix
    for in_channel in in_channels:
        # l_conv = Sequential(ops.Conv2d(in_channel, out_channels, 1), ops.BatchNorm2d(out_channels))
        # fpn_conv = Sequential(ops.Conv2d(out_channels, out_channels, 3, padding=1), ops.BatchNorm2d(out_channels))
        self.lateral_convs.append(ops.Conv2d(in_channel, out_channels, 1, bias=False))
        self.fpn_convs.append(ops.Conv2d(out_channels, out_channels, 3, padding=1, bias=False))
def __init__(self, in_channels=1, out_channels=16, kernel_size=(3, 3)):
    """Init TextConvBlock."""
    super(TextConvBlock, self).__init__()
    self.conv1 = ops.Conv2d(in_channels, out_channels=out_channels, kernel_size=kernel_size)
    self.squeeze = ops.Squeeze(3)
    self.relu = ops.Relu()
    self.max_pool = ops.GlobalMaxPool1d()
    self.squeeze2 = ops.Squeeze(-1)
def conv7x7(inchannel, outchannel, stride=1, bias=False, dilation=1):
    """Create Convolution 7x7."""
    return ops.Conv2d(inchannel, outchannel, kernel_size=7, stride=stride, padding=3,
                      dilation=dilation, bias=bias)


def conv5x5(inchannel, outchannel, stride=1, bias=False, dilation=1):
    """Create Convolution 5x5."""
    return ops.Conv2d(inchannel, outchannel, kernel_size=5, stride=stride, padding=2,
                      dilation=dilation, bias=bias)


def conv3x3(inchannel, outchannel, groups=1, stride=1, bias=False, dilation=1):
    """Create conv3x3 layer."""
    return ops.Conv2d(inchannel, outchannel, kernel_size=3, stride=stride, padding=dilation,
                      groups=groups, bias=bias, dilation=dilation)
def __init__(self, inp, oup, stride, kernel=3, expand_ratio=1):
    """Construct InvertedConv class.

    :param inp: input channel
    :param oup: output channel
    :param stride: stride
    :param kernel: kernel
    :param expand_ratio: channel increase multiplier
    """
    super(InvertedConv, self).__init__()
    hidden_dim = round(inp * expand_ratio)
    conv = []
    if expand_ratio > 1:
        # pointwise expansion
        conv = [
            ops.Conv2d(in_channels=inp, out_channels=hidden_dim, kernel_size=1, stride=1,
                       padding=0, bias=False),
            ops.BatchNorm2d(num_features=hidden_dim),
            ops.Relu6(inplace=True)
        ]
    conv = conv + [
        # depthwise conv, then pointwise-linear projection
        ops.Conv2d(in_channels=hidden_dim, out_channels=hidden_dim, kernel_size=kernel, stride=stride,
                   padding=kernel // 2, groups=hidden_dim, bias=False, depthwise=True),
        ops.BatchNorm2d(num_features=hidden_dim),
        ops.Relu6(inplace=True),
        ops.Conv2d(in_channels=hidden_dim, out_channels=oup, kernel_size=1, stride=1, padding=0, bias=False),
        ops.BatchNorm2d(num_features=oup)
    ]
    self.models = Sequential(*conv)
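# Illustrative instantiation (assumed values, not from the source). With
# inp=16 and expand_ratio=6, hidden_dim = round(16 * 6) = 96: the block
# expands 16 -> 96 channels with a 1x1 conv, applies a stride-2 3x3
# depthwise conv, then projects 96 -> 24 with a linear 1x1 conv.
block = InvertedConv(inp=16, oup=24, stride=2, kernel=3, expand_ratio=6)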
def __init__(self, in_planes, planes, inner_plane, stride=1):
    """Create PruneBasicConv layer."""
    super(PruneBasicConv, self).__init__()
    self.conv1 = ops.Conv2d(in_planes, inner_plane, kernel_size=3, stride=stride, padding=1, bias=False)
    self.bn1 = ops.BatchNorm2d(inner_plane)
    self.relu = ops.Relu()
    self.conv2 = ops.Conv2d(inner_plane, planes, kernel_size=3, stride=1, padding=1, bias=False)
    self.bn2 = ops.BatchNorm2d(planes)
    self.relu2 = ops.Relu()
def conv33(in_channel, out_channel, stride=1, groups=1, bias=False):
    """Conv 3*3."""
    # guard both modulo checks against groups == 0 to avoid ZeroDivisionError
    if groups != 0 and in_channel % groups != 0:
        raise ValueError('In channel "{}" is not a multiple of groups: "{}"'.format(in_channel, groups))
    if groups != 0 and out_channel % groups != 0:
        raise ValueError('Out channel "{}" is not a multiple of groups: "{}"'.format(out_channel, groups))
    return ops.Conv2d(in_channels=in_channel, out_channels=out_channel, kernel_size=3, stride=stride,
                      padding=1, groups=groups, bias=bias)
def __init__(self, C_in, C_out, kernel_size, stride, padding, affine=True):
    """Init ReLUConvBN."""
    super(ReLUConvBN, self).__init__()
    self.relu = ops.Relu(inplace=False)
    self.conv = ops.Conv2d(C_in, C_out, kernel_size, stride=stride, padding=padding, bias=False)
    self.bn = ops.BatchNorm2d(C_out, affine=affine)
def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, style='pytorch', with_cp=False):
    """Init BasicBlock."""
    super(BasicBlock, self).__init__()
    self.expansion = 1
    self.norm1 = ops.BatchNorm2d(planes)
    self.norm2 = ops.BatchNorm2d(planes)
    self.conv1 = ops.Conv2d(inplanes, planes, 3, stride=stride, padding=dilation, dilation=dilation, bias=False)
    self.conv2 = ops.Conv2d(planes, planes, 3, padding=1, bias=False)
    self.relu = ops.Relu(inplace=True)
    if stride > 1 or downsample is not None:
        conv_layer = ops.Conv2d(inplanes, planes * self.expansion, kernel_size=1, stride=stride, bias=False)
        norm_layer = ops.BatchNorm2d(planes)
        self.downsample = Sequential(conv_layer, norm_layer)
    else:
        self.downsample = None
    self.inplanes = inplanes
    self.planes = planes
    self.stride = stride
    self.dilation = dilation
    self.style = style
    assert not with_cp
def _transform_op(init_layer):
    """Transform a native op to the corresponding Vega op."""
    if isinstance(init_layer, nn.Conv2d):
        in_channels = init_layer.in_channels
        out_channels = init_layer.out_channels
        kernel_size = init_layer.kernel_size[0]
        stride = init_layer.stride
        padding = init_layer.padding
        # bias = init_layer.bias
        new_layer = ops.Conv2d(in_channels=in_channels, out_channels=out_channels,
                               kernel_size=kernel_size, stride=stride, padding=padding, bias=False)
    elif isinstance(init_layer, nn.BatchNorm2d):
        num_features = init_layer.num_features
        new_layer = ops.BatchNorm2d(num_features=num_features)
    elif isinstance(init_layer, nn.ReLU):
        new_layer = ops.Relu()
    elif isinstance(init_layer, nn.MaxPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        # padding = init_layer.padding
        new_layer = ops.MaxPool2d(kernel_size=kernel_size, stride=stride)
    elif isinstance(init_layer, nn.AvgPool2d):
        kernel_size = init_layer.kernel_size
        stride = init_layer.stride
        padding = init_layer.padding
        new_layer = ops.AvgPool2d(kernel_size=kernel_size, stride=stride, padding=padding)
    elif isinstance(init_layer, P.ReduceMean):
        new_layer = ops.AdaptiveAvgPool2d()
    elif isinstance(init_layer, nn.Dense):
        in_features = init_layer.in_channels
        out_features = init_layer.out_channels
        # use_bias = init_layer.bias
        new_layer = ops.Linear(in_features=in_features, out_features=out_features)
    elif isinstance(init_layer, nn.Dropout):
        prob = init_layer.p
        inplace = init_layer.inplace
        new_layer = ops.Dropout(prob=prob, inplace=inplace)
    elif isinstance(init_layer, nn.Flatten):
        new_layer = ops.View()
    else:
        raise ValueError("The op {} is not supported.".format(type(init_layer)))
    return new_layer
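# A hedged usage sketch: the isinstance checks above reference MindSpore-style
# types (nn.Dense, P.ReduceMean), so assuming `nn` is mindspore.nn a
# conversion might look like this (assumed, not from the source):
import mindspore.nn as nn
vega_conv = _transform_op(nn.Conv2d(3, 16, 3))  # -> ops.Conv2d
vega_fc = _transform_op(nn.Dense(128, 10))      # -> ops.Linear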
def __init__(self, C_in, C_out, affine=True):
    """Construct FactorizedReduce class.

    :param C_in: input channel
    :param C_out: output channel
    :param affine: whether to use affine in BN
    """
    super(FactorizedReduce, self).__init__()
    assert C_out % 2 == 0
    self.relu = ops.Relu(inplace=False)
    self.conv_1 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
    self.conv_2 = ops.Conv2d(C_in, C_out // 2, 1, stride=2, padding=0, bias=False)
    self.bn = ops.BatchNorm2d(C_out, affine=affine)
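# FactorizedReduce's forward pass is not shown above. In the standard
# DARTS-style implementation (an assumption about this one), the two
# stride-2 1x1 convs see inputs offset by one pixel and their outputs are
# concatenated along channels, halving the spatial size while producing
# C_out channels:
#
#     x = self.relu(x)
#     out = concat(self.conv_1(x), self.conv_2(x[:, :, 1:, 1:]))  # backend concat, assumed
#     return self.bn(out)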
def _blocks(self, out_channels, desc_blocks):
    """Build a list of conv-bn-relu blocks."""
    blocks = ModuleList()
    in_channels = 32
    for i in range(desc_blocks):
        blocks.append(Sequential(
            ops.Conv2d(in_channels, out_channels, padding=1, kernel_size=3),
            ops.BatchNorm2d(out_channels),
            ops.Relu(inplace=True),
        ))
        in_channels = out_channels
    return blocks
def __init__(self, init_plane):
    """Create SmallInputInitialBlock layer.

    :param init_plane: number of output channels of the initial conv.
    :type init_plane: int
    """
    super(SmallInputInitialBlock, self).__init__()
    self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=3, stride=1,
                           padding=1, bias=False)
    self.bn = ops.BatchNorm2d(num_features=init_plane)
    self.relu = ops.Relu()
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1,
             bias='auto', activation='relu', inplace=True, activate_last=True):
    """Init Conv Module with Normalization."""
    super(ConvModule, self).__init__()
    self.activation = activation
    self.inplace = inplace
    self.activate_last = activate_last
    self.with_norm = True
    self.with_activation = activation is not None
    # with 'auto', the conv bias is dropped whenever a norm layer follows
    if bias == 'auto':
        bias = False if self.with_norm else True
    self.with_bias = bias
    self.conv = ops.Conv2d(in_channels, out_channels, kernel_size, stride=stride, padding=padding,
                           dilation=dilation, groups=groups, bias=bias)
    self.in_channels = self.conv.in_channels
    self.out_channels = self.conv.out_channels
    self.kernel_size = self.conv.kernel_size
    self.stride = self.conv.stride
    self.padding = self.conv.padding
    self.dilation = self.conv.dilation
    self.transposed = self.conv.transposed
    self.output_padding = self.conv.output_padding
    self.groups = self.conv.groups
    if self.with_norm:
        norm_channels = out_channels if self.activate_last else in_channels
        self.norm = ops.BatchNorm2d(norm_channels)
    if self.with_activation:
        if self.activation not in ['relu']:
            raise ValueError('{} is currently not supported.'.format(self.activation))
        if self.activation == 'relu':
            self.activate = ops.Relu(inplace=inplace)
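# Hypothetical usage of ConvModule (values assumed): with bias='auto' and
# normalization always enabled, the conv is created bias-free and followed
# by BatchNorm and ReLU.
m = ConvModule(256, 256, kernel_size=3, padding=1)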
def __init__(self, init_plane):
    """Create InitialBlock layer.

    :param init_plane: number of output channels of the initial conv.
    :type init_plane: int
    """
    super(InitialBlock, self).__init__()
    self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=7, stride=2,
                           padding=3, bias=False)
    self.batch = ops.BatchNorm2d(num_features=init_plane)
    self.relu = ops.Relu()
    self.maxpool2d = ops.MaxPool2d(kernel_size=3, stride=2, padding=1)