def __init__(self, inp, oup, stride, expand_ratio):
    super(InvertedResidual, self).__init__()
    self.stride = stride
    assert stride in [1, 2]
    hidden_dim = int(inp * expand_ratio)
    self.use_res_connect = self.stride == 1 and inp == oup
    if expand_ratio == 1:
        self.conv = nn.SequentialCell([
            # dw
            nn.Conv2d(hidden_dim, hidden_dim, 3, stride, group=hidden_dim),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(),
            # pw-linear
            nn.Conv2d(hidden_dim, oup, 1, 1),
            nn.BatchNorm2d(oup)
        ])
    else:
        self.conv = nn.SequentialCell([
            # pw
            nn.Conv2d(inp, hidden_dim, 1, 1),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(),
            # dw
            nn.Conv2d(hidden_dim, hidden_dim, 3, stride, group=hidden_dim),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(),
            # pw-linear
            nn.Conv2d(hidden_dim, oup, 1, 1),
            nn.BatchNorm2d(oup)
        ])
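# Only the constructor is shown above; a minimal construct() sketch for this
# block (an assumption, not part of the original snippet) applies the residual
# shortcut exactly when self.use_res_connect is set:
def construct(self, x):
    if self.use_res_connect:
        return x + self.conv(x)
    return self.conv(x)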
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1, shared_conv=None):
    super(ConvBNReLU, self).__init__()
    padding = 0
    in_channels = in_planes
    out_channels = out_planes
    if shared_conv is None:
        if groups == 1:
            conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
                             pad_mode='same', padding=padding)
        else:
            # depthwise: one filter per input channel
            out_channels = in_planes
            conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride,
                             pad_mode='same', padding=padding, group=in_channels)
        layers = [conv, _bn(out_planes), nn.ReLU6()]
    else:
        layers = [shared_conv, _bn(out_planes), nn.ReLU6()]
    self.features = nn.SequentialCell(layers)
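# Every ConvBNReLU variant in this section forwards through the same
# SequentialCell; the (assumed) construct() is a one-liner:
def construct(self, x):
    return self.features(x)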
def __init__(self, input_channel, kernel_size):
    super(DepthwiseConv2dAndReLU6, self).__init__()
    from mindspore.common.initializer import initializer
    weight_shape = [1, input_channel, kernel_size, kernel_size]
    self.weight = Parameter(initializer('ones', weight_shape), name='weight')
    self.depthwise_conv = P.DepthwiseConv2dNative(channel_multiplier=1,
                                                  kernel_size=(kernel_size, kernel_size))
    self.relu6 = nn.ReLU6()
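# P.DepthwiseConv2dNative takes the input and the weight as call arguments,
# so an (assumed) construct() for this cell would be:
def construct(self, x):
    x = self.depthwise_conv(x, self.weight)
    return self.relu6(x)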
def __init__(self, inp, oup, stride, expand_ratio, last_relu=False):
    super(InvertedResidual, self).__init__()
    assert stride in [1, 2]
    hidden_dim = int(round(inp * expand_ratio))
    self.use_res_connect = stride == 1 and inp == oup
    layers = []
    if expand_ratio != 1:
        # pw
        layers.append(ConvBNReLU(inp, hidden_dim, kernel_size=1))
    layers.extend([
        # dw
        ConvBNReLU(hidden_dim, hidden_dim, stride=stride, groups=hidden_dim),
        # pw-linear
        nn.Conv2d(hidden_dim, oup, kernel_size=1, stride=1, has_bias=False),
        _bn(oup),
    ])
    self.conv = nn.SequentialCell(layers)
    self.add = P.TensorAdd()
    self.cast = P.Cast()
    self.last_relu = last_relu
    self.relu = nn.ReLU6()
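# A hedged construct() sketch for this variant (not in the original snippet):
# the residual add uses the pre-built P.TensorAdd, and ReLU6 runs only when
# last_relu is set.
def construct(self, x):
    out = self.conv(x)
    if self.use_res_connect:
        out = self.add(out, x)
    if self.last_relu:
        out = self.relu(out)
    return out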
def __init__(self, device_target, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
    super(ConvBNReLU, self).__init__()
    padding = (kernel_size - 1) // 2
    if groups == 1:
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                         pad_mode='pad', padding=padding)
    else:
        if device_target == "Ascend":
            conv = DepthwiseConv(in_planes, kernel_size, stride, pad_mode='pad', pad=padding)
        elif device_target == "GPU":
            conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                             group=in_planes, pad_mode='pad', padding=padding)
    layers = [conv, nn.BatchNorm2d(out_planes), nn.ReLU6()]
    self.features = nn.SequentialCell(layers)
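# DepthwiseConv is referenced above (and in later snippets) but never defined
# here. A minimal sketch consistent with its call sites (an assumption, not
# the original class) wraps P.DepthwiseConv2dNative with an explicit weight
# Parameter, as in the DepthwiseConv2dAndReLU6 snippet earlier:
class DepthwiseConv(nn.Cell):
    def __init__(self, in_planes, kernel_size, stride, pad_mode, pad, channel_multiplier=1):
        super(DepthwiseConv, self).__init__()
        self.depthwise_conv = P.DepthwiseConv2dNative(channel_multiplier=channel_multiplier,
                                                      kernel_size=kernel_size,
                                                      stride=stride,
                                                      pad_mode=pad_mode,
                                                      pad=pad)
        weight_shape = [channel_multiplier, in_planes, kernel_size, kernel_size]
        self.weight = Parameter(initializer('ones', weight_shape), name='weight')

    def construct(self, x):
        return self.depthwise_conv(x, self.weight)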
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
    super(ConvBNReLU, self).__init__()
    padding = (kernel_size - 1) // 2
    if groups == 1:
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                         pad_mode="pad", padding=padding, has_bias=False)
    else:
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                         pad_mode="pad", padding=padding, has_bias=False,
                         group=groups, weight_init=KaimingNormal(mode='fan_out'))
    layers = [
        conv,
        nn.BatchNorm2d(out_planes).add_flags_recursive(fp32=True),  # , momentum=0.9
        nn.ReLU6()
    ]
    self.features = nn.SequentialCell(layers)
    self.in_planes = in_planes
    self.print = P.Print()
def _last_conv2d(in_channel, out_channel, kernel_size=3, stride=1, pad_mode='same', pad=0):
    depthwise_conv = nn.Conv2d(in_channel, in_channel, kernel_size, stride,
                               pad_mode='same', padding=pad, group=in_channel)
    conv = _conv2d(in_channel, out_channel, kernel_size=1)
    return nn.SequentialCell([depthwise_conv, _bn(in_channel), nn.ReLU6(), conv])
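# The helpers _conv2d and _bn used above are not shown in these snippets.
# Hypothetical definitions consistent with their call sites might look like:
def _conv2d(in_channel, out_channel, kernel_size=3, stride=1):
    # plain conv, 'same' padding, no bias (assumption)
    return nn.Conv2d(in_channel, out_channel, kernel_size, stride,
                     pad_mode='same', has_bias=False)

def _bn(channel):
    # BatchNorm2d with framework defaults (assumption)
    return nn.BatchNorm2d(channel)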
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
    super(ConvBNReLU, self).__init__()
    padding = (kernel_size - 1) // 2
    if groups == 1:
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                         pad_mode='pad', padding=padding)
    else:
        # depthwise: out_channels equals in_planes, one group per channel
        conv = nn.Conv2d(in_planes, in_planes, kernel_size, stride,
                         pad_mode='pad', padding=padding, group=in_planes)
    layers = [conv, nn.BatchNorm2d(out_planes), nn.ReLU6()]
    self.features = nn.SequentialCell(layers)
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1):
    super(ConvBNReLU, self).__init__()
    padding = 0
    if groups == 1:
        conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                         pad_mode='same', padding=padding)
    else:
        conv = DepthwiseConv(in_planes, kernel_size, stride, pad_mode='same', pad=padding)
    layers = [conv, _bn(out_planes), nn.ReLU6()]
    self.features = nn.SequentialCell(layers)
def __init__(self, act_func):
    super(Activation, self).__init__()
    if act_func == 'relu':
        self.act = nn.ReLU()
    elif act_func == 'relu6':
        self.act = nn.ReLU6()
    elif act_func in ('hsigmoid', 'hard_sigmoid'):
        self.act = MyHSigmoid()  # nn.HSigmoid()
    elif act_func in ('hswish', 'hard_swish'):
        self.act = nn.HSwish()
    else:
        raise NotImplementedError
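# The (assumed) construct() simply delegates to the selected cell, which makes
# Activation usable anywhere a Cell is expected, e.g. Activation('hswish')(x):
def construct(self, x):
    return self.act(x)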
def _last_conv2d(in_channel, out_channel, kernel_size=3, stride=1, pad_mode='same', pad=0):
    depthwise_conv = DepthwiseConv(in_channel, kernel_size, stride, pad_mode='same', pad=pad)
    conv = _conv2d(in_channel, out_channel, kernel_size=1)
    return nn.SequentialCell([depthwise_conv, _bn(in_channel), nn.ReLU6(), conv])
def __init__(self, in_planes, out_planes, kernel_size=3, stride=1):
    super(FirstQuantLayer, self).__init__()
    padding = (kernel_size - 1) // 2
    conv_inner = nn.Conv2dBnFoldQuantOneConv(in_planes, out_planes,
                                             kernel_size=kernel_size,
                                             stride=stride,
                                             pad_mode='pad',
                                             padding=padding,
                                             quant_config=quant_config,
                                             quant_dtype=QuantDtype.INT8)
    activation = nn.ActQuant(activation=nn.ReLU6(),
                             quant_config=quant_config,
                             quant_dtype=QuantDtype.INT8)
    self.features = nn.SequentialCell([conv_inner, activation])
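# quant_config and QuantDtype come from module scope and are not shown here.
# One hedged possibility (the exact import paths vary across MindSpore 1.x
# releases) is:
#   from mindspore.compression.common import QuantDtype
#   from mindspore.compression.quant import create_quant_config
#   quant_config = create_quant_config()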
def _last_conv2d(in_channel, out_channel, kernel_size=3, stride=1, pad_mode='same', pad=0):
    depthwise_conv = nn.Conv2d(in_channel, in_channel, kernel_size, stride,
                               pad_mode='same', padding=pad, has_bias=False,
                               group=in_channel, weight_init='ones')
    conv = _conv2d(in_channel, out_channel, kernel_size=1)
    return nn.SequentialCell([depthwise_conv, _bn(in_channel), nn.ReLU6(), conv])
def __init__(self, inplace=False):
    super(Hswish, self).__init__()
    self.relu6 = nn.ReLU6()
    # give this cell's parameters a unique name prefix
    self.relu6.update_parameters_name("relu6_" + uuid.uuid1().hex[:8] + ".")
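# Hard swish can be expressed with ReLU6 alone, x * relu6(x + 3) / 6; a
# minimal construct() sketch for this cell (an assumption) is:
def construct(self, x):
    return x * self.relu6(x + 3.) / 6.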
def __init__(self):
    super(MyHSigmoid, self).__init__()
    self.relu6 = nn.ReLU6()
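# Hard sigmoid via ReLU6, relu6(x + 3) / 6; the (assumed) construct():
def construct(self, x):
    return self.relu6(x + 3.) / 6.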
def __init__(self, inplace=False):
    super(Relu6, self).__init__()
    self.relu6 = nn.ReLU6()
    # randomized suffix keeps parameter names unique across instances
    self.relu6.update_parameters_name("relu6_" + str(np.random.rand()) + ".")
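# The (assumed) construct() just forwards to the wrapped nn.ReLU6:
def construct(self, x):
    return self.relu6(x)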