def __init__(self, in_channels, out_channels, kernel_size, stride,
             expand_ratio, se_ratio=0.25, drop_rate=0.2):
    super().__init__()
    channels = in_channels * expand_ratio
    use_se = se_ratio is not None and 0 < se_ratio < 1
    # The identity shortcut is only valid when spatial size and channel count are preserved.
    self.use_res_connect = stride == 1 and in_channels == out_channels

    layers = nn.Sequential()
    if expand_ratio != 1:
        # 1x1 pointwise expansion
        layers.add_module(
            "expand",
            Conv2d(in_channels, channels, kernel_size=1,
                   norm_layer='default', activation='swish'))
    # depthwise convolution
    layers.add_module(
        "dwconv",
        Conv2d(channels, channels, kernel_size, stride, groups=channels,
               norm_layer='default', activation='swish'))
    if use_se:
        # squeeze-and-excitation, squeeze width computed from the input channels
        layers.add_module(
            "se", SEModule(channels, int(in_channels * se_ratio)))
    # 1x1 pointwise projection, no activation
    layers.add_module(
        "project",
        Conv2d(channels, out_channels, kernel_size=1, norm_layer='default'))
    if self.use_res_connect and drop_rate:
        # stochastic depth on the residual branch
        layers.add_module("drop_path", DropPath(drop_rate))
    self.layers = layers
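# A forward sketch (not part of the original snippet): assumes the standard
# inverted-residual behaviour implied by __init__ above, i.e. the sequential
# branch (expand -> dwconv -> se -> project -> drop_path) is run in full and
# the identity shortcut is added only when use_res_connect is True.
def forward(self, x):
    out = self.layers(x)
    if self.use_res_connect:
        # DropPath was registered inside self.layers, so stochastic depth has
        # already been applied to the residual branch at this point.
        out = out + x
    return out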
def __init__(self, in_channels, out_channels, use_se, drop_path):
    super().__init__()
    # pre-activation ordering: BN -> activation -> conv
    self.bn1 = Norm(in_channels)
    self.nl1 = Act("default")
    self.conv1 = Conv2d(in_channels, out_channels, kernel_size=3)
    self.bn2 = Norm(out_channels)
    self.nl2 = Act("default")
    self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3)
    if use_se:
        # squeeze-and-excitation on the output channels
        self.se = SEModule(out_channels, reduction=8)
    if drop_path:
        # stochastic depth on the residual branch
        self.drop_path = DropPath(drop_path)
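# A forward sketch (not part of the original snippet): assumes a
# pre-activation residual layout (BN -> act -> conv, twice) with the optional
# SE and DropPath modules applied to the branch before the identity add; the
# add only works when in_channels == out_channels, since __init__ defines no
# projection shortcut.
def forward(self, x):
    identity = x
    out = self.conv1(self.nl1(self.bn1(x)))
    out = self.conv2(self.nl2(self.bn2(out)))
    if hasattr(self, "se"):
        out = self.se(out)
    if hasattr(self, "drop_path"):
        out = self.drop_path(out)
    return out + identity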