def __init__(self, in_channels, channels, groups, stride=1, se_reduction=4, drop_path=0.2):
    """Pre-activation grouped bottleneck with optional SE and stochastic depth.

    branch1 (residual): Norm -> 1x1 conv -> [3x3 pool if strided] -> grouped 3x3 conv
    -> [SELayer if se_reduction] -> 1x1 conv -> [DropPath if unstrided].
    branch2: a Shortcut matching the channel/stride change.
    """
    super().__init__()
    main = [
        Norm(in_channels),
        Conv2d(in_channels, channels, kernel_size=1, norm='default', act='default'),
    ]
    # Downsampling is done by a 3x3 pool ahead of the grouped conv.
    if stride != 1:
        main.append(Pool2d(3, 2))
    main.append(Conv2d(channels, channels, kernel_size=3, groups=groups,
                       norm='default', act='default'))
    if se_reduction:
        main.append(SELayer(channels, se_reduction, groups))
    main.append(Conv2d(channels, channels, kernel_size=1, norm='default'))
    # Stochastic depth only on unstrided (identity-shortcut) blocks.
    if drop_path and stride == 1:
        main.append(DropPath(drop_path))
    self.branch1 = Sequential(main)
    self.branch2 = Shortcut(in_channels, channels, stride)
def __init__(self, in_channels, channels, stride, zero_init_residual=True, reduction=16):
    """SE-ResNet bottleneck: 1x1 -> strided 3x3 -> 1x1 (+BN) -> SE, plus a
    pool-then-project shortcut when shape changes.

    `self.expansion` is a class attribute defined on the enclosing class.
    """
    super().__init__()
    out_channels = channels * self.expansion
    self.conv1 = Conv2d(in_channels, channels, kernel_size=1, norm='def', act='def')
    self.conv2 = Conv2d(channels, channels, kernel_size=3, stride=stride,
                        norm='def', act='def')
    self.conv3 = Conv2d(channels, out_channels, kernel_size=1)
    # Zero-initialized gamma makes the residual branch start as an identity.
    self.bn3 = Norm(out_channels, gamma_init='zeros' if zero_init_residual else 'ones')
    self.se = SELayer(out_channels, reduction=reduction)
    if stride == 1 and in_channels == out_channels:
        self.shortcut = Identity()
    else:
        # ResNet-D style shortcut: avg-pool first, then a 1x1 projection.
        downsample = []
        if stride != 1:
            downsample.append(Pool2d(2, 2, type='avg'))
        downsample.append(Conv2d(in_channels, out_channels, kernel_size=1, norm='def'))
        self.shortcut = Sequential(downsample)
    self.act = Act()
def __init__(self, in_channels, out_channels, stride, dropout, use_se=False):
    """Pre-activation basic block (WRN style) with optional dropout and SE.

    Path: Norm -> Act -> strided 3x3 -> Norm -> Act -> [Dropout] -> 3x3 [-> SE].
    Strided blocks get an avg-pool + 1x1-projection shortcut; unstrided blocks
    use an identity shortcut.
    """
    super().__init__()
    self.use_se = use_se
    self.norm1 = Norm(in_channels)
    self.act1 = Act()
    self.conv1 = Conv2d(in_channels, out_channels, kernel_size=3, stride=stride)
    self.norm2 = Norm(out_channels)
    self.act2 = Act()
    self.dropout = Dropout(dropout) if dropout else Identity()
    self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3)
    if self.use_se:
        self.se = SELayer(out_channels, reduction=8)
    if stride == 1:
        self.shortcut = Identity()
    else:
        # Strided blocks are expected to also change the channel count.
        assert in_channels != out_channels
        self.shortcut = Sequential([
            Pool2d(2, 2, type='avg'),
            Conv2d(in_channels, out_channels, kernel_size=1, norm='def'),
        ])
def __init__(self, in_channels, out_channels, stride, groups, use_se):
    """Grouped bottleneck: 1x1 -> strided grouped 3x3 [-> SE] -> 1x1 + zero-gamma BN,
    with a 1x1-projection shortcut when shape changes.
    """
    super().__init__()
    self.use_se = use_se
    self.conv1 = Conv2d(in_channels, out_channels, kernel_size=1, norm='def', act='def')
    self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3, stride=stride,
                        groups=groups, norm='def', act='def')
    if use_se:
        self.se = SELayer(out_channels, 4)
    # Zero-gamma final BN lets the residual branch start as an identity.
    self.conv3 = Sequential(
        Conv2d(out_channels, out_channels, kernel_size=1, bias=False),
        Norm(out_channels, gamma_init='zeros'))
    needs_projection = stride != 1 or in_channels != out_channels
    if needs_projection:
        # NOTE(review): this 1x1 shortcut has no stride, so when stride != 1 its
        # output spatial size differs from the residual branch — confirm forward()
        # compensates (e.g. pools the shortcut) or whether stride=stride is missing.
        self.shortcut = Conv2d(in_channels, out_channels, kernel_size=1, norm='def')
    else:
        self.shortcut = Identity()
    self.act = Act()
def __init__(self, in_channels, out_channels, kernel_size, stride, expand_ratio,
             se_ratio, drop_connect):
    """MBConv (EfficientNet-style) block: expand -> depthwise -> [SE] -> project,
    with drop-connect on the residual when input and output shapes match.
    """
    super().__init__()
    # SE is enabled only for a valid ratio in (0, 1].
    self._has_se = se_ratio is not None and 0 < se_ratio <= 1
    channels = in_channels * expand_ratio
    if expand_ratio != 1:
        self.expand = Conv2d(in_channels, channels, 1, norm='def', act='def')
    else:
        self.expand = Identity()
    self.depthwise = Conv2d(channels, channels, kernel_size, stride, groups=channels,
                            padding='SAME', norm='def', act='def')
    if self._has_se:
        # SE bottleneck width is derived from the *input* channels.
        self.se = SELayer(channels, se_channels=int(in_channels * se_ratio),
                          min_se_channels=1)
    self.project = Conv2d(channels, out_channels, 1, norm='def')
    self._use_residual = in_channels == out_channels and stride == 1
    if self._use_residual:
        self.drop_connect = DropPath(drop_connect) if drop_connect else Identity()
def __init__(self, in_channels, use_se):
    """ShuffleNet-style unit: builds branch2 operating on half the input channels
    (1x1 -> depthwise 3x3 -> 1x1, optionally followed by SE).
    """
    super().__init__()
    half = in_channels // 2
    ops = [
        Conv2d(half, half, kernel_size=1, norm='def', act='def'),
        # groups == channels makes this a depthwise conv.
        Conv2d(half, half, kernel_size=3, groups=half, norm='def'),
        Conv2d(half, half, kernel_size=1, norm='def', act='def'),
    ]
    if use_se:
        ops.append(SELayer(half, reduction=2))
    self.branch2 = Sequential(ops)
def __init__(self, in_channels, out_channels, dropout, reduction):
    """Pre-activation double-conv block with SE, built as a Sequential subclass.

    Order: Norm -> Act -> 3x3 -> Norm -> Act -> [Dropout] -> 3x3 -> SE.
    (The original inserted Dropout at index 5; appending before the second conv
    is the same position.)
    """
    layers = [
        Norm(in_channels),
        Act(),
        Conv2d(in_channels, out_channels, kernel_size=3),
        Norm(out_channels),
        Act(),
    ]
    if dropout:
        layers.append(Dropout(dropout))
    layers.append(Conv2d(out_channels, out_channels, kernel_size=3))
    layers.append(SELayer(out_channels, reduction=reduction))
    super().__init__(layers)
def __init__(self, in_channels, out_channels, dropout, use_se, drop_path):
    """Pre-activation double-conv block with optional dropout, SE and DropPath,
    built as a Sequential subclass.

    Order: Norm -> Act -> 3x3 -> Norm -> Act -> [Dropout] -> 3x3 [-> SE] [-> DropPath].
    """
    layers = [
        Norm(in_channels),
        Act(),
        Conv2d(in_channels, out_channels, kernel_size=3),
        Norm(out_channels),
        Act(),
    ]
    if dropout:
        layers.append(Dropout(dropout))
    layers.append(Conv2d(out_channels, out_channels, kernel_size=3))
    if use_se:
        layers.append(SELayer(out_channels, reduction=8))
    if drop_path:
        layers.append(DropPath(drop_path))
    super().__init__(layers)
def __init__(self, in_channels, channels, stride, cardinality, base_width, reduction):
    """SE-ResNeXt bottleneck: 1x1 -> strided grouped 3x3 -> 1x1 -> SE, with a
    strided 1x1-projection shortcut when input and output shapes differ.

    Width per group D follows the ResNeXt formula channels * base_width / 64.
    """
    super().__init__()
    out_channels = channels * 4
    D = math.floor(channels * (base_width / 64))
    C = cardinality
    self.conv1 = Conv2d(in_channels, D * C, kernel_size=1, norm='def', act='def')
    self.conv2 = Conv2d(D * C, D * C, kernel_size=3, stride=stride,
                        groups=cardinality, norm='def', act='def')
    self.conv3 = Conv2d(D * C, out_channels, kernel_size=1, norm='def')
    self.se = SELayer(out_channels, reduction=reduction)
    # FIX: also project when stride != 1 — the original tested only the channel
    # count, so a strided block with matching channels got an Identity shortcut
    # whose spatial size no longer matched the residual branch. This matches the
    # `stride != 1 or in_channels != out_channels` check used by sibling blocks.
    if stride != 1 or in_channels != out_channels:
        self.shortcut = Conv2d(in_channels, out_channels, kernel_size=1,
                               stride=stride, norm='def')
    else:
        self.shortcut = Identity()
    self.act = Act()
def __init__(self, in_channels, out_channels, stride, dropout, reduction=8):
    """Pre-activation basic block with SE and an unconditional 1x1 projection
    shortcut (applied even when shape is unchanged).

    Path: Norm -> Act -> strided 3x3 -> Norm -> Act -> [Dropout] -> 3x3 -> SE.
    """
    super().__init__()
    self.norm1 = Norm(in_channels)
    self.act1 = Act()
    self.conv1 = Conv2d(in_channels, out_channels, kernel_size=3, stride=stride)
    self.norm2 = Norm(out_channels)
    self.act2 = Act()
    self.dropout = Identity() if not dropout else Dropout(dropout)
    self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3)
    self.se = SELayer(out_channels, reduction=reduction)
    # Always projects; no Identity fast path in this variant.
    self.shortcut = Conv2d(in_channels, out_channels, kernel_size=1, stride=stride)