def __init__(self, inplanes, planes, stride=1, reduction=16):
    """Initialize a CIFAR-style SE basic block (Chainer).

    Args:
        inplanes: number of input channels.
        planes: number of output channels.
        stride: stride of the first 3x3 convolution (spatial downsampling).
        reduction: channel-reduction ratio passed to the SE layer.
    """
    super(CifarSEBasicBlock, self).__init__()
    with self.init_scope():
        # First 3x3 conv carries the stride; second keeps resolution.
        self.conv1 = L.Convolution2D(
            inplanes, planes, ksize=3, stride=stride, pad=1, nobias=True)
        self.bn1 = L.BatchNormalization(planes)
        self.conv2 = L.Convolution2D(
            planes, planes, ksize=3, stride=1, pad=1, nobias=True)
        self.bn2 = L.BatchNormalization(planes)
        self.se = SELayer(planes, reduction)
        # The residual path needs a 1x1 projection only when the channel
        # counts differ; otherwise it is a plain identity.
        if inplanes == planes:
            self.downsample = lambda x: x
        else:
            self.downsample = Sequential(
                L.Convolution2D(inplanes, planes, ksize=1,
                                stride=stride, nobias=True),
                L.BatchNormalization(planes))
def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
             base_width=64, dilation=1, norm_layer=None, *, reduction=16):
    """Build an SE bottleneck block: 1x1 reduce -> 3x3 -> 1x1 expand + SE.

    Args:
        inplanes: input channel count.
        planes: bottleneck width; output has ``planes * 4`` channels.
        stride: stride of the 3x3 convolution.
        downsample: optional module projecting the residual path.
        groups, base_width, dilation, norm_layer: accepted for signature
            compatibility with the stock ResNet block; not used here.
        reduction: channel-reduction ratio for the SE layer (keyword-only).
    """
    super(SEBottleneck, self).__init__()
    expanded = planes * 4  # output width after the final 1x1 expansion
    # 1x1 channel reduction.
    self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
    self.bn1 = nn.BatchNorm2d(planes)
    # 3x3 spatial convolution; carries the block's stride.
    self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                           padding=1, bias=False)
    self.bn2 = nn.BatchNorm2d(planes)
    # 1x1 channel expansion back to 4x width.
    self.conv3 = nn.Conv2d(planes, expanded, kernel_size=1, bias=False)
    self.bn3 = nn.BatchNorm2d(expanded)
    self.relu = nn.ReLU(inplace=True)
    self.se = SELayer(expanded, reduction)
    self.downsample = downsample
    self.stride = stride
def __init__(self, inplanes, planes, stride=1, downsample=None, reduction=16):
    """Build an SE basic block: two 3x3 convolutions followed by an SE layer.

    Args:
        inplanes: input channel count.
        planes: output channel count of both convolutions.
        stride: stride of the first convolution.
        downsample: optional module projecting the residual path.
        reduction: channel-reduction ratio for the SE layer.
    """
    super(SEBasicBlock, self).__init__()
    # Only the first conv may downsample; the second preserves resolution.
    self.conv1 = conv3x3(inplanes, planes, stride)
    self.bn1 = nn.BatchNorm2d(planes)
    self.relu = nn.ReLU(inplace=True)
    self.conv2 = conv3x3(planes, planes, 1)
    self.bn2 = nn.BatchNorm2d(planes)
    self.se = SELayer(planes, reduction)
    self.downsample = downsample
    self.stride = stride
def __init__(self, inplanes, planes, stride=1, reduction=16):
    """Initialize a CIFAR-style SE basic block (PyTorch).

    Args:
        inplanes: input channel count.
        planes: output channel count.
        stride: stride of the first 3x3 convolution.
        reduction: channel-reduction ratio for the SE layer.
    """
    super(CifarSEBasicBlock, self).__init__()
    self.conv1 = conv3x3(inplanes, planes, stride)
    self.bn1 = nn.BatchNorm2d(planes)
    self.relu = nn.ReLU(inplace=True)
    self.conv2 = conv3x3(planes, planes)
    self.bn2 = nn.BatchNorm2d(planes)
    self.se = SELayer(planes, reduction)
    # When the channel counts already match, the residual path is a plain
    # identity; otherwise project it with a strided 1x1 conv + BN.
    if inplanes == planes:
        self.downsample = lambda x: x
    else:
        self.downsample = nn.Sequential(
            nn.Conv2d(inplanes, planes, kernel_size=1,
                      stride=stride, bias=False),
            nn.BatchNorm2d(planes))
    self.stride = stride
def __init__(self, i):
    """Build the i-th convolutional stage: Conv2d -> BatchNorm -> ReLU -> SE.

    Args:
        i: stage index in [0, 6]; selects kernel size, padding, stride and
            output width from the per-stage tables below.
    """
    super(selayer, self).__init__()
    ks = [3, 3, 3, 3, 3, 3, 2]  # kernel size per stage
    ps = [1, 1, 1, 1, 1, 1, 0]  # padding per stage
    ss = [1, 1, 1, 1, 1, 1, 1]  # stride per stage
    nm = [64, 128, 256, 256, 512, 512, 512]  # output channels per stage
    # NOTE(review): `nc` is a free name (neither a parameter nor a local);
    # it must exist at module/global scope or this raises NameError when
    # i == 0. Presumably the network's raw input channel count — confirm.
    nIn = nc if i == 0 else nm[i - 1]
    nOut = nm[i]
    self.conv = nn.Conv2d(nIn, nOut, ks[i], ss[i], ps[i])
    self.bn = nn.BatchNorm2d(nOut)
    self.relu = nn.ReLU(inplace=True)
    # SE module with a fixed reduction ratio of 16.
    self.se = SELayer(nOut, 16)
class Bottleneck(nn.Module):
    """ResNet bottleneck block (1x1 -> 3x3 -> 1x1) with an optional SE module.

    The residual path is ``downsample(x)`` when given, otherwise the input
    itself; the SE module is applied to the main path only when ``with_se``.
    """

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 with_se=False):
        """Args:
            inplanes: input channel count.
            planes: bottleneck width; output has ``planes * expansion``
                channels.
            stride: stride of the 3x3 convolution.
            downsample: optional module projecting the residual path.
            with_se: whether to apply the SE module in ``forward``.
        """
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
        # add by zc
        self.with_se = with_se
        # Bug fix: the SE module must match this block's output width,
        # planes * expansion. It was hard-coded to SELayer(512), which only
        # worked when planes == 128 and broke (channel mismatch) for every
        # other stage.
        self.se_module = SELayer(planes * self.expansion)

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        if self.with_se:
            # Call the module directly so hooks run (idiomatic vs .forward()).
            out = self.se_module(out)

        out += residual
        out = self.relu(out)

        return out
def __init__(self, inplanes, planes, stride=1, downsample=None, with_se=False):
    """Build a bottleneck block (1x1 -> 3x3 -> 1x1) with an optional SE module.

    Args:
        inplanes: input channel count.
        planes: bottleneck width; output has ``planes * 4`` channels.
        stride: stride of the 3x3 convolution.
        downsample: optional module projecting the residual path.
        with_se: whether the SE module is applied in the forward pass.
    """
    super(Bottleneck, self).__init__()
    self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
    self.bn1 = nn.BatchNorm2d(planes)
    self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                           padding=1, bias=False)
    self.bn2 = nn.BatchNorm2d(planes)
    self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
    self.bn3 = nn.BatchNorm2d(planes * 4)
    self.relu = nn.ReLU(inplace=True)
    self.downsample = downsample
    self.stride = stride
    # add by zc
    self.with_se = with_se
    # Bug fix: the SE module must match this block's output width,
    # planes * 4 (see conv3/bn3). It was hard-coded to SELayer(512), which
    # only worked when planes == 128.
    self.se_module = SELayer(planes * 4)
def __init__(self, in_planes, planes, stride=1, reduction=16):
    """Build an SE bottleneck with a built-in shortcut branch.

    Args:
        in_planes: input channel count.
        planes: bottleneck width; output has ``self.expansion * planes``
            channels.
        stride: stride of the 3x3 convolution.
        reduction: channel-reduction ratio for the SE layer.
    """
    super(SEBottleneck, self).__init__()
    out_planes = self.expansion * planes
    self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
    self.bn1 = nn.BatchNorm2d(planes)
    self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                           padding=1, bias=False)
    self.bn2 = nn.BatchNorm2d(planes)
    self.conv3 = nn.Conv2d(planes, out_planes, kernel_size=1, bias=False)
    self.bn3 = nn.BatchNorm2d(out_planes)
    self.relu = nn.ReLU(inplace=True)
    self.se = SELayer(out_planes, reduction)
    # Shortcut is an identity unless the shape changes (stride or channels),
    # in which case a strided 1x1 conv + BN projects the residual path.
    needs_projection = stride != 1 or in_planes != out_planes
    if needs_projection:
        self.shortcut = nn.Sequential(
            nn.Conv2d(in_planes, out_planes, kernel_size=1,
                      stride=stride, bias=False),
            nn.BatchNorm2d(out_planes),
        )
    else:
        self.shortcut = nn.Sequential()
def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
             base_width=64, dilation=1, norm_layer=None, *, reduction=16):
    """Build an SE basic block using PReLU activations.

    Args:
        inplanes: input channel count.
        planes: output channel count of both convolutions.
        stride: stride of the first convolution.
        downsample: optional module projecting the residual path.
        groups, base_width, dilation, norm_layer: accepted for signature
            compatibility with the stock ResNet block; not used here.
        reduction: channel-reduction ratio for the SE layer (keyword-only).
    """
    super(SEBasicBlock, self).__init__()
    self.conv1 = conv3x3(inplanes, planes, stride)
    self.bn1 = nn.BatchNorm2d(planes)
    # Bug fix: nn.PReLU's constructor is (num_parameters, init) — it has no
    # `inplace` argument, so nn.PReLU(inplace=True) raised TypeError.
    self.PReLU = nn.PReLU()
    self.conv2 = conv3x3(planes, planes, 1)
    self.bn2 = nn.BatchNorm2d(planes)
    self.se = SELayer(planes, reduction)
    self.downsample = downsample
    self.stride = stride
def __init__(self, num_classes, aux_logits=True, transform_input=False):
    """Wrap torchvision's Inception3, attaching an SELayer to each mixed block.

    Args:
        num_classes: number of output classes for the classifier head.
        aux_logits: whether the auxiliary classifier branch is built (and,
            if so, also gets an SE layer).
        transform_input: forwarded to ``Inception3``.
    """
    super(SEInception3, self).__init__()
    model = Inception3(num_classes=num_classes, aux_logits=aux_logits,
                       transform_input=transform_input)
    # (submodule name, channel count) pairs, in network order up to Mixed_6e.
    for name, channels in [
        ("Mixed_5b", 192), ("Mixed_5c", 256), ("Mixed_5d", 288),
        ("Mixed_6a", 288), ("Mixed_6b", 768), ("Mixed_6c", 768),
        ("Mixed_6d", 768), ("Mixed_6e", 768),
    ]:
        getattr(model, name).add_module("SELayer", SELayer(channels))
    # The auxiliary head exists only when aux_logits is enabled.
    if aux_logits:
        model.AuxLogits.add_module("SELayer", SELayer(768))
    for name, channels in [
        ("Mixed_7a", 768), ("Mixed_7b", 1280), ("Mixed_7c", 2048),
    ]:
        getattr(model, name).add_module("SELayer", SELayer(channels))
    self.model = model