def __init__(self, inplanes, planes, stride=1, downsample=None, downstride=2,
             groups=1, base_width=64, dilation=1):
    """Bottleneck residual block augmented with channel and spatial attention.

    Args:
        inplanes: number of input channels.
        planes: base channel count; the block's output width is
            ``planes * self.expansion``.
        stride: stride of the 3x3 convolution (spatial downsampling when > 1).
        downsample: when exactly ``True``, a 1x1 projection shortcut is built;
            any other value leaves ``self.downsample`` as ``None``.
        downstride: stride of the shortcut's 1x1 convolution.
        groups: group count for the 3x3 convolution.
        base_width: per-group width base (64 keeps ``width == planes``).
        dilation: dilation of the 3x3 convolution.
    """
    super(Bottleneck, self).__init__()
    width = int(planes * (base_width / 64.)) * groups
    # Main path: 1x1 reduce -> 3x3 (grouped/dilated, carries the stride)
    # -> 1x1 expand, each followed by batch norm.
    self.conv1 = conv1x1(inplanes, width)
    self.bn1 = nn.BatchNorm2d(width)
    self.conv2 = conv3x3(width, width, stride, groups, dilation)
    self.bn2 = nn.BatchNorm2d(width)
    self.conv3 = conv1x1(width, planes * self.expansion)
    self.bn3 = nn.BatchNorm2d(planes * self.expansion)
    self.relu = nn.ReLU(inplace=True)
    # Consistency fix: size the channel attention from self.expansion
    # (was hard-coded `planes * 4`) so it always matches conv3/bn3 even if
    # a subclass overrides the expansion factor.
    self.ca = ChannelAttention(planes * self.expansion)
    self.sa = SpatialAttention()
    if downsample is True:
        # NOTE(review): the shortcut projects to planes * expansion * 2 —
        # twice the main-path output width. Confirm forward() concatenates
        # two streams before the add; a plain residual add would
        # shape-mismatch here.
        self.downsample = nn.Sequential(
            conv1x1(inplanes, planes * self.expansion * 2, downstride),
            nn.BatchNorm2d(planes * self.expansion * 2),
        )
    else:
        self.downsample = None
def __init__(self, inplanes, planes, baseWidth=4, cardinality=32, stride=1,
             downsample=None, downstride=2):
    """Grouped (ResNeXt-style) bottleneck block with attention modules.

    Args:
        inplanes: input channel dimensionality.
        planes: output channel dimensionality (expanded x4 at the block output).
        baseWidth: base width of each convolution group.
        cardinality: number of convolution groups.
        stride: stride of the 3x3 convolution; replaces a pooling layer.
        downsample: when exactly ``True``, a projection shortcut is built;
            otherwise ``self.downsample`` stays ``None``.
        downstride: stride of the shortcut's 1x1 convolution.
    """
    super(Bottleneck, self).__init__()

    # Total grouped width = floor(planes * baseWidth / 64) per group,
    # times the number of groups.
    # NOTE(review): `baseWidth / 64` relies on true division — under
    # Python 2 semantics the default 4/64 would truncate to 0; confirm
    # this file targets Python 3.
    group_width = int(math.floor(planes * (baseWidth / 64))) * cardinality

    # 1x1 reduce into the grouped width.
    self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, stride=1,
                           padding=0, bias=False)
    self.bn1 = nn.BatchNorm2d(group_width)
    # Grouped 3x3 convolution carries the stride.
    self.conv2 = nn.Conv2d(group_width, group_width, kernel_size=3,
                           stride=stride, padding=1, groups=cardinality,
                           bias=False)
    self.bn2 = nn.BatchNorm2d(group_width)
    # 1x1 expand to the block's output width (planes * 4).
    self.conv3 = nn.Conv2d(group_width, planes * 4, kernel_size=1, stride=1,
                           padding=0, bias=False)
    self.bn3 = nn.BatchNorm2d(planes * 4)
    self.relu = nn.ReLU(inplace=True)

    # Attention modules applied to the expanded feature map.
    self.ca = ChannelAttention(planes * 4)
    self.sa_fm = SpatialAttention()

    # NOTE(review): the shortcut projects to planes * expansion * 2 — twice
    # the main-path output width; confirm forward() expects that.
    if downsample is True:
        self.downsample = nn.Sequential(
            conv1x1(inplanes, planes * self.expansion * 2, downstride),
            nn.BatchNorm2d(planes * self.expansion * 2),
        )
    else:
        self.downsample = None