def __init__(self, in_channels, channels, stride, downsample=False, last_gamma=False, **kwargs):
    """Build the layer stack for a v1 (post-activation) basic residual block.

    Args:
        in_channels: number of channels entering the block.
        channels: number of channels produced by the block.
        stride: stride of the first 3x3 convolution.
        downsample: if True, add a 1x1 conv + BN projection shortcut.
        last_gamma: if True, zero-initialize the final BN's weight (gamma).
    """
    super(BasicBlockV1, self).__init__(**kwargs)
    final_bn = nn.BatchNorm2d(channels)
    if last_gamma:
        # Zero gamma on the last BN so the residual branch starts near zero.
        nn.init.zeros_(final_bn.weight)
    self.body = nn.Sequential(
        _conv3x3(in_channels, channels, stride),
        nn.BatchNorm2d(channels),
        nn.ReLU(inplace=True),
        _conv3x3(channels, channels, 1),
        final_bn,
    )
    if downsample:
        # Projection shortcut: match the body's channels and stride.
        self.downsample = nn.Sequential(
            nn.Conv2d(in_channels, channels, kernel_size=1,
                      stride=stride, bias=False),
            nn.BatchNorm2d(channels),
        )
    else:
        self.downsample = None
def __init__(self, in_channels, channels, stride, downsample=False, **kwargs):
    """Build the layers of a pre-activation (v2) CIFAR basic block.

    Args:
        in_channels: number of channels entering the block.
        channels: number of channels produced by the block.
        stride: stride of the first 3x3 convolution.
        downsample: if True, add a 1x1 conv projection shortcut.
    """
    super(CIFARBasicBlockV2, self).__init__(**kwargs)
    # v2 ordering: each convolution is preceded by its own BatchNorm.
    self.bn1 = nn.BatchNorm2d(in_channels)
    self.conv1 = _conv3x3(in_channels, channels, stride)
    self.bn2 = nn.BatchNorm2d(channels)
    self.conv2 = _conv3x3(channels, channels, 1)
    # 1x1 projection when the shortcut must change shape, else identity (None).
    self.downsample = (
        nn.Conv2d(in_channels, channels, 1, stride, bias=False)
        if downsample else None
    )
def __init__(self, in_channels, channels, stride, downsample=False, **kwargs):
    """Build the layers of a v1 (post-activation) CIFAR basic block.

    Args:
        in_channels: number of channels entering the block.
        channels: number of channels produced by the block.
        stride: stride of the first 3x3 convolution.
        downsample: if True, add a 1x1 conv + BN projection shortcut.
    """
    super(CIFARBasicBlockV1, self).__init__(**kwargs)
    # Residual body: conv-BN-ReLU-conv-BN.
    body_layers = [
        _conv3x3(in_channels, channels, stride),
        nn.BatchNorm2d(channels),
        nn.ReLU(inplace=True),
        _conv3x3(channels, channels, 1),
        nn.BatchNorm2d(channels),
    ]
    self.body = nn.Sequential(*body_layers)
    if downsample:
        # Projection shortcut: match the body's channels and stride.
        self.downsample = nn.Sequential(
            nn.Conv2d(in_channels, channels, 1, stride=stride, bias=False),
            nn.BatchNorm2d(channels),
        )
    else:
        self.downsample = None
def __init__(self, block, layers, channels, classes=1000, thumbnail=False, last_gamma=False, **kwargs):
    """Assemble the ResNet v1 feature extractor and linear classifier head.

    Args:
        block: residual block class used to build each stage.
        layers: number of blocks in each stage.
        channels: channel counts; `channels` must hold two more entries than
            `layers` (input channels, stem output, then one entry per stage).
        classes: number of output classes for the final linear layer.
        thumbnail: if True, use a small-image stem (single 3x3 conv) instead
            of the 7x7-conv + max-pool stem.
        last_gamma: forwarded to each stage's blocks via ``_make_layer``.
    """
    super(ResNetV1, self).__init__(**kwargs)
    assert len(layers) == len(channels) - 2
    feature_layers = []
    if thumbnail:
        # Small-input stem: one 3x3 conv, no spatial downsampling.
        feature_layers.append(_conv3x3(channels[0], channels[1], 1))
    else:
        # Large-input stem: 7x7/2 conv, BN, ReLU, then 3x3/2 max-pool.
        feature_layers.append(
            nn.Conv2d(channels[0], channels[1], 7, 2, 3, bias=False))
        feature_layers.append(nn.BatchNorm2d(channels[1]))
        feature_layers.append(nn.ReLU(inplace=True))
        feature_layers.append(nn.MaxPool2d(3, 2, 1))
    for stage_idx, depth in enumerate(layers):
        # First stage keeps resolution; every later stage halves it.
        stage_stride = 1 if stage_idx == 0 else 2
        feature_layers.append(
            self._make_layer(block, depth, channels[stage_idx + 1],
                             channels[stage_idx + 2], stage_stride,
                             last_gamma=last_gamma))
    self.features = nn.Sequential(*feature_layers)
    self.output = nn.Linear(channels[-1], classes)
def __init__(self, in_channels, channels, stride, downsample=False, last_gamma=False, **kwargs):
    """Build the layers of a pre-activation (v2) bottleneck block.

    The residual path is 1x1 -> 3x3 -> 1x1 with an internal width of
    ``channels // 4``; each conv is preceded by its own BatchNorm.

    Args:
        in_channels: number of channels entering the block.
        channels: number of channels produced by the block.
        stride: stride of the middle 3x3 convolution.
        downsample: if True, add a 1x1 conv projection shortcut.
        last_gamma: if True, zero-initialize the last BN's weight (gamma).
    """
    super(BottleneckV2, self).__init__(**kwargs)
    bottleneck_width = channels // 4  # internal channel count of the bottleneck
    self.bn1 = nn.BatchNorm2d(in_channels)
    self.conv1 = nn.Conv2d(in_channels, bottleneck_width,
                           kernel_size=1, stride=1, bias=False)
    self.bn2 = nn.BatchNorm2d(bottleneck_width)
    self.conv2 = _conv3x3(bottleneck_width, bottleneck_width, stride)
    self.bn3 = nn.BatchNorm2d(bottleneck_width)
    if last_gamma:
        # Zero gamma on the final BN so the residual branch starts near zero.
        nn.init.zeros_(self.bn3.weight)
    self.conv3 = nn.Conv2d(bottleneck_width, channels,
                           kernel_size=1, stride=1, bias=False)
    # 1x1 projection when the shortcut must change shape, else identity (None).
    self.downsample = (
        nn.Conv2d(in_channels, channels, 1, stride, bias=False)
        if downsample else None
    )
def __init__(self, in_channels, channels, stride, downsample=False, last_gamma=False, **kwargs):
    """Build the layers of a pre-activation (v2) basic residual block.

    Args:
        in_channels: number of channels entering the block.
        channels: number of channels produced by the block.
        stride: stride of the first 3x3 convolution.
        downsample: if True, add a 1x1 conv projection shortcut.
        last_gamma: if True, zero-initialize the second BN's weight (gamma).
    """
    super(BasicBlockV2, self).__init__(**kwargs)
    # v2 ordering: each convolution is preceded by its own BatchNorm.
    self.bn1 = nn.BatchNorm2d(in_channels)
    self.conv1 = _conv3x3(in_channels, channels, stride)
    self.bn2 = nn.BatchNorm2d(channels)
    if last_gamma:
        # Zero gamma on the last BN so the residual branch starts near zero.
        nn.init.zeros_(self.bn2.weight)
    self.conv2 = _conv3x3(channels, channels, 1)
    # 1x1 projection when the shortcut must change shape, else identity (None).
    self.downsample = (
        nn.Conv2d(in_channels, channels, 1, stride, bias=False)
        if downsample else None
    )