def __init__(self, depth, widen_factor, dropout_rate, num_classes):
    '''
    Args:
        depth (int): Depth of the network; must be of the form 6n+4,
            where n is the number of blocks per group
        widen_factor (int): Widening factor compared with a vanilla ResNet
        dropout_rate (float): Dropout rate
        num_classes (int): Number of classes to predict
    '''
    super().__init__()
    assert (depth - 4) % 6 == 0, 'Depth of Wide ResNet should be 6n+4'
    self.depth = depth
    self.widen_factor = widen_factor
    self.stages = [16, 32, 64]
    self.widths = [int(width * widen_factor) for width in self.stages]
    self.dropout_rate = dropout_rate
    self.num_classes = num_classes
    self.num_blocks = (self.depth - 4) // 6

    self.conv1 = conv3x3(3, self.stages[0])
    self.conv2 = self._make_layer(self.stages[0], self.widths[0], stride=1, padding=0)
    self.conv3 = self._make_layer(self.widths[0], self.widths[1], stride=2, padding=0)
    self.conv4 = self._make_layer(self.widths[1], self.widths[2], stride=2, padding=0)
    # Note: PyTorch's BatchNorm momentum semantics differ from Lua/TF;
    # the PyTorch default is 0.1.
    self.bn = nn.BatchNorm2d(self.widths[2], momentum=0.9)
    self.relu = nn.ReLU(inplace=True)
    self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
    self.fc = nn.Linear(self.widths[2], num_classes)
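# Usage sketch (hedged): assumes the enclosing class is named `WideResNet`
# and implements the usual forward() pass; neither is shown above.
import torch

model = WideResNet(depth=28, widen_factor=10, dropout_rate=0.3, num_classes=10)
logits = model(torch.randn(1, 3, 32, 32))  # CIFAR-sized input -> (1, 10) logits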
def _make_block(self, block, duplicates, out_channels, stride=1):
    """Create one stage of blocks in ResNet.

    Args:
        block: block class to instantiate (e.g. BasicBlock)
        duplicates: number of blocks in the stage
        out_channels: output channels of the stage
        stride: stride of the first block (downsamples when > 1)

    Returns:
        nn.Sequential(*layers)
    """
    downsample = None
    if (stride != 1) or (self.in_channels != out_channels):
        downsample = nn.Sequential(
            conv3x3(self.in_channels, out_channels, stride=stride),
            nn.BatchNorm2d(num_features=out_channels))

    layers = []
    # The first block carries the stride and channel change; the rest keep shape.
    layers.append(block(self.in_channels, out_channels, stride, downsample))
    self.in_channels = out_channels
    for _ in range(1, duplicates):
        layers.append(block(out_channels, out_channels))

    return nn.Sequential(*layers)
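# Illustrative sketch (hedged): how a stage built by _make_block behaves.
# `resnet` is a hypothetical instance whose self.in_channels is 32.
stage = resnet._make_block(BasicBlock, duplicates=2, out_channels=64, stride=2)
# stage[0] strides by 2 and projects 32 -> 64 channels via `downsample`;
# stage[1] is a plain 64 -> 64 block with an identity shortcut.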
def __init__(
    self,
    inplanes: int,
    planes: int,
    stride: int = 1,
    downsample: Optional[Callable] = None,
    dropout_rate: float = 0.3,
) -> None:
    super().__init__()
    self.inplanes = inplanes
    self.planes = planes
    # Pre-activation ordering: BN comes before each conv.
    self.bn1 = nn.BatchNorm2d(self.inplanes)
    self.conv1 = conv3x3(self.inplanes, self.planes)
    self.dropout = nn.Dropout(dropout_rate)
    self.bn2 = nn.BatchNorm2d(self.planes)
    self.conv2 = conv3x3(self.planes, self.planes, stride)
    self.relu = nn.ReLU(inplace=True)
    self.downsample = downsample
    self.stride = stride
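def forward(self, x):
    # Hedged sketch of a forward pass consistent with the pre-activation
    # layout above (BN -> ReLU -> conv, dropout between the convs); the
    # repository's actual forward() is not shown and may differ.
    out = self.conv1(self.relu(self.bn1(x)))
    out = self.dropout(out)
    out = self.conv2(self.relu(self.bn2(out)))
    residual = x if self.downsample is None else self.downsample(x)
    return out + residual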
def get_downsample(self, in_channel, out_channel, stride=1, repeat=1):
    downsample_layers = []
    for _ in range(repeat):
        # Only build and append a projection when the shape actually changes;
        # otherwise `downsample` would be undefined (or stale) when appended.
        if (stride != 1) or (in_channel != out_channel):
            downsample_layers.append(nn.Sequential(
                conv3x3(in_channel, out_channel, stride=stride),
                nn.BatchNorm2d(num_features=out_channel)))
            in_channel = out_channel
    return nn.Sequential(*downsample_layers)
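# Quick shape check (hedged): `model` is a hypothetical instance exposing
# get_downsample. A single 32 -> 64, stride-2 projection shortcut:
import torch

shortcut = model.get_downsample(32, 64, stride=2, repeat=1)
y = shortcut(torch.randn(1, 32, 16, 16))  # -> torch.Size([1, 64, 8, 8])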
def __init__(self, in_channels, out_channels, stride=1, downsample=None):
    """Basic block of the ResNet builder."""
    super(BasicBlock, self).__init__()

    # First conv3x3 layer (carries the stride)
    self.conv1 = conv3x3(in_channels, out_channels, stride)
    # Batch normalization
    self.bn1 = nn.BatchNorm2d(num_features=out_channels)
    # ReLU activation
    self.relu = nn.ReLU(inplace=True)
    # Second conv3x3 layer
    self.conv2 = conv3x3(out_channels, out_channels)
    # Batch normalization
    self.bn2 = nn.BatchNorm2d(num_features=out_channels)
    # Downsample path for the residual connection
    self.downsample = downsample
    self.stride = stride
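def forward(self, x):
    # Hedged sketch of the standard post-activation residual forward implied
    # by the layers above; the repository's actual forward() may differ.
    residual = x
    out = self.relu(self.bn1(self.conv1(x)))
    out = self.bn2(self.conv2(out))
    if self.downsample is not None:
        residual = self.downsample(x)
    return self.relu(out + residual)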
def __init__(self, in_channels, mid_channels, out_channels, stride=1):
    super(BottleNeck, self).__init__()
    self.in_channels = in_channels
    self.mid_channels = mid_channels
    self.out_channels = out_channels
    self.stride = stride

    # Main 1x1 -> 3x3 -> 1x1 bottleneck path
    self.layer = nn.Sequential(
        conv1x1(in_channels, mid_channels, self.stride, 0),
        conv3x3(mid_channels, mid_channels, 1, 1),
        conv1x1(mid_channels, out_channels, 1, 0))
    # Spatial downsampling for the shortcut
    self.downsample = nn.MaxPool2d(2, stride)
    # 1x1 projection matching the channels and stride of the main path
    self.conv1 = nn.Conv2d(in_channels=in_channels,
                           out_channels=out_channels,
                           kernel_size=1,
                           stride=self.stride,
                           padding=0)
    self.relu = nn.ReLU()
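def forward(self, x):
    # Hedged sketch: one plausible forward for this block, using the 1x1
    # `conv1` projection as the shortcut (the pooling `downsample` is an
    # alternative when channels already match); the actual forward() is
    # not shown above and may differ.
    out = self.layer(x)
    residual = self.conv1(x)  # matches channels and stride of the main path
    return self.relu(out + residual)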
def __init__(self, block, duplicates, num_classes=10):
    """Residual neural network builder."""
    super(ResNet, self).__init__()

    self.in_channels = 32
    self.conv1 = conv3x3(in_channels=3, out_channels=32)
    self.bn = nn.BatchNorm2d(num_features=32)
    self.relu = nn.ReLU(inplace=True)
    self.dropout = nn.Dropout2d(p=0.02)

    # Stages of basic blocks
    self.conv2_x = self._make_block(block, duplicates[0], out_channels=32)
    self.conv3_x = self._make_block(block, duplicates[1], out_channels=64, stride=2)
    self.conv4_x = self._make_block(block, duplicates[2], out_channels=128, stride=2)
    self.conv5_x = self._make_block(block, duplicates[3], out_channels=256, stride=2)

    self.maxpool = nn.MaxPool2d(kernel_size=4, stride=1)
    self.fc_layer = nn.Linear(256, num_classes)

    # Initialize weights
    # self.apply(initialize_weights)
    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.kaiming_normal_(m.weight.data, mode='fan_out')
        elif isinstance(m, nn.BatchNorm2d):
            m.weight.data.fill_(1)
            m.bias.data.zero_()
        elif isinstance(m, nn.Linear):
            m.bias.data.zero_()
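# Smoke test (hedged): assumes a ResNet-18-style duplicates layout, a
# CIFAR-10-sized input, and a standard forward() that flattens before
# fc_layer. With three stride-2 stages a 32x32 image reaches the 4x4
# maxpool as 4x4x256, so fc_layer sees a 256-dim vector per sample.
import torch

net = ResNet(BasicBlock, duplicates=[2, 2, 2, 2], num_classes=10)
logits = net(torch.randn(4, 3, 32, 32))
print(logits.shape)  # expected: torch.Size([4, 10])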