def __init__(self):
    """Build the convolutional backbone and the fully-connected classifier.

    The backbone is three conv(3x3, pad 1) -> BatchNorm -> ReLU -> MaxPool(2)
    stages widening 1 -> 32 -> 64 -> 128 channels, with a NONLocalBlock2D
    inserted after each of the first two pooling stages.

    The head flattens to 128*3*3 features and maps 256 -> 10 logits.
    NOTE(review): 128*3*3 assumes a 1x28x28 input (28 -> 14 -> 7 -> 3 under
    the three 2x2 max-pools, with floor division) — confirm against the
    dataset used by callers.
    """
    # Zero-argument super() — Python 3 idiom; the original used the
    # redundant super(Network, self).__init__() form.
    super().__init__()

    self.convs = nn.Sequential(
        # Stage 1: 1 -> 32 channels, spatial size halved by the pool.
        nn.Conv2d(in_channels=1, out_channels=32, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(32),
        nn.ReLU(),
        nn.MaxPool2d(2),
        NONLocalBlock2D(in_channels=32),
        # Stage 2: 32 -> 64 channels.
        nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(64),
        nn.ReLU(),
        nn.MaxPool2d(2),
        NONLocalBlock2D(in_channels=64),
        # Stage 3: 64 -> 128 channels; no non-local block after this one.
        nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm2d(128),
        nn.ReLU(),
        nn.MaxPool2d(2),
    )

    # Classifier head: flatten -> 256 hidden units -> 10 class logits.
    self.fc = nn.Sequential(
        nn.Linear(in_features=128 * 3 * 3, out_features=256),
        nn.ReLU(),
        nn.Dropout(0.5),
        nn.Linear(in_features=256, out_features=10),
    )
def __init__(self):
    """Construct the tiny ResNet variant with non-local blocks.

    A 3->32 stem (conv/BN/ReLU) is followed by five residual stages built
    via self._make_layer; presumably its arguments are
    (planes, num_blocks, stride) — confirm against _make_layer's
    definition. NONLocalBlock2D modules are attached after the fourth and
    fifth stages. All Conv2d weights receive Kaiming-normal initialisation
    and every BatchNorm2d is set to an identity affine (weight=1, bias=0).
    """
    super(ResNet50Tiny, self).__init__()

    # Stem: 3x3 conv without bias (BatchNorm follows), spatial size kept.
    self.layer0 = nn.Sequential(
        nn.Conv2d(3, 32, kernel_size=(3, 3), stride=1, padding=1, bias=False),
        nn.BatchNorm2d(32),
        nn.ReLU(inplace=True),
    )

    # Residual stages. Order matters: _make_layer reads/updates
    # self.inplanes, so the stages must be created in sequence.
    self.inplanes = 32
    self.layer1 = self._make_layer(32, 3, [2, 2])
    self.layer2 = self._make_layer(64, 4, [2, 2])
    self.layer3 = self._make_layer(128, 6, [2, 1])
    self.layer4 = self._make_layer(256, 6, [2, 1])
    self.non_local_4 = NONLocalBlock2D(256, sub_sample=False, bn_layer=True)
    self.layer5 = self._make_layer(512, 3, [2, 1])
    self.non_local_5 = NONLocalBlock2D(512, sub_sample=False, bn_layer=True)

    # Parameter initialisation: Kaiming for convs, identity for BN affine.
    for module in self.modules():
        if isinstance(module, nn.Conv2d):
            nn.init.kaiming_normal_(
                module.weight, mode="fan_out", nonlinearity="relu"
            )
        elif isinstance(module, nn.BatchNorm2d):
            nn.init.constant_(module.weight, 1)
            nn.init.constant_(module.bias, 0)