Example #1
    def __init__(self, in_planes, planes, stride=1):
        super(PreActBlock, self).__init__()
        # SwitchNorm2d takes the place of the BatchNorm2d layers of a standard pre-activation block
        self.sn1 = sn.SwitchNorm2d(in_planes)
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.sn2 = sn.SwitchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)

        # "expansion" is a class attribute of PreActBlock (1 for basic blocks); a 1x1 conv
        # projection is only needed when the residual's shape changes
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False)
            )
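
The excerpt shows only the constructor. For context, here is a minimal sketch of the forward pass such a pre-activation block is usually paired with (not part of the excerpt; it follows the norm → ReLU → conv ordering of He et al.'s pre-activation ResNet and assumes `F` is `torch.nn.functional`):

    def forward(self, x):
        out = F.relu(self.sn1(x))  # pre-activation: normalize first, then activate
        # take the shortcut from the pre-activated tensor when a projection exists
        shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x
        out = self.conv1(out)
        out = self.conv2(F.relu(self.sn2(out)))
        return out + shortcut  # residual connection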
Example #2
    def __init__(self, num_classes=17):
        self.inplanes = 128

        super(TSnetSE, self).__init__()

        self.layer1 = nn.Sequential(
            conv3x3(21, self.inplanes),
            sn.SwitchNorm2d(self.inplanes),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes, self.inplanes),
            sn.SwitchNorm2d(self.inplanes),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes, self.inplanes, 2),
            sn.SwitchNorm2d(self.inplanes),
            nn.ReLU(inplace=True),
        )
        self.se1 = SELayer(self.inplanes)
        self.layer2 = nn.Sequential(
            conv3x3(self.inplanes, self.inplanes * 2),
            sn.SwitchNorm2d(self.inplanes * 2),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes * 2, self.inplanes * 2),
            sn.SwitchNorm2d(self.inplanes * 2),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes * 2, self.inplanes * 2, 2),
            sn.SwitchNorm2d(self.inplanes * 2),
            nn.ReLU(inplace=True),
        )
        self.se2 = SELayer(self.inplanes * 2)
        self.layer3 = nn.Sequential(
            conv3x3(self.inplanes * 2, self.inplanes * 4),
            sn.SwitchNorm2d(self.inplanes * 4),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes * 4, self.inplanes * 4),
            sn.SwitchNorm2d(self.inplanes * 4),
            nn.ReLU(inplace=True),
            conv3x3(self.inplanes * 4, self.inplanes * 4, 2),
            sn.SwitchNorm2d(self.inplanes * 4),
        )
        self.se3 = SELayer(self.inplanes * 4)
        self.fc = nn.Sequential(
            nn.Linear(4096 * 2, 1024),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(1024, 512),
            nn.LeakyReLU(inplace=True),  # should the activation come before or after the dropout?
            nn.Dropout(p=0.8),
            nn.Linear(512, num_classes))  # use the constructor argument instead of hard-coding 17
        self.LogSoftmax = nn.LogSoftmax(dim=1)  # explicit dim avoids the deprecated implicit-dim behavior
        # added later
        self.initialize()
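
Example #2 calls two helpers it does not define, `conv3x3` and `SELayer`. Sketches of their conventional forms are shown below, assuming the torchvision-style 3x3 convolution helper and the standard squeeze-and-excitation layer; the definitions in the original source may differ in detail:

    def conv3x3(in_planes, out_planes, stride=1):
        # 3x3 convolution with padding, matching the call sites above
        return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                         padding=1, bias=False)

    class SELayer(nn.Module):
        # squeeze-and-excitation: channel-wise reweighting from globally pooled features
        def __init__(self, channel, reduction=16):
            super(SELayer, self).__init__()
            self.avg_pool = nn.AdaptiveAvgPool2d(1)
            self.fc = nn.Sequential(
                nn.Linear(channel, channel // reduction),
                nn.ReLU(inplace=True),
                nn.Linear(channel // reduction, channel),
                nn.Sigmoid())

        def forward(self, x):
            b, c, _, _ = x.size()
            y = self.avg_pool(x).view(b, c)    # squeeze: (b, c, h, w) -> (b, c)
            y = self.fc(y).view(b, c, 1, 1)    # excite: per-channel gates in [0, 1]
            return x * y                       # rescale the input feature map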
Example #3
    def __init__(self, in_size, out_size, normalize=True, dropout=0.0):
        super(UNetDown, self).__init__()
        # strided 4x4 conv halves the spatial resolution
        layers = [nn.Conv2d(in_size, out_size, 4, 2, 1, bias=False)]
        if normalize:
            layers.append(sn.SwitchNorm2d(out_size))  # SwitchNorm in place of the usual batch/instance norm
        layers.append(nn.LeakyReLU(0.2))
        if dropout:
            layers.append(nn.Dropout(dropout))
        self.model = nn.Sequential(*layers)
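
A quick shape check for the down block (a sketch; it assumes `torch` is imported and `sn` exposes `SwitchNorm2d`):

    down = UNetDown(3, 64, dropout=0.5)
    x = torch.randn(1, 3, 256, 256)
    print(down(x).shape)  # torch.Size([1, 64, 128, 128]) -- the stride-2 conv halves H and W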
Example #4
    def discriminator_block(in_filters, out_filters, normalization=True):
        """Returns downsampling layers of each discriminator block"""
        layers = [
            nn.Conv2d(in_filters, out_filters, 4, stride=2, padding=1)  # halve H and W
        ]
        if normalization:
            layers.append(sn.SwitchNorm2d(out_filters))
        layers.append(nn.LeakyReLU(0.2, inplace=True))
        return layers
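
Such blocks are typically chained into a PatchGAN-style discriminator. A minimal sketch of that assembly (the channel widths and the 6-channel input, an image pair concatenated along the channel axis, are illustrative, following the pix2pix layout):

    model = nn.Sequential(
        *discriminator_block(6, 64, normalization=False),  # no norm on the first block
        *discriminator_block(64, 128),
        *discriminator_block(128, 256),
        *discriminator_block(256, 512),
        nn.Conv2d(512, 1, 4, padding=1)  # 1-channel map of per-patch real/fake scores
    )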
Example #5
    def __init__(self, in_size, out_size, dropout=0.0):
        super(UNetUp, self).__init__()
        layers = [
            nn.ConvTranspose2d(in_size, out_size, 4, 2, 1, bias=False),  # doubles H and W
            sn.SwitchNorm2d(out_size),
            nn.ReLU(inplace=True)
        ]
        if dropout:
            layers.append(nn.Dropout(dropout))

        self.model = nn.Sequential(*layers)
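
The excerpt again omits the forward pass. In the U-Net generators this up block usually appears in, the upsampled features are concatenated with the matching encoder skip connection, which is why `in_size` at the next stage accounts for both; a sketch (assumes `torch` is imported):

    def forward(self, x, skip_input):
        x = self.model(x)                  # upsample by 2
        x = torch.cat((x, skip_input), 1)  # concatenate the encoder skip along channels
        return x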