Example #1
    def __init__(self, output_dim=6, dropout=False):
        super(ResidualAttentionModel_92_Small, self).__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True)
        )
        self.dropout = dropout
        self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.ResidualBlock1 = ResidualBlock(64, 256)
        if self.dropout:
            self.dp_1 = nn.Dropout(0.2)
            self.dp_2 = nn.Dropout(0.0)  # p=0.0: currently a no-op placeholder
            self.dp_3 = nn.Dropout(0.0)  # p=0.0: currently a no-op placeholder
        self.attention_module1 = AttentionModule_stage1(256, 256)
        self.ResidualBlock2 = ResidualBlock(256, 512, 2)
        self.attention_module2 = AttentionModule_stage2(512, 512)
        self.attention_module2_2 = AttentionModule_stage2(512, 512)  # tbq add
        self.ResidualBlock3 = ResidualBlock(512, 1024, 3)

        self.mpool2 = nn.Sequential(
            nn.BatchNorm2d(1024),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(kernel_size=8, stride=3)
        )
        self.fc = nn.Linear(1024, output_dim)
        #self.softmax = nn.Softmax(dim=1)

        # Xavier-initialize every conv weight; modules() (not children())
        # reaches convs nested inside Sequential containers.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.xavier_uniform_(m.weight)
                if m.bias is not None:  # the convs above use bias=False
                    nn.init.zeros_(m.bias)
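
A minimal smoke test for this constructor, assuming torch and torch.nn are imported and that ResidualBlock, AttentionModule_stage1, and AttentionModule_stage2 are defined elsewhere in the repo (they are referenced above but not shown). The 224x224 input size is an assumption inferred from the 7x7/stride-2 stem and the 8x8 average pool:

# Hypothetical usage sketch (not from the repo): instantiate the model and
# count its parameters. A full forward pass would need the forward() method,
# which is not shown in this snippet.
import torch

model = ResidualAttentionModel_92_Small(output_dim=6, dropout=True)
n_params = sum(p.numel() for p in model.parameters())
print(n_params)
x = torch.randn(1, 3, 224, 224)  # assumed input size; the repo may differ
# logits = model(x)  # expected shape (1, 6) once forward() is defined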
Example #2
    def __init__(self,
                 in_channels,
                 out_channels,
                 size1=(28, 28),
                 size2=(14, 14)):
        super(AttentionModule_stage2, self).__init__()
        self.first_ResidualBlocks = ResidualBlock(in_channels, out_channels)

        self.trunk_branches = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.softmax1_blocks = ResidualBlock(in_channels, out_channels)

        self.skip1_connection_ResidualBlock = ResidualBlock(
            in_channels, out_channels)

        self.mpool2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.softmax2_blocks = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.interpolation2 = nn.UpsamplingBilinear2d(size=size2)

        self.softmax3_blocks = ResidualBlock(in_channels, out_channels)

        self.interpolation1 = nn.UpsamplingBilinear2d(size=size1)

        self.softmax4_blocks = nn.Sequential(
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.Sigmoid())

        self.last_blocks = ResidualBlock(in_channels, out_channels)
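
The snippet defines only the submodules. Below is a sketch of the forward pass they imply, following the Residual Attention Network soft-mask formulation out = (1 + mask) * trunk, with the skip connection rejoined after the first upsampling. This wiring is an assumption, not the repo's actual forward():

    # Assumed wiring for AttentionModule_stage2 (sketch, not the repo's code):
    def forward(self, x):
        x = self.first_ResidualBlocks(x)
        out_trunk = self.trunk_branches(x)
        # Mask branch: downsample twice, then upsample back, adding the skip.
        out_mpool1 = self.mpool1(x)                              # 28x28 -> 14x14
        out_softmax1 = self.softmax1_blocks(out_mpool1)
        out_skip1 = self.skip1_connection_ResidualBlock(out_softmax1)
        out_mpool2 = self.mpool2(out_softmax1)                   # 14x14 -> 7x7
        out_softmax2 = self.softmax2_blocks(out_mpool2)
        out = self.interpolation2(out_softmax2) + out_softmax1   # back to 14x14
        out = out + out_skip1
        out_softmax3 = self.softmax3_blocks(out)
        out = self.interpolation1(out_softmax3) + out_trunk      # back to 28x28
        mask = self.softmax4_blocks(out)                         # sigmoid gate in [0, 1]
        out = (1 + mask) * out_trunk                             # residual attention
        return self.last_blocks(out)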
Example #3
    def __init__(self, in_channels, out_channels, size=(8, 8)):
        super(AttentionModule_stage3_cifar, self).__init__()
        self.first_ResidualBlocks = ResidualBlock(in_channels, out_channels)

        self.trunk_branches = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.middle_2r_blocks = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.conv1_1_blocks = nn.Sequential(
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.Sigmoid())

        self.last_blocks = ResidualBlock(in_channels, out_channels)
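
Note that no pooling or interpolation appears here: at stage 3 the CIFAR feature map is already 8x8, so the mask branch runs at full resolution. A sketch of the implied forward pass, with the same (1 + mask) * trunk gating as above (assumed wiring):

    # Assumed wiring for AttentionModule_stage3_cifar (sketch):
    def forward(self, x):
        x = self.first_ResidualBlocks(x)
        out_trunk = self.trunk_branches(x)
        out_middle = self.middle_2r_blocks(x)    # mask branch, no downsampling
        mask = self.conv1_1_blocks(out_middle)   # sigmoid gate in [0, 1]
        out = (1 + mask) * out_trunk
        return self.last_blocks(out)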
Example #4
    def __init__(self, in_channels, out_channels, size=(8, 8)):
        super(AttentionModule_stage2_cifar, self).__init__()
        self.first_ResidualBlocks = ResidualBlock(in_channels, out_channels)

        self.trunk_branches = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)  # 4*4

        self.middle_2r_blocks = nn.Sequential(
            ResidualBlock(in_channels, out_channels),
            ResidualBlock(in_channels, out_channels))

        self.interpolation1 = nn.UpsamplingBilinear2d(size=size)  # 8*8

        self.conv1_1_blocks = nn.Sequential(
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.Sigmoid())

        self.last_blocks = ResidualBlock(in_channels, out_channels)
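
The stage-2 CIFAR variant adds exactly one pool/upsample pair around the mask branch. Again, a sketch of the assumed forward():

    # Assumed wiring for AttentionModule_stage2_cifar (sketch):
    def forward(self, x):
        x = self.first_ResidualBlocks(x)
        out_trunk = self.trunk_branches(x)
        out_mpool1 = self.mpool1(x)                         # 8x8 -> 4x4
        out_middle = self.middle_2r_blocks(out_mpool1)
        out = self.interpolation1(out_middle) + out_trunk   # back to 8x8
        mask = self.conv1_1_blocks(out)                     # sigmoid gate in [0, 1]
        out = (1 + mask) * out_trunk
        return self.last_blocks(out)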
Example #5
    def __init__(self, output_dim):
        super(ResidualAttentionModel_92_32input_update, self).__init__()
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True)
        )  # 32*32
        # self.mpool1 = nn.MaxPool2d(kernel_size=2, stride=2, padding=0)  # 16*16
        self.ResidualBlock1 = ResidualBlock(32, 128)  # 32*32
        self.attention_module1 = AttentionModule_stage1_cifar(128, 128, size1=(32, 32), size2=(16, 16))  # 32*32
        self.ResidualBlock2 = ResidualBlock(128, 256, 2)  # 16*16
        self.attention_module2 = AttentionModule_stage2_cifar(256, 256, size=(16, 16))  # 16*16
        self.attention_module2_2 = AttentionModule_stage2_cifar(256, 256, size=(16, 16))  # 16*16 # tbq add
        self.ResidualBlock3 = ResidualBlock(256, 512, 2)  # 8*8 (stride 2 from 16*16)
        self.attention_module3 = AttentionModule_stage3_cifar(512, 512)  # 8*8
        self.attention_module3_2 = AttentionModule_stage3_cifar(512, 512)  # 8*8 # tbq add
        self.attention_module3_3 = AttentionModule_stage3_cifar(512, 512)  # 8*8 # tbq add
        self.ResidualBlock4 = ResidualBlock(512, 1024)  # 8*8
        self.ResidualBlock5 = ResidualBlock(1024, 1024)  # 8*8
        self.ResidualBlock6 = ResidualBlock(1024, 1024)  # 8*8
        self.mpool2 = nn.Sequential(
            nn.BatchNorm2d(1024),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(kernel_size=8)
        )
        self.fc = nn.Linear(1024, output_dim)
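
A smoke test for the 32x32-input model, assuming torch and torch.nn are imported and that ResidualBlock and the *_cifar attention modules are importable from this repo; the input size follows the 32*32 comments above, and output_dim=10 is an illustrative choice (e.g. CIFAR-10):

# Hypothetical smoke test (not from the repo):
import torch

model = ResidualAttentionModel_92_32input_update(output_dim=10)
x = torch.randn(2, 3, 32, 32)  # batch of two 32x32 RGB images
# logits = model(x)  # expected shape (2, 10) once forward() is defined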