def __init__(self):
     super(ResidualAttentionModel_92, self).__init__()
     self.conv1 = nn.Sequential(
         nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
         nn.BatchNorm2d(64),
         nn.ReLU(inplace=True)
     )
     self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
     self.residual_block1 = ResidualBlock(64, 256)
     self.attention_module1 = AttentionModule_stage1(256, 256)
     self.residual_block2 = ResidualBlock(256, 512, 2)
     self.attention_module2 = AttentionModule_stage2(512, 512)
     self.attention_module2_2 = AttentionModule_stage2(512, 512)  # tbq add
     self.residual_block3 = ResidualBlock(512, 1024, 2)
     self.attention_module3 = AttentionModule_stage3(1024, 1024)
     self.attention_module3_2 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.attention_module3_3 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.residual_block4 = ResidualBlock(1024, 2048, 2)
     self.residual_block5 = ResidualBlock(2048, 2048)
     self.residual_block6 = ResidualBlock(2048, 2048)
     self.mpool2 = nn.Sequential(
         nn.BatchNorm2d(2048),
         nn.ReLU(inplace=True),
         nn.AvgPool2d(kernel_size=7, stride=1)
     )
     self.fc = nn.Linear(2048, 14)
     self.sigmoid_end = nn.Sigmoid()
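Only the constructor is shown in this example. Since the head ends in a 14-way nn.Sigmoid(), the natural pairing is a multi-label loss such as nn.BCELoss; below is a minimal, self-contained sketch of that pairing, where the random tensors stand in for model outputs and labels and are not part of the original snippet.

import torch
import torch.nn as nn

# Hypothetical pairing for the 14-way Sigmoid head above: nn.BCELoss expects
# probabilities in [0, 1], which is exactly what sigmoid_end produces.
probs = torch.sigmoid(torch.randn(8, 14))       # stand-in for model(x) output
targets = torch.randint(0, 2, (8, 14)).float()  # multi-hot labels, one row per image
loss = nn.BCELoss()(probs, targets)
print(loss.item())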
Example #2
 def __init__(self, num_classes):
     super(ResidualAttentionModel_448input, self).__init__()
     self.conv1 = nn.Sequential(
         nn.Conv2D(3, 64, kernel_size=7, stride=2, padding=3, bias_attr=False),
         nn.BatchNorm2D(64),
         nn.ReLU())
     self.mpool1 = nn.MaxPool2D(kernel_size=3, stride=2, padding=1)
     # tbq add
     # 112*112
     self.residual_block0 = ResidualBlock(64, 128)
     self.attention_module0 = AttentionModule_stage0(128, 128)
     # tbq add end
     self.residual_block1 = ResidualBlock(128, 256, 2)
     # 56*56
     self.attention_module1 = AttentionModule_stage1(256, 256)
     self.residual_block2 = ResidualBlock(256, 512, 2)
     self.attention_module2 = AttentionModule_stage2(512, 512)
     self.attention_module2_2 = AttentionModule_stage2(512, 512)  # tbq add
     self.residual_block3 = ResidualBlock(512, 1024, 2)
     self.attention_module3 = AttentionModule_stage3(1024, 1024)
     self.attention_module3_2 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.attention_module3_3 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.residual_block4 = ResidualBlock(1024, 2048, 2)
     self.residual_block5 = ResidualBlock(2048, 2048)
     self.residual_block6 = ResidualBlock(2048, 2048)
     self.mpool2 = nn.Sequential(nn.BatchNorm2D(2048), nn.ReLU(),
                                 nn.AvgPool2D(kernel_size=7, stride=1))
     self.fc = nn.Linear(2048, num_classes)
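The # 112*112 and # 56*56 comments above track the feature-map size for a 448x448 input: conv1, mpool1 and each stride-2 residual block halve the spatial resolution, which is why the final pooling uses kernel_size=7. Below is a quick, framework-free sketch of that schedule; the stage names come from the snippet, and the assumption that each listed stage halves the size is inferred from its stride=2 argument.

# Back-of-envelope check of the downsampling schedule for a 448x448 input.
# Each stage below is assumed to halve the spatial size (stride=2 in the snippet).
size = 448
for stage in ("conv1", "mpool1", "residual_block1", "residual_block2",
              "residual_block3", "residual_block4"):
    size //= 2
    print(f"{stage}: {size}x{size}")
# conv1: 224, mpool1: 112 (what residual_block0 / attention_module0 see),
# residual_block1: 56, ..., residual_block4: 7 -> matches AvgPool2D(kernel_size=7)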
 def __init__(self):
     super(ResidualAttentionModel_448input, self).__init__()
     self.conv1 = nn.Sequential(
         nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
         nn.BatchNorm2d(64),
         nn.ReLU(inplace=True)
     )
     self.mpool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
     # tbq add
     # 112*112
     self.residual_block0 = ResidualBlock(64, 128)
     self.attention_module0 = AttentionModule_stage0(128, 128)
     # tbq add end
     self.residual_block1 = ResidualBlock(128, 256, 2)
     # 56*56
     self.attention_module1 = AttentionModule_stage1(256, 256)
     self.residual_block2 = ResidualBlock(256, 512, 2)
     self.attention_module2 = AttentionModule_stage2(512, 512)
     self.attention_module2_2 = AttentionModule_stage2(512, 512)  # tbq add
     self.residual_block3 = ResidualBlock(512, 1024, 2)
     self.attention_module3 = AttentionModule_stage3(1024, 1024)
     self.attention_module3_2 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.attention_module3_3 = AttentionModule_stage3(1024, 1024)  # tbq add
     self.residual_block4 = ResidualBlock(1024, 2048, 2)
     self.residual_block5 = ResidualBlock(2048, 2048)
     self.residual_block6 = ResidualBlock(2048, 2048)
     self.mpool2 = nn.Sequential(
         nn.BatchNorm2d(2048),
         nn.ReLU(inplace=True),
         nn.AvgPool2d(kernel_size=7, stride=1)
     )
     # self.fc = nn.Linear(2048, 10)
     # output dimension is 14 because there are 14 classes
     self.fc = nn.Linear(2048, 14)
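This PyTorch 448-input variant keeps the same stem as the 224-input model. Below is a runnable sanity check that the stem delivers the 112x112 map referred to by the # 112*112 comment; the random input tensor is an assumption, not part of the snippet.

import torch
import torch.nn as nn

# Stem only (conv1 + mpool1 from the snippet above), checking that a 448x448
# image reaches residual_block0 / attention_module0 as a 64x112x112 map.
stem = nn.Sequential(
    nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
    nn.BatchNorm2d(64),
    nn.ReLU(inplace=True),
    nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
)
with torch.no_grad():
    out = stem(torch.randn(1, 3, 448, 448))
print(out.shape)  # torch.Size([1, 64, 112, 112])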