Example #1
    def __init__(self):
        super(ASPP, self).__init__()

        in_planes = 2048
        dilations = [1, 6, 12, 18]

        # each ASPP branch outputs a feature map with 256 channels
        self.aspp1 = _ASPPModule(in_planes, planes=256, kernel_size=1, padding=0, dilation=dilations[0])
        self.aspp2 = _ASPPModule(in_planes, planes=256, kernel_size=3, padding=dilations[1], dilation=dilations[1])
        self.aspp3 = _ASPPModule(in_planes, planes=256, kernel_size=3, padding=dilations[2], dilation=dilations[2])
        self.aspp4 = _ASPPModule(in_planes, planes=256, kernel_size=3, padding=dilations[3], dilation=dilations[3])

        # perform global average pooling on the last feature map of the backbone
        # batch size must be greater than 1, otherwise BatchNorm will raise an exception during training
        self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)),
                                             nn.Conv2d(in_planes, 256, 1, stride=1, bias=False),
                                             nn.BatchNorm2d(256),
                                             nn.ReLU())

        self.p1 = nn.AdaptiveAvgPool2d(1)
        self.p2 = nn.Conv2d(in_planes, 256, 1, stride=1, bias=False)
        self.p3 = nn.BatchNorm2d(256)

        self.conv1 = nn.Conv2d(1280, 256, 1, bias=False)
        self.bn1 = nn.BatchNorm2d(256)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(0.5)
        self._init_weight()
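The forward pass is not part of this excerpt; below is a hedged sketch consistent with the constructor (F stands for torch.nn.functional). The four atrous branches and the pooled branch each yield 256 channels, so the concatenation carries 5 * 256 = 1280 channels, which is why conv1 takes 1280 inputs.

    def forward(self, x):
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)
        x5 = self.global_avg_pool(x)
        # upsample the 1x1 pooled features back to the branch resolution
        x5 = F.interpolate(x5, size=x4.size()[2:], mode='bilinear', align_corners=True)
        x = torch.cat((x1, x2, x3, x4, x5), dim=1)  # (N, 1280, H, W)
        return self.dropout(self.relu(self.bn1(self.conv1(x))))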
Example #2
    def __init__(self, in_planes=None, planes=None, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(in_planes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu1 = nn.ReLU()
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride
        self.relu2 = nn.ReLU(inplace=True)
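A plausible forward for this block (assumed; only the constructor is shown): two conv-BN stages with an identity shortcut, where downsample adapts the shortcut when stride or channel width changes.

    def forward(self, x):
        identity = x
        out = self.relu1(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        if self.downsample is not None:
            identity = self.downsample(x)
        return self.relu2(out + identity)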
Example #3
    def __init__(self, block, layers, num_classes=10):
        super(ResNet, self).__init__()
        self.inplanes = 64
        self.initdata = nn.Conv2d(1,
                                  64,
                                  kernel_size=7,
                                  stride=1,
                                  padding=3,
                                  bias=False)
        self.bn0 = nn.BatchNorm2d(64)
        self.relu0 = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.layer1 = self.make_layers(block, 64, layers[0])
        self.layer2 = self.make_layers(block, 128, layers[1], stride=1)
        self.layer3 = self.make_layers(block, 256, layers[2], stride=1)
        self.layer4 = self.make_layers(block, 512, layers[3], stride=2)

        self.avg = nn.AvgPool2d(7, stride=1)
        self.full = nn.Linear(512 * block.expansion, num_classes)
        self.sigmoid = nn.Sigmoid()

        for m in self.modules():
            # isinstance() checks whether an object is of a known type, similar to type()
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight,
                                     mode='fan_out',
                                     nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
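The matching forward pass is not part of the excerpt; a minimal sketch under the usual ResNet conventions follows. With the strides above (1, 1, 1, 2 plus the stride-2 max pool), a 1-channel 28x28 input reaches the 7x7 average pool at spatial size 7x7 exactly.

    def forward(self, x):
        x = self.maxpool(self.relu0(self.bn0(self.initdata(x))))
        x = self.layer4(self.layer3(self.layer2(self.layer1(x))))
        x = self.avg(x)
        x = torch.flatten(x, 1)
        return self.sigmoid(self.full(x))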
Example #4
    def __init__(self, num_classes):
        super(Decoder, self).__init__()
        low_level_in_planes = 256

        self.conv1 = nn.Conv2d(low_level_in_planes, 48, 1, bias=False)
        self.bn1 = nn.BatchNorm2d(48)
        self.relu = nn.ReLU()
        self.last_conv = nn.Sequential(
            nn.Conv2d(304, 256, kernel_size=3, stride=1, padding=1,
                      bias=False), nn.BatchNorm2d(256), nn.ReLU(),
            nn.Dropout(0.5),
            nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1,
                      bias=False), nn.BatchNorm2d(256), nn.ReLU(),
            nn.Dropout(0.1),
            nn.Conv2d(256, num_classes, kernel_size=1, stride=1))
        self._init_weight()
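A hedged sketch of the matching forward pass (argument names assumed; F stands for torch.nn.functional): the 48 reduced low-level channels are concatenated with the 256 ASPP channels, giving the 304 input channels of last_conv.

    def forward(self, x, low_level_feat):
        low = self.relu(self.bn1(self.conv1(low_level_feat)))  # 256 -> 48 channels
        x = F.interpolate(x, size=low.size()[2:], mode='bilinear', align_corners=True)
        x = torch.cat((x, low), dim=1)  # 256 + 48 = 304 channels
        return self.last_conv(x)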
Example #5
    def __init__(self, inplane, plane, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplane, plane, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(plane)
        self.conv2 = nn.Conv2d(plane,
                               plane,
                               kernel_size=3,
                               padding=1,
                               stride=stride,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(plane)
        self.conv3 = nn.Conv2d(plane,
                               self.expansion * plane,
                               kernel_size=1,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * plane)
        self.downsample = downsample
        self.stride = stride
        self.relu3 = nn.ReLU(inplace=True)
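The excerpt references self.expansion without defining it; in a torchvision-style Bottleneck it is the class attribute expansion = 4. A minimal sketch of the assumed missing pieces (F stands for torch.nn.functional; the intermediate ReLUs are functional since only relu3 is defined above):

    expansion = 4  # assumed class attribute; self.expansion above relies on it

    def forward(self, x):
        identity = x if self.downsample is None else self.downsample(x)
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        return self.relu3(out + identity)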
Example #6
    def __init__(self, in_channels, key_channels, out_channels, scale=1, dropout=0.1, bn_type=None):
        super(SpatialOCR_Module, self).__init__()

        self.object_context_block = ObjectAttentionBlock(in_channels, key_channels, scale, bn_type)
        _in_channels = 2 * in_channels

        self.conv_bn_dropout = nn.Sequential(
            nn.Conv2d(_in_channels, out_channels, kernel_size=1, padding=0, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Dropout2d(dropout)
        )
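A hedged sketch of the matching forward pass, inferred from _in_channels = 2 * in_channels: the attention context from object_context_block is concatenated with the input features before the 1x1 fusion. The argument names are assumptions.

    def forward(self, feats, proxy_feats):
        context = self.object_context_block(feats, proxy_feats)
        # concatenation doubles the channel count, matching _in_channels = 2 * in_channels
        return self.conv_bn_dropout(torch.cat([context, feats], dim=1))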
Example #7
    def __init__(self, inp_dim, out_dim):
        """
        residual block
        :param inp_dim: input dimension
        :param out_dim: output dimension
        """
        super(Residual, self).__init__()
        # the channel count must be at least 1
        out_dim_half = max(1, int(out_dim / 2))

        self.conv1 = nn.Conv2d(inp_dim, out_dim_half, 1, 1, bias=False)
        self.bn1 = nn.BatchNorm2d(out_dim_half)
        self.relu = nn.ReLU()

        self.conv2 = nn.Conv2d(out_dim_half, out_dim_half, 3, 1, 1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_dim_half)

        self.conv3 = nn.Conv2d(out_dim_half, out_dim, 1, 1, bias=False)
        self.bn3 = nn.BatchNorm2d(out_dim)

        self.skip_layer = nn.Conv2d(inp_dim, out_dim, 1, 1, bias=False)
        self.skip_layer_bn = nn.BatchNorm2d(out_dim)
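A plausible forward for this residual block (assumed; only the constructor is shown): a 1x1 squeeze, a 3x3 conv, and a 1x1 expand, added to a projected skip connection.

    def forward(self, x):
        skip = self.skip_layer_bn(self.skip_layer(x))  # project input to out_dim
        out = self.relu(self.bn1(self.conv1(x)))       # 1x1, inp_dim -> out_dim_half
        out = self.relu(self.bn2(self.conv2(out)))     # 3x3, out_dim_half -> out_dim_half
        out = self.bn3(self.conv3(out))                # 1x1, out_dim_half -> out_dim
        return self.relu(out + skip)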
Example #8
    def __init__(self,
                 in_planes,
                 planes,
                 stride=1,
                 downsample=None,
                 dilation=1):
        super(Bottleneck, self).__init__()
        self.conv1 = conv1x1(in_planes, planes)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes,
                               planes,
                               kernel_size=3,
                               stride=stride,
                               dilation=dilation,
                               padding=dilation,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
Example #9
    def __init__(self, in_planes, planes, kernel_size, padding, dilation):
        """
        One single ASPP module
        :param in_planes: input channels
        :param planes: output channels
        :param kernel_size: kernel size of the atrous convolution
        :param padding: padding
        :param dilation: dilation
        """
        super(_ASPPModule, self).__init__()
        self.atrous_conv = nn.Conv2d(in_planes, planes, kernel_size=kernel_size,
                                     stride=1, padding=padding, dilation=dilation, bias=False)
        self.bn = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU()

        self._init_weight()
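The forward pass and _init_weight are not shown in this excerpt; a minimal sketch, assuming the same Kaiming/constant scheme as Example #3:

    def forward(self, x):
        return self.relu(self.bn(self.atrous_conv(x)))

    def _init_weight(self):
        # assumed to mirror the init scheme from Example #3
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)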
Example #10
    def make_layers(self, block, planes, num_blocks, stride=1):
        downsample = None
        layers = []
        if stride != 1 or self.inplanes != block.expansion * planes:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes,
                          block.expansion * planes,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(block.expansion * planes))
        layers.append(
            block(self.inplanes, planes, stride=stride, downsample=downsample))
        self.inplanes = planes * block.expansion
        for _ in range(1, num_blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)
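A hypothetical instantiation tying Examples #3 and #10 together, assuming BasicBlock defines expansion = 1 as in torchvision; with the strides above, a 1-channel 28x28 input reaches the 7x7 average pool exactly.

    model = ResNet(BasicBlock, [2, 2, 2, 2], num_classes=10)  # ResNet-18-style layout
    out = model(torch.randn(2, 1, 28, 28))  # batch size >= 2, see the BatchNorm caveat in Example #1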
Example #11
    def __init__(self):
        super().__init__()

        self.relu = nn.ReLU()
        self.avgpool2d = nn.AvgPool2d(2, stride=2)

        # input part
        self.conv2d_1 = nn.Conv2d(1, 6, kernel_size=5, padding=2)
        self.batchnorm2d = nn.BatchNorm2d(6)

        # middle residual block
        self.conv2d_2 = nn.Conv2d(6, 6, kernel_size=3, padding=1)

        # output part
        self.conv2d_3 = nn.Conv2d(6, 6, 5)
        self.flatten = nn.Flatten()
        self.sig = nn.Sigmoid()
        self.linear_1 = nn.Linear(6 * 5 * 5, 64)
        self.linear_2 = nn.Linear(64, 10)
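The forward pass is not shown; Linear(6 * 5 * 5, 64) pins the final feature map at 6 x 5 x 5, which works out for a 1-channel 28x28 input as 28 -> 14 (pool) -> 14 (residual) -> 10 (unpadded 5x5 conv) -> 5 (pool). A hedged sketch consistent with those shapes:

    def forward(self, x):
        # input part: conv -> BN -> ReLU -> 2x2 average pool, 28 -> 14
        x = self.avgpool2d(self.relu(self.batchnorm2d(self.conv2d_1(x))))
        # middle residual block, spatial size preserved
        x = self.relu(x + self.conv2d_2(x))
        # output part: unpadded 5x5 conv then pool, 14 -> 10 -> 5
        x = self.avgpool2d(self.relu(self.conv2d_3(x)))
        x = self.flatten(x)
        x = self.sig(self.linear_1(x))
        return self.linear_2(x)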
Example #12
    def _make_multi_grid_layer(self,
                               block,
                               planes,
                               blocks,
                               stride=1,
                               dilation=1):
        """
        Multi-grid unit
        :param block: Bottleneck
        :param planes:
        :param blocks:
        :param stride:
        :param dilation:
        :return:
        """
        downsample = None
        if stride != 1 or self.in_planes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.in_planes,
                          planes * block.expansion,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = list()
        layers.append(
            block(self.in_planes,
                  planes,
                  stride,
                  dilation=blocks[0] * dilation,
                  downsample=downsample))
        self.in_planes = planes * block.expansion
        for i in range(1, len(blocks)):
            layers.append(
                block(self.in_planes,
                      planes,
                      stride=1,
                      dilation=blocks[i] * dilation))

        return nn.Sequential(*layers)
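A hypothetical call site (names assumed), DeepLab-style: with grid rates [1, 2, 4] and base dilation 2, the three bottlenecks in the stage use dilations 2, 4, and 8.

        self.layer4 = self._make_multi_grid_layer(
            Bottleneck, 512, blocks=[1, 2, 4], stride=1, dilation=2)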
Example #13
    def __init__(self, in_channels, key_channels, scale=1, bn_type=None):
        super(ObjectAttentionBlock, self).__init__()
        self.scale = scale
        self.in_channels = in_channels
        self.key_channels = key_channels
        self.pool = nn.MaxPool2d(kernel_size=(scale, scale))
        self.f_pixel = nn.Sequential(
            nn.Conv2d(in_channels=self.in_channels, out_channels=self.key_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.key_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=self.key_channels, out_channels=self.key_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.key_channels),
            nn.ReLU(inplace=True)
        )
        self.f_object = nn.Sequential(
            nn.Conv2d(in_channels=self.in_channels, out_channels=self.key_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.key_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=self.key_channels, out_channels=self.key_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.key_channels),
            nn.ReLU(inplace=True)
        )
        self.f_down = nn.Sequential(
            nn.Conv2d(in_channels=self.in_channels, out_channels=self.key_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.key_channels),
            nn.ReLU(inplace=True)
        )
        self.f_up = nn.Sequential(
            nn.Conv2d(in_channels=self.key_channels, out_channels=self.in_channels, kernel_size=1, stride=1,
                      padding=0, bias=False),
            nn.BatchNorm2d(self.in_channels),  # must match the conv output (in_channels); key_channels here was a bug
            nn.ReLU(inplace=True)
        )
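The attention computation itself is not shown; below is a hedged sketch of a query/key/value forward consistent with these projections (F stands for torch.nn.functional; pixels form queries via f_pixel, object-region features supply keys and values via f_object and f_down).

    def forward(self, x, proxy):
        n, _, h, w = x.size()
        if self.scale > 1:
            x = self.pool(x)
        query = self.f_pixel(x).view(n, self.key_channels, -1).permute(0, 2, 1)     # N x HW x C_k
        key = self.f_object(proxy).view(n, self.key_channels, -1)                   # N x C_k x M
        value = self.f_down(proxy).view(n, self.key_channels, -1).permute(0, 2, 1)  # N x M x C_k
        sim_map = torch.matmul(query, key) * (self.key_channels ** -0.5)
        sim_map = F.softmax(sim_map, dim=-1)
        context = torch.matmul(sim_map, value).permute(0, 2, 1).contiguous()
        context = context.view(n, self.key_channels, *x.size()[2:])
        context = self.f_up(context)  # back to in_channels
        if self.scale > 1:
            context = F.interpolate(context, size=(h, w), mode='bilinear', align_corners=True)
        return context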
Example #14
    def __init__(self, num_class=10):
        super(VGG16, self).__init__()
        self.feature = modules.Sequential(
            # 1
            modules.Conv2d(3, 64, kernel_size=3, padding=1),
            modules.BatchNorm2d(64),
            modules.ReLU(True),
            # 2
            modules.Conv2d(64, 64, kernel_size=3, padding=1),
            modules.BatchNorm2d(64),
            modules.ReLU(True),
            modules.MaxPool2d(kernel_size=2, stride=2),
            # 3
            modules.Conv2d(64, 128, kernel_size=3, padding=1),
            modules.BatchNorm2d(128),
            modules.ReLU(True),
            # modules.MaxPool2d(kernel_size=2, stride=2),
            # 4
            modules.Conv2d(128, 128, kernel_size=3, padding=1),
            modules.BatchNorm2d(128),
            modules.ReLU(True),
            modules.MaxPool2d(kernel_size=2, stride=2),
            # 5
            modules.Conv2d(128, 256, kernel_size=3, padding=1),
            modules.BatchNorm2d(256),
            modules.ReLU(True),
            # 6
            modules.Conv2d(256, 256, kernel_size=3, padding=1),
            modules.BatchNorm2d(256),
            modules.ReLU(True),
            # 7
            modules.Conv2d(256, 256, kernel_size=3, padding=1),
            modules.BatchNorm2d(256),
            modules.ReLU(True),
            modules.MaxPool2d(kernel_size=2, stride=2),
            # 8
            modules.Conv2d(256, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            # 9
            modules.Conv2d(512, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            # 10
            modules.Conv2d(512, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            modules.MaxPool2d(kernel_size=2, stride=2),
            # 11
            modules.Conv2d(512, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            # 12
            modules.Conv2d(512, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            # 13
            modules.Conv2d(512, 512, kernel_size=3, padding=1),
            modules.BatchNorm2d(512),
            modules.ReLU(True),
            modules.MaxPool2d(kernel_size=2, stride=2),
            modules.AvgPool2d(kernel_size=1, stride=1),
        )
        # fully connected layers
        self.classifier = modules.Sequential(
            # 14
            modules.Linear(512, 4096),
            modules.ReLU(True),
            modules.Dropout(),
            # 15
            modules.Linear(4096, 4096),
            modules.ReLU(True),
            modules.Dropout(),
            # 16
            modules.Linear(4096, num_class),
        )
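A minimal forward sketch (assumed): with five stride-2 max pools, a 3 x 32 x 32 input (CIFAR scale) leaves the feature stack at 512 x 1 x 1, which matches Linear(512, 4096).

    def forward(self, x):
        x = self.feature(x)
        x = x.view(x.size(0), -1)  # flatten 512 x 1 x 1 -> 512
        return self.classifier(x)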