Code Example #1
    def __init__(self, ConvLayer, in_planes, planes, stride=1):
        # the custom base class is assumed to store ConvLayer as self.ConvLayer (used below)
        super(BasicBlock, self).__init__(ConvLayer)
        self.conv1 = self.ConvLayer(in_planes,
                                    planes,
                                    kernel_size=3,
                                    stride=stride,
                                    padding=1,
                                    bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = self.ConvLayer(planes,
                                    planes,
                                    kernel_size=3,
                                    stride=1,
                                    padding=1,
                                    bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        # projection shortcut when the spatial size or channel count changes
        # (expansion is a class attribute of the block; 1 in the standard BasicBlock)
        self.shortcut = CustomSequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = CustomSequential(
                self.ConvLayer(in_planes,
                               self.expansion * planes,
                               kernel_size=1,
                               stride=stride,
                               bias=False),
                nn.BatchNorm2d(self.expansion * planes))
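CustomSequential is used throughout these examples but never shown. A minimal
stand-in, assuming it behaves exactly like nn.Sequential (the real class in
this codebase may forward extra arguments to its children):

import torch.nn as nn

class CustomSequential(nn.Sequential):
    """Minimal sketch: assumed equivalent to nn.Sequential for these examples."""
    pass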
Code Example #2
    def __init__(self, num_classes, backbone, BatchNorm, ConvLayer, wm=1.0):
        super(Decoder, self).__init__()
        if backbone == 'resnet' or backbone == 'drn':
            low_level_inplanes = 256
        elif backbone == 'xception':
            low_level_inplanes = 128
        elif backbone == 'mobilenet':
            low_level_inplanes = 24
        else:
            raise NotImplementedError

        self.conv1 = ConvLayer(low_level_inplanes, int(48 * wm), 1, bias=False)
        self.bn1 = BatchNorm(int(48 * wm))
        # self.conv1 = ConvLayer(int(low_level_inplanes*wm), int(48*wm), 1, bias=False)
        # self.bn1 = BatchNorm(int(48*wm))
        self.relu = nn.ReLU()
        # 304 = 256 (ASPP output) + 48 (reduced low-level features), scaled by wm
        self.last_conv = CustomSequential(
            ConvLayer(int(304 * wm),
                      int(256 * wm),
                      kernel_size=3,
                      stride=1,
                      padding=1,
                      bias=False), BatchNorm(int(256 * wm)), nn.ReLU(),
            nn.Dropout(0.5),
            ConvLayer(int(256 * wm),
                      int(256 * wm),
                      kernel_size=3,
                      stride=1,
                      padding=1,
                      bias=False), BatchNorm(int(256 * wm)), nn.ReLU(),
            nn.Dropout(0.1),
            ConvLayer(int(256 * wm), num_classes, kernel_size=1, stride=1))
        self._init_weight()
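The matching forward pass is not shown above. A sketch following the standard
DeepLabV3+ decoder, which also explains the int(304 * wm) input of last_conv
(256*wm ASPP channels plus 48*wm reduced low-level channels, exact for wm = 1.0):

    # sketch only; assumes "import torch" and "import torch.nn.functional as F"
    def forward(self, x, low_level_feat):
        # reduce the low-level backbone features to 48*wm channels
        low_level_feat = self.relu(self.bn1(self.conv1(low_level_feat)))
        # upsample the ASPP output to the low-level resolution and concatenate
        x = F.interpolate(x, size=low_level_feat.size()[2:],
                          mode='bilinear', align_corners=True)
        x = torch.cat((x, low_level_feat), dim=1)
        return self.last_conv(x)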
Code Example #3
    def _make_layer(self, block, planes, num_blocks, stride):
        # only the first block may downsample (stride > 1); the rest keep stride 1
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.ConvLayer, self.in_planes, planes,
                                stride))
            self.in_planes = planes * block.expansion
        return CustomSequential(*layers)
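Hypothetical usage from the surrounding ResNet constructor (stage widths and
block counts follow ResNet-18; the attribute names are illustrative). Note that
_make_layer advances self.in_planes as it goes, so the calls must run in order:

        self.in_planes = 64
        self.layer1 = self._make_layer(BasicBlock, 64, num_blocks=2, stride=1)
        self.layer2 = self._make_layer(BasicBlock, 128, num_blocks=2, stride=2)
        self.layer3 = self._make_layer(BasicBlock, 256, num_blocks=2, stride=2)
        self.layer4 = self._make_layer(BasicBlock, 512, num_blocks=2, stride=2)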
Code Example #4
    def __init__(self, inp, oup, stride, expand_ratio, conv=nn.Conv2d):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]

        hidden_dim = round(inp * expand_ratio)
        # the residual (identity) path is usable only when input and output shapes match
        self.identity = stride == 1 and inp == oup

        if expand_ratio == 1:
            self.conv = CustomSequential(
                # dw: depthwise 3x3 convolution
                conv(hidden_dim,
                     hidden_dim,
                     3,
                     stride,
                     1,
                     groups=hidden_dim,
                     bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear: 1x1 projection with no activation (linear bottleneck)
                conv(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
        else:
            self.conv = CustomSequential(
                # pw: 1x1 expansion to hidden_dim channels
                conv(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # dw: depthwise 3x3 convolution
                conv(hidden_dim,
                     hidden_dim,
                     3,
                     stride,
                     1,
                     groups=hidden_dim,
                     bias=False),
                nn.BatchNorm2d(hidden_dim),
                nn.ReLU6(inplace=True),
                # pw-linear: 1x1 projection with no activation (linear bottleneck)
                conv(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
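The forward pass that pairs with the identity flag is not shown above; the
MobileNetV2 reference implementation uses:

    def forward(self, x):
        if self.identity:
            # residual connection, only when stride == 1 and inp == oup
            return x + self.conv(x)
        return self.conv(x)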
Code Example #5
    def __init__(self, backbone, output_stride, BatchNorm, ConvLayer, wm=1.0):
        super(ASPP, self).__init__()
        if backbone == 'drn':
            inplanes = 512
        elif backbone == 'mobilenet':
            inplanes = 320
        else:
            inplanes = 2048
        if output_stride == 16:
            dilations = [1, 6, 12, 18]
        elif output_stride == 8:
            dilations = [1, 12, 24, 36]
        else:
            raise NotImplementedError

        inplanes_wm = inplanes  # width multiplier not applied here (was: int(inplanes * wm))
        planes_wm = int(256 * wm)
        self.aspp1 = _ASPPModule(inplanes_wm,
                                 planes_wm,
                                 1,
                                 padding=0,
                                 dilation=dilations[0],
                                 BatchNorm=BatchNorm,
                                 ConvLayer=ConvLayer)
        self.aspp2 = _ASPPModule(inplanes_wm,
                                 planes_wm,
                                 3,
                                 padding=dilations[1],
                                 dilation=dilations[1],
                                 BatchNorm=BatchNorm,
                                 ConvLayer=ConvLayer)
        self.aspp3 = _ASPPModule(inplanes_wm,
                                 planes_wm,
                                 3,
                                 padding=dilations[2],
                                 dilation=dilations[2],
                                 BatchNorm=BatchNorm,
                                 ConvLayer=ConvLayer)
        self.aspp4 = _ASPPModule(inplanes_wm,
                                 planes_wm,
                                 3,
                                 padding=dilations[3],
                                 dilation=dilations[3],
                                 BatchNorm=BatchNorm,
                                 ConvLayer=ConvLayer)

        self.global_avg_pool = CustomSequential(
            nn.AdaptiveAvgPool2d((1, 1)),
            ConvLayer(inplanes_wm, int(256 * wm), 1, stride=1, bias=False),
            BatchNorm(int(256 * wm)), nn.ReLU())
        # 1280*wm input channels: five branches (aspp1-4 + global pool) of 256*wm each
        self.conv1 = ConvLayer(int(1280 * wm), int(256 * wm), 1, bias=False)
        self.bn1 = BatchNorm(int(256 * wm))
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(0.5)
        self._init_weight()
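A sketch of the matching forward pass, following the standard DeepLabV3+ ASPP
head; concatenating the five 256*wm-channel branches accounts for the
int(1280 * wm) input of conv1 (exact for wm = 1.0):

    # sketch only; assumes "import torch" and "import torch.nn.functional as F"
    def forward(self, x):
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)
        x5 = self.global_avg_pool(x)
        x5 = F.interpolate(x5, size=x4.size()[2:],
                           mode='bilinear', align_corners=True)
        x = torch.cat((x1, x2, x3, x4, x5), dim=1)  # 5 branches of 256*wm channels
        x = self.relu(self.bn1(self.conv1(x)))
        return self.dropout(x)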
Code Example #6
    def __init__(self, conv, num_classes=200, width_multiplier=0.35):
        # the custom base class is assumed to store conv as self.ConvLayer (used below)
        super(MobileNetV2, self).__init__(conv)
        # inverted residual block settings
        self.cfgs = [
            # t (expand ratio), c (output channels), n (repeats), s (stride of first block)
            [1, 16, 1, 1],
            [6, 24, 2, 2],
            [6, 32, 3, 2],
            [6, 64, 4, 2],
            [6, 96, 3, 1],
            [6, 160, 3, 2],
            [6, 320, 1, 1],
        ]

        # building first layer
        input_channel = _make_divisible(32 * width_multiplier,
                                        4 if width_multiplier == 0.1 else 8)
        layers = [conv_3x3_bn(3, input_channel, 2, conv=nn.Conv2d)]  # stem keeps a plain nn.Conv2d
        # building inverted residual blocks
        block = InvertedResidual
        for t, c, n, s in self.cfgs:
            output_channel = _make_divisible(
                c * width_multiplier, 4 if width_multiplier == 0.1 else 8)
            for i in range(n):
                layers.append(
                    block(input_channel,
                          output_channel,
                          s if i == 0 else 1,
                          t,
                          conv=self.ConvLayer))
                input_channel = output_channel
        self.features = CustomSequential(*layers)
        # building last several layers
        output_channel = _make_divisible(1280 * width_multiplier,
                                         4 if width_multiplier == 0.1 else
                                         8) if width_multiplier > 1.0 else 1280
        self.conv = conv_1x1_bn(input_channel,
                                output_channel,
                                conv=self.ConvLayer)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier = nn.Linear(output_channel, num_classes)

        self._initialize_weights()
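_make_divisible is called above but not shown. The canonical MobileNetV2 helper
rounds a channel count to the nearest multiple of divisor while never dropping
below 90% of the original value (the version in this codebase may differ):

def _make_divisible(v, divisor, min_value=None):
    # round v to the nearest multiple of divisor, but not below min_value
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # ensure rounding down does not remove more than 10% of the channels
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v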
Code Example #7
def conv_1x1_bn(inp, oup, conv=nn.Conv2d):
    return CustomSequential(conv(inp, oup, 1, 1, 0, bias=False),
                            nn.BatchNorm2d(oup), nn.ReLU6(inplace=True))
Code Example #8
def conv_3x3_bn(inp, oup, stride, conv=nn.Conv2d):
    return CustomSequential(conv(inp, oup, 3, stride, 1, bias=False),
                            nn.BatchNorm2d(oup), nn.ReLU6(inplace=True))
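A quick shape check for the two helpers (assuming CustomSequential behaves like
nn.Sequential, as in the stand-in after Code Example #1):

import torch

stem = conv_3x3_bn(3, 32, stride=2)   # 3x3 stride-2 stem halves spatial size
head = conv_1x1_bn(32, 1280)          # 1x1 conv changes channels only
x = torch.randn(1, 3, 224, 224)
print(stem(x).shape)        # torch.Size([1, 32, 112, 112])
print(head(stem(x)).shape)  # torch.Size([1, 1280, 112, 112])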