Example #1
    def __init__(self, num_classes, dim=64):
        super(Discriminator, self).__init__()

        self.conv1 = ConvLayer(in_dim=num_classes, out_dim=dim, kernel_size=5, stride=2,
                               padding=1, activation='lrelu')
        self.conv2 = ConvLayer(in_dim=dim, out_dim=dim * 2, kernel_size=3, stride=1,
                               padding=1, activation='lrelu')
        self.conv3 = ConvLayer(in_dim=dim * 2, out_dim=dim * 4, kernel_size=3, stride=1,
                               padding=1, activation='lrelu')
        self.conv4 = ConvLayer(in_dim=dim * 4, out_dim=dim * 8, kernel_size=3, stride=1,
                               padding=1, activation='lrelu')
        self.classifier = nn.Conv2d(dim * 8, 1, kernel_size=3, stride=1, padding=1)
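
Every example in this listing builds on a project-specific ConvLayer wrapper whose implementation is not shown here. The sketch below is only an assumption about what such a wrapper typically bundles, judging from the keyword arguments used in these examples (norm='bn', activation='relu'/'lrelu', use_bias, dilation, groups); the real ConvLayer may differ (for instance, it also accepts fixed_padding in the Xception example further down). Note that the discriminator's first convolution takes num_classes input channels, so it presumably consumes class score maps rather than RGB images.

import torch.nn as nn


class ConvLayerSketch(nn.Module):
    """Illustrative stand-in for the project's ConvLayer; not the actual implementation."""

    def __init__(self, in_dim, out_dim, kernel_size, stride=1, padding=0,
                 dilation=1, groups=1, use_bias=True, norm=None, activation=None):
        super().__init__()
        self.conv = nn.Conv2d(in_dim, out_dim, kernel_size, stride=stride,
                              padding=padding, dilation=dilation, groups=groups,
                              bias=use_bias)
        # Optional batch normalization, selected by norm='bn'.
        self.norm = nn.BatchNorm2d(out_dim) if norm == 'bn' else None
        # Optional non-linearity, selected by activation='relu' or 'lrelu'.
        if activation == 'relu':
            self.activation = nn.ReLU(inplace=True)
        elif activation == 'lrelu':
            self.activation = nn.LeakyReLU(0.2, inplace=True)
        else:
            self.activation = None

    def forward(self, x):
        x = self.conv(x)
        if self.norm is not None:
            x = self.norm(x)
        if self.activation is not None:
            x = self.activation(x)
        return x
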
Example #2
    def _make_layer(self, block, dim, num_blocks, stride=1, dilation=(1, 1)):
        downsample = None
        if (stride != 1 or self.in_dim != dim * block.expansion
                or dilation[0] in (2, 4)):
            downsample = nn.Sequential(
                ConvLayer(in_dim=self.in_dim,
                          out_dim=dim * block.expansion,
                          kernel_size=1,
                          stride=stride,
                          norm='bn',
                          use_bias=False))
            set_require_grad(downsample._modules['0'].norm, False)
        layers = []
        layers += [
            block(in_dim=self.in_dim,
                  out_dim=dim,
                  stride=stride,
                  downsample=downsample,
                  dilation=dilation)
        ]
        self.in_dim = dim * block.expansion
        for i in range(num_blocks - 1):
            layers += [
                block(in_dim=self.in_dim, out_dim=dim, dilation=dilation)
            ]

        return nn.Sequential(*layers)
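
The downsample branch above freezes its batch-norm parameters through set_require_grad, a helper that is not part of this listing. A minimal sketch of what such a helper would do (an assumption, not the project's actual code):

def set_require_grad(module, requires_grad):
    # Hypothetical helper: toggle gradient tracking for every parameter of a module,
    # e.g. to keep the 1x1 shortcut's BatchNorm weights fixed as done above.
    for param in module.parameters():
        param.requires_grad = requires_grad
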
Example #3
    def __init__(self, block, layers):
        super(ResNet, self).__init__()
        self.in_dim = 64
        self.layer1 = []
        planes = [64, 128, 256, 512]

        # stage 1
        self.layer1 += [
            ConvLayer(in_dim=3,
                      out_dim=64,
                      kernel_size=7,
                      stride=2,
                      padding=3,
                      norm='bn',
                      activation='relu',
                      use_bias=False)
        ]
        self.layer1 += [
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1, ceil_mode=True)
        ]
        self.layer1 = nn.Sequential(*self.layer1)

        # stage 2-5
        self.layer2 = self._make_layer(block, planes[0], layers[0])
        self.layer3 = self._make_layer(block, planes[1], layers[1], stride=2)
        self.layer4 = self._make_layer(block,
                                       planes[2],
                                       layers[2],
                                       stride=1,
                                       dilation=(2, 2))
        self.layer5 = self._make_layer(block,
                                       planes[3],
                                       layers[3],
                                       stride=1,
                                       dilation=(4, 4))
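
A hedged usage sketch: with stride 1 plus dilation (2, 2) and (4, 4) in the last two stages, this backbone only downsamples by a factor of 8 overall, as is common for semantic segmentation. The residual block class is not included in this listing, so Bottleneck below is a placeholder name for it.

# Illustrative only: `Bottleneck` stands in for the project's residual block
# (it must expose an `expansion` attribute); [3, 4, 6, 3] gives a ResNet-50-style depth.
model = ResNet(Bottleneck, [3, 4, 6, 3])
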
Example #4
 def _make_conv_layers(self, dim, convs, stride=1, dilation=1):
     layers = []
     for i in range(convs):
         layers += [
             ConvLayer(in_dim=self.in_dim,
                       out_dim=dim,
                       kernel_size=3,
                       stride=stride if i == 0 else 1,
                       padding=dilation,
                       use_bias=False,
                       dilation=dilation,
                       norm='bn',
                       activation='relu')
         ]
         self.in_dim = dim
     return nn.Sequential(*layers)
Example #5
    def _make_layers(self,
                     block,
                     dim,
                     num_blocks,
                     stride=1,
                     dilation=1,
                     new_level=True):
        assert dilation == 1 or dilation % 2 == 0
        downsample = None
        if stride != 1 or self.in_dim != dim * block.expansion:
            downsample = nn.Sequential(
                ConvLayer(in_dim=self.in_dim,
                          out_dim=dim * block.expansion,
                          kernel_size=1,
                          stride=stride,
                          norm='bn',
                          use_bias=False))
            # set_require_grad(downsample._modules['0'].norm, False)
        layers = []

        layers += [
            block(in_dim=self.in_dim,
                  out_dim=dim,
                  stride=stride,
                  downsample=downsample,
                  dilation=(1, 1) if dilation == 1 else
                  (dilation // 2 if new_level else dilation, dilation))
        ]

        self.in_dim = dim * block.expansion
        for i in range(num_blocks - 1):
            layers += [
                block(in_dim=self.in_dim,
                      out_dim=dim,
                      dilation=(dilation, dilation))
            ]

        return nn.Sequential(*layers)
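
The subtle part is the dilation of the first block: when a stage enters a new dilation level (new_level=True) the first block ramps up with half the target rate, otherwise it keeps the full rate. A small self-contained sketch of that expression, purely for illustration:

def first_block_dilation(dilation, new_level):
    # Mirrors the tuple passed to the first block above.
    return (1, 1) if dilation == 1 else (
        dilation // 2 if new_level else dilation, dilation)

print(first_block_dilation(1, True))    # (1, 1)
print(first_block_dilation(2, True))    # (1, 2): ramp up when entering a new level
print(first_block_dilation(4, False))   # (4, 4): keep the rate inside a level
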
Example #6
    def __init__(self,
                 block,
                 layers,
                 planes=(16, 32, 64, 128, 256, 512, 512, 512)):
        super(DRN, self).__init__()
        self.in_dim = planes[0]
        self.out_dim = planes[-1]
        self.layer0 = []
        self.layer0 += [
            ConvLayer(in_dim=3,
                      out_dim=planes[0],
                      kernel_size=7,
                      padding=3,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]
        self.layer0 = nn.Sequential(*self.layer0)
        self.layer1 = self._make_conv_layers(planes[0], layers[0], stride=1)
        self.layer2 = self._make_conv_layers(planes[1], layers[1], stride=2)

        self.layer3 = self._make_layers(block, planes[2], layers[2], stride=2)
        self.layer4 = self._make_layers(block, planes[3], layers[3], stride=2)
        self.layer5 = self._make_layers(block,
                                        planes[4],
                                        layers[4],
                                        dilation=2,
                                        new_level=False)
        self.layer6 = self._make_layers(block,
                                        planes[5],
                                        layers[5],
                                        dilation=4,
                                        new_level=False)

        self.layer7 = self._make_conv_layers(planes[6], layers[6], dilation=2)
        self.layer8 = self._make_conv_layers(planes[7], layers[7], dilation=1)
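
Usage sketch (illustrative): the eight entries of layers and planes configure layer1 through layer8, while layer0 is the fixed 7x7 stem. The residual block class is not shown here, so BasicBlock is a placeholder name; the depths below roughly follow the DRN-C-26 configuration.

# Illustrative only: `BasicBlock` stands in for the project's residual block.
model = DRN(BasicBlock, [1, 1, 2, 2, 2, 2, 1, 1])
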
Example #7
    def __init__(self, output_stride):
        super(AlignedXception, self).__init__()

        if output_stride == 16:
            entry_block3_stride = 2
            middle_block_dilation = 1
            exit_block_dilations = (1, 2)
        elif output_stride == 8:
            entry_block3_stride = 1
            middle_block_dilation = 2
            exit_block_dilations = (2, 4)
        else:
            raise NotImplementedError

        self.entry_flow = []
        self.middle_flow = []
        self.exit_flow = []

        # construct entry flow
        self.entry_flow += [
            ConvLayer(in_dim=3,
                      out_dim=32,
                      kernel_size=3,
                      stride=2,
                      padding=1,
                      use_bias=False,
                      activation='relu',
                      norm='bn')
        ]
        self.entry_flow += [
            ConvLayer(in_dim=32,
                      out_dim=64,
                      kernel_size=3,
                      stride=1,
                      padding=1,
                      use_bias=False,
                      activation='relu',
                      norm='bn')
        ]
        self.entry_flow += [
            XceptionBlock(64, 128, reps=2, stride=2, start_with_relu=False)
        ]
        self.entry_flow += [nn.ReLU(inplace=True)]
        self.entry_flow += [
            XceptionBlock(128,
                          256,
                          reps=2,
                          stride=2,
                          start_with_relu=False,
                          grow_first=True)
        ]
        self.entry_flow += [
            XceptionBlock(256,
                          728,
                          reps=2,
                          stride=entry_block3_stride,
                          start_with_relu=True,
                          grow_first=True,
                          is_last=True)
        ]

        # construct middle flow
        for i in range(16):
            self.middle_flow += [
                XceptionBlock(728,
                              728,
                              reps=3,
                              stride=1,
                              dilation=middle_block_dilation,
                              start_with_relu=True,
                              grow_first=True)
            ]

        # construct exit flow
        self.exit_flow += [
            XceptionBlock(728,
                          1024,
                          reps=2,
                          stride=1,
                          dilation=exit_block_dilations[0],
                          start_with_relu=True,
                          grow_first=False,
                          is_last=True)
        ]
        self.exit_flow += [nn.ReLU(inplace=True)]
        self.exit_flow += [
            ConvLayer(1024,
                      1024,
                      kernel_size=3,
                      dilation=exit_block_dilations[1],
                      norm='bn',
                      groups=1024,
                      use_bias=False,
                      fixed_padding=True)
        ]
        self.exit_flow += [
            ConvLayer(1024,
                      1536,
                      kernel_size=1,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]
        self.exit_flow += [
            ConvLayer(1536,
                      1536,
                      kernel_size=3,
                      dilation=exit_block_dilations[1],
                      norm='bn',
                      groups=1536,
                      use_bias=False,
                      fixed_padding=True)
        ]
        self.exit_flow += [
            ConvLayer(1536,
                      1536,
                      kernel_size=1,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]
        self.exit_flow += [
            ConvLayer(1536,
                      1536,
                      kernel_size=3,
                      dilation=exit_block_dilations[1],
                      norm='bn',
                      groups=1536,
                      use_bias=False,
                      fixed_padding=True)
        ]
        self.exit_flow += [
            ConvLayer(1536,
                      2048,
                      kernel_size=1,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]

        self.entry_flow = nn.Sequential(*self.entry_flow)
        self.middle_flow = nn.Sequential(*self.middle_flow)
        self.exit_flow = nn.Sequential(*self.exit_flow)
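
A short usage sketch: output_stride chooses between downsampling and dilation, as handled by the branch at the top of the constructor.

# Illustrative: output stride 16 keeps stride 2 in the third entry block, while
# output stride 8 switches to dilated convolutions in the middle and exit flows.
backbone = AlignedXception(output_stride=16)
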
Example #8
    def __init__(self,
                 stage_repeat_nums,
                 stage_out_dims,
                 num_classes,
                 block=shuffle_unit):
        super(ShuffleNetV2, self).__init__()

        assert len(stage_repeat_nums) == 3, \
            "the length of stage_repeat_nums is expected to be 3"
        assert len(stage_out_dims) == 5, \
            "the length of stage_out_dims is expected to be 5"

        in_dim = 3
        out_dim = stage_out_dims[0]
        self.layer1 = []
        self.layer2 = []
        self.layer3 = []
        self.layer4 = []
        self.layer5 = []

        self.layer1 += [
            ConvLayer(in_dim=in_dim,
                      out_dim=out_dim,
                      kernel_size=3,
                      stride=2,
                      padding=1,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]
        in_dim = out_dim
        self.max_pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # construct stage 2-4
        stage_names = [2, 3, 4]
        for name, repeat_num, stage_dim in zip(stage_names, stage_repeat_nums,
                                               stage_out_dims[1:]):
            cur = []
            cur += [block(in_dim, stage_dim, 2)]
            for i in range(repeat_num - 1):
                cur += [block(stage_dim, stage_dim, 1)]
            in_dim = stage_dim

            if name == 2:
                self.layer2 = cur
            elif name == 3:
                self.layer3 = cur
            elif name == 4:
                self.layer4 = cur

        out_dim = stage_out_dims[-1]
        self.layer5 += [
            ConvLayer(in_dim=in_dim,
                      out_dim=out_dim,
                      kernel_size=1,
                      stride=1,
                      padding=0,
                      use_bias=False,
                      norm='bn',
                      activation='relu')
        ]

        self.layer1 = nn.Sequential(*self.layer1)
        self.layer2 = nn.Sequential(*self.layer2)
        self.layer3 = nn.Sequential(*self.layer3)
        self.layer4 = nn.Sequential(*self.layer4)
        self.layer5 = nn.Sequential(*self.layer5)
        self.classifier = nn.Linear(out_dim, num_classes)
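
Usage sketch (illustrative): the constructor expects 3 stage repeat counts and 5 stage output widths; the values below follow the commonly used ShuffleNetV2 1.0x setting, not anything taken from this listing.

# Illustrative instantiation with the usual 1.0x configuration.
model = ShuffleNetV2(stage_repeat_nums=[4, 8, 4],
                     stage_out_dims=[24, 116, 232, 464, 1024],
                     num_classes=1000)
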
Example #9
 def __init__(self):
     super(VGG, self).__init__()
     self.vgg = []
     self.vgg += [
         ConvLayer(in_dim=3,
                   out_dim=64,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     self.vgg += [
         ConvLayer(in_dim=64,
                   out_dim=64,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     self.vgg += [nn.MaxPool2d(kernel_size=2, stride=2)]
     self.vgg += [
         ConvLayer(in_dim=64,
                   out_dim=128,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     self.vgg += [
         ConvLayer(in_dim=128,
                   out_dim=128,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     self.vgg += [nn.MaxPool2d(kernel_size=2, stride=2)]
     self.vgg += [
         ConvLayer(in_dim=128,
                   out_dim=256,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     for i in range(2):
         self.vgg += [
             ConvLayer(in_dim=256,
                       out_dim=256,
                       kernel_size=3,
                       padding=1,
                       activation='relu',
                       norm='bn')
         ]
     self.vgg += [nn.MaxPool2d(kernel_size=2, stride=2)]
     self.vgg += [
         ConvLayer(in_dim=256,
                   out_dim=512,
                   kernel_size=3,
                   padding=1,
                   activation='relu',
                   norm='bn')
     ]
     for i in range(7):
         if i == 2 or i == 6:
             self.vgg += [nn.MaxPool2d(kernel_size=2, stride=2)]
         else:
             self.vgg += [
                 ConvLayer(in_dim=512,
                           out_dim=512,
                           kernel_size=3,
                           padding=1,
                           activation='relu',
                           norm='bn')
             ]
     self.vgg = nn.Sequential(*self.vgg)
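
The stack above follows the 13-convolution / 5-pooling layout of VGG-16. A quick, purely illustrative sanity check (assuming the ConvLayer wrapper and the VGG class above are importable):

model = VGG()
convs = sum(isinstance(m, ConvLayer) for m in model.vgg)
pools = sum(isinstance(m, nn.MaxPool2d) for m in model.vgg)
print(convs, pools)  # expected: 13 5
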
Example #10
    def __init__(self, block=None, width_mult=1., output_stride=8):
        super(MobileNetV2, self).__init__()
        self.block = block
        in_dim = 32
        cur_stride = 1
        rate = 1
        self.mobilenet = []

        inverted_residual_setting = [
            # t: expansion ratio, c: output channels,
            # n: repeat count, s: stride
            [1, 16, 1, 1],
            [6, 24, 2, 2],
            [6, 32, 3, 2],
            [6, 64, 4, 2],
            [6, 96, 3, 1],
            [6, 160, 3, 2],
            [6, 320, 1, 1],
        ]
        in_dim = int(in_dim * width_mult)

        # build the first layer
        self.mobilenet += [
            ConvLayer(in_dim=3,
                      out_dim=in_dim,
                      kernel_size=3,
                      stride=2,
                      norm='bn',
                      padding=1,
                      use_bias=False,
                      activation='relu')
        ]
        cur_stride *= 2

        for t, c, n, s in inverted_residual_setting:
            if cur_stride == output_stride:
                stride = 1
                dilation = rate
                rate *= s
            else:
                stride = s
                dilation = 1
                cur_stride *= s
            out_dim = int(c * width_mult)
            for i in range(n):
                if i == 0:
                    self.mobilenet += [
                        self.block(in_dim=in_dim,
                                   out_dim=out_dim,
                                   stride=stride,
                                   dilation=dilation,
                                   expand_ratio=t)
                    ]
                else:
                    self.mobilenet += [
                        self.block(in_dim=in_dim,
                                   out_dim=out_dim,
                                   stride=1,
                                   dilation=dilation,
                                   expand_ratio=t)
                    ]
                in_dim = out_dim

        self.mobilenet = nn.Sequential(*self.mobilenet)
        self.low_level_feature = self.mobilenet[0:4]
        self.high_level_feature = self.mobilenet[4:]
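
Usage sketch (illustrative): block is expected to be an inverted-residual module accepting in_dim, out_dim, stride, dilation and expand_ratio; InvertedResidual below is a placeholder name for it. The slices at the end expose the first four modules as low-level features and the remainder as high-level features.

# Illustrative only: `InvertedResidual` stands in for the project's block class.
backbone = MobileNetV2(block=InvertedResidual, width_mult=1.0, output_stride=8)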