Example #1: ResidualBlock
 def __init__(self, channels):
     super(ResidualBlock, self).__init__()
     self.conv1 = nn.Conv2d(channels, channels, kernel_size=3, padding=1)
     self.bn1 = nn.BatchNorm2d(channels)
     self.prelu = nn.PReLU()
     self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, padding=1)
     self.bn2 = nn.BatchNorm2d(channels)
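
The snippet ends at the constructor; a minimal sketch of the matching forward pass (the identity skip is an assumption, following the SRGAN-style residual block this constructor resembles):

 def forward(self, x):
     out = self.prelu(self.bn1(self.conv1(x)))
     out = self.bn2(self.conv2(out))
     return x + out  # identity skip: add the block input back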
Example #2: IBasicBlock
 def __init__(
     self,
     inplanes,
     planes,
     stride=1,
     downsample=None,
     groups=1,
     base_width=64,
     dilation=1,
 ):
     super(IBasicBlock, self).__init__()
     if groups != 1 or base_width != 64:
         raise ValueError(
             "BasicBlock only supports groups=1 and base_width=64")
     if dilation > 1:
         raise NotImplementedError(
             "Dilation > 1 not supported in BasicBlock")
     self.bn1 = nn.BatchNorm2d(
         inplanes,
         eps=1e-05,
     )
     self.conv1 = conv3x3(inplanes, planes)
     self.bn2 = nn.BatchNorm2d(
         planes,
         eps=1e-05,
     )
     self.prelu = nn.PReLU(planes)
     self.conv2 = conv3x3(planes, planes, stride)
     self.bn3 = nn.BatchNorm2d(
         planes,
         eps=1e-05,
     )
     self.downsample = downsample
     self.stride = stride
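
A sketch of the forward pass this constructor implies (a hedged reconstruction of the insightface-style "improved" block, with BN placed before the first convolution):

 def forward(self, x):
     identity = x
     out = self.bn1(x)  # pre-normalization: BN before the first conv
     out = self.conv1(out)
     out = self.prelu(self.bn2(out))
     out = self.conv2(out)
     out = self.bn3(out)
     if self.downsample is not None:
         identity = self.downsample(x)
     return out + identity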
Example #3: Bottleneck
    def __init__(self, in_planes, out_planes, stride, groups):
        super(Bottleneck, self).__init__()
        self.stride = stride

        mid_planes = out_planes // 4
        # the first stage's 24-channel input is too thin for a grouped 1x1 conv
        g = 1 if in_planes == 24 else groups
        self.conv1 = nn.Conv2d(in_planes,
                               mid_planes,
                               kernel_size=1,
                               groups=g,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(mid_planes)
        self.shuffle1 = ShuffleBlock(groups=g)
        self.conv2 = nn.Conv2d(mid_planes,
                               mid_planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=mid_planes,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(mid_planes)
        self.conv3 = nn.Conv2d(mid_planes,
                               out_planes,
                               kernel_size=1,
                               groups=groups,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(out_planes)

        self.shortcut = nn.Sequential()
        if stride == 2:
            self.shortcut = nn.Sequential(nn.AvgPool2d(3, stride=2, padding=1))
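
ShuffleBlock is referenced but not defined here; a self-contained sketch of the standard channel-shuffle operation it is assumed to implement:

import torch
import torch.nn as nn

class ShuffleBlock(nn.Module):
    def __init__(self, groups):
        super().__init__()
        self.groups = groups

    def forward(self, x):
        # (N, C, H, W) -> (N, g, C/g, H, W), swap the two group axes,
        # then flatten back so channels from different groups interleave
        n, c, h, w = x.size()
        g = self.groups
        return x.view(n, g, c // g, h, w).transpose(1, 2).reshape(n, c, h, w)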
Example #4: Block
    def __init__(self, w_in, w_out, stride, group_width, bottleneck_ratio,
                 se_ratio):
        super(Block, self).__init__()
        # 1x1
        w_b = int(round(w_out * bottleneck_ratio))
        self.conv1 = nn.Conv2d(w_in, w_b, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(w_b)
        # 3x3
        num_groups = w_b // group_width
        self.conv2 = nn.Conv2d(w_b,
                               w_b,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=num_groups,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(w_b)
        # se
        self.with_se = se_ratio > 0
        if self.with_se:
            w_se = int(round(w_in * se_ratio))
            self.se = SE(w_b, w_se)
        # 1x1
        self.conv3 = nn.Conv2d(w_b, w_out, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(w_out)

        self.shortcut = nn.Sequential()
        if stride != 1 or w_in != w_out:
            self.shortcut = nn.Sequential(
                nn.Conv2d(w_in,
                          w_out,
                          kernel_size=1,
                          stride=stride,
                          bias=False), nn.BatchNorm2d(w_out))
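
SE(w_b, w_se) refers to an external squeeze-and-excitation module; a minimal sketch consistent with that call signature (an assumption, since the source class is not shown):

import torch
import torch.nn as nn
import torch.nn.functional as F

class SE(nn.Module):
    def __init__(self, in_planes, se_planes):
        super().__init__()
        self.se1 = nn.Conv2d(in_planes, se_planes, kernel_size=1)
        self.se2 = nn.Conv2d(se_planes, in_planes, kernel_size=1)

    def forward(self, x):
        out = F.adaptive_avg_pool2d(x, 1)   # squeeze: one value per channel
        out = F.relu(self.se1(out))
        out = torch.sigmoid(self.se2(out))  # per-channel gates in (0, 1)
        return x * out                      # excite: rescale input channels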
Example #5: CellB
 def __init__(self, in_planes, out_planes, stride=1):
     super(CellB, self).__init__()
     self.stride = stride
     # Left branch
     self.sep_conv1 = SepConv(in_planes,
                              out_planes,
                              kernel_size=7,
                              stride=stride)
     self.sep_conv2 = SepConv(in_planes,
                              out_planes,
                              kernel_size=3,
                              stride=stride)
     # Right branch
     self.sep_conv3 = SepConv(in_planes,
                              out_planes,
                              kernel_size=5,
                              stride=stride)
     if stride == 2:
         self.conv1 = nn.Conv2d(in_planes,
                                out_planes,
                                kernel_size=1,
                                stride=1,
                                padding=0,
                                bias=False)
         self.bn1 = nn.BatchNorm2d(out_planes)
     # Reduce channels
     self.conv2 = nn.Conv2d(2 * out_planes,
                            out_planes,
                            kernel_size=1,
                            stride=1,
                            padding=0,
                            bias=False)
     self.bn2 = nn.BatchNorm2d(out_planes)
Example #6: _InvertedResidual
 def __init__(self,
              in_ch,
              out_ch,
              kernel_size,
              stride,
              expansion_factor,
              bn_momentum=0.1):
     super(_InvertedResidual, self).__init__()
     assert stride in [1, 2]
     assert kernel_size in [3, 5]
     mid_ch = in_ch * expansion_factor
     self.apply_residual = in_ch == out_ch and stride == 1
     self.layers = nn.Sequential(
         # Pointwise
         nn.Conv2d(in_ch, mid_ch, 1, bias=False),
         nn.BatchNorm2d(mid_ch, momentum=bn_momentum),
         nn.ReLU(inplace=True),
         # Depthwise
         nn.Conv2d(
             mid_ch,
             mid_ch,
             kernel_size,
             padding=kernel_size // 2,
             stride=stride,
             groups=mid_ch,
             bias=False,
         ),
         nn.BatchNorm2d(mid_ch, momentum=bn_momentum),
         nn.ReLU(inplace=True),
         # Linear pointwise. Note that there's no activation.
         nn.Conv2d(mid_ch, out_ch, 1, bias=False),
         nn.BatchNorm2d(out_ch, momentum=bn_momentum),
     )
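
The forward pass that pairs with apply_residual is a one-line gate (consistent with the torchvision MNASNet implementation this snippet matches):

 def forward(self, x):
     if self.apply_residual:
         return self.layers(x) + x  # skip only when shapes allow it
     return self.layers(x)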
Example #7
    def __init__(self, inp: int, oup: int, stride: int) -> None:
        super().__init__()

        if not (1 <= stride <= 3):
            raise ValueError("illegal stride value")
        self.stride = stride

        branch_features = oup // 2
        assert (self.stride != 1) or (inp == branch_features << 1)

        if self.stride > 1:
            self.branch1 = nn.Sequential(
                self.depthwise_conv(inp,
                                    inp,
                                    kernel_size=3,
                                    stride=self.stride,
                                    padding=1),
                nn.BatchNorm2d(inp),
                nn.Conv2d(inp,
                          branch_features,
                          kernel_size=1,
                          stride=1,
                          padding=0,
                          bias=False),
                nn.BatchNorm2d(branch_features),
                nn.ReLU(inplace=True),
            )
        else:
            self.branch1 = nn.Sequential()

        self.branch2 = nn.Sequential(
            nn.Conv2d(
                inp if (self.stride > 1) else branch_features,
                branch_features,
                kernel_size=1,
                stride=1,
                padding=0,
                bias=False,
            ),
            nn.BatchNorm2d(branch_features),
            nn.ReLU(inplace=True),
            self.depthwise_conv(
                branch_features,
                branch_features,
                kernel_size=3,
                stride=self.stride,
                padding=1,
            ),
            nn.BatchNorm2d(branch_features),
            nn.Conv2d(
                branch_features,
                branch_features,
                kernel_size=1,
                stride=1,
                padding=0,
                bias=False,
            ),
            nn.BatchNorm2d(branch_features),
            nn.ReLU(inplace=True),
        )
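
A sketch of the matching forward pass (following the torchvision ShuffleNetV2 unit; channel_shuffle is the reshape-transpose operation sketched under Example #3):

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        if self.stride == 1:
            # split channels: one half passes through, the other is transformed
            x1, x2 = x.chunk(2, dim=1)
            out = torch.cat((x1, self.branch2(x2)), dim=1)
        else:
            out = torch.cat((self.branch1(x), self.branch2(x)), dim=1)
        return channel_shuffle(out, 2)  # mix information across the halves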
Example #8: DLA
    def __init__(self, block=BasicBlock, num_classes=10):
        super(DLA, self).__init__()
        self.base = nn.Sequential(
            nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(16),
            nn.ReLU(True)
        )

        self.layer1 = nn.Sequential(
            nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(16),
            nn.ReLU(True)
        )

        self.layer2 = nn.Sequential(
            nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU(True)
        )

        self.layer3 = Tree(block,  32,  64, level=1, stride=1)
        self.layer4 = Tree(block,  64, 128, level=2, stride=2)
        self.layer5 = Tree(block, 128, 256, level=2, stride=2)
        self.layer6 = Tree(block, 256, 512, level=1, stride=2)
        self.linear = nn.Linear(512, num_classes)
Example #9: PreActBlock
    def __init__(self, in_planes, planes, stride=1):
        super(PreActBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.conv1 = nn.Conv2d(in_planes,
                               planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes,
                               planes,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               bias=False)

        if stride != 1 or in_planes != planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes,
                          planes,
                          kernel_size=1,
                          stride=stride,
                          bias=False))

        # SE layers
        self.fc1 = nn.Conv2d(planes, planes // 16, kernel_size=1)
        self.fc2 = nn.Conv2d(planes // 16, planes, kernel_size=1)
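
A hedged reconstruction of the forward pass (pre-activation ordering plus the SE gate; F is torch.nn.functional, and the hasattr check covers the conditionally defined shortcut):

    def forward(self, x):
        out = F.relu(self.bn1(x))  # pre-activation: BN and ReLU before conv
        shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x
        out = self.conv1(out)
        out = self.conv2(F.relu(self.bn2(out)))
        # squeeze-and-excitation: global pool, two 1x1 convs, sigmoid gate
        w = F.adaptive_avg_pool2d(out, 1)
        w = torch.sigmoid(self.fc2(F.relu(self.fc1(w))))
        return out * w + shortcut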
Example #10: Bottleneck
    def __init__(self, last_planes, in_planes, out_planes, dense_depth, stride,
                 first_layer):
        super(Bottleneck, self).__init__()
        self.out_planes = out_planes
        self.dense_depth = dense_depth

        self.conv1 = nn.Conv2d(last_planes,
                               in_planes,
                               kernel_size=1,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.conv2 = nn.Conv2d(in_planes,
                               in_planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=32,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(in_planes)
        self.conv3 = nn.Conv2d(in_planes,
                               out_planes + dense_depth,
                               kernel_size=1,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(out_planes + dense_depth)

        self.shortcut = nn.Sequential()
        if first_layer:
            self.shortcut = nn.Sequential(
                nn.Conv2d(last_planes,
                          out_planes + dense_depth,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(out_planes + dense_depth))
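
The dual-path behaviour only appears in the forward pass; a sketch reconstructed from the stored out_planes/dense_depth split (F is torch.nn.functional):

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        x = self.shortcut(x)
        d = self.out_planes
        # residual path on the first d channels, dense concat path on the rest
        out = torch.cat([x[:, :d] + out[:, :d], x[:, d:], out[:, d:]], dim=1)
        return F.relu(out)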
Example #11: BasicBlock
    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes,
                               planes,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes,
                               planes,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes,
                          self.expansion * planes,
                          kernel_size=1,
                          stride=stride,
                          bias=False), nn.BatchNorm2d(self.expansion * planes))
Example #12: Block
    def __init__(self,
                 in_planes,
                 cardinality=32,
                 bottleneck_width=4,
                 stride=1):
        super(Block, self).__init__()
        group_width = cardinality * bottleneck_width
        self.conv1 = nn.Conv2d(in_planes,
                               group_width,
                               kernel_size=1,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(group_width)
        self.conv2 = nn.Conv2d(group_width,
                               group_width,
                               kernel_size=3,
                               stride=stride,
                               padding=1,
                               groups=cardinality,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(group_width)
        self.conv3 = nn.Conv2d(group_width,
                               self.expansion * group_width,
                               kernel_size=1,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * group_width)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * group_width:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes,
                          self.expansion * group_width,
                          kernel_size=1,
                          stride=stride,
                          bias=False),
                nn.BatchNorm2d(self.expansion * group_width))
Example #13: GhostBottleneck
    def __init__(
        self,
        in_chs,
        mid_chs,
        out_chs,
        dw_kernel_size=3,
        stride=1,
        act_layer=nn.ReLU,
        se_ratio=0.0,
    ):
        super(GhostBottleneck, self).__init__()
        has_se = se_ratio is not None and se_ratio > 0.0
        self.stride = stride

        # Point-wise expansion
        self.ghost1 = GhostModule(in_chs, mid_chs, relu=True)

        # Depth-wise convolution
        if self.stride > 1:
            self.conv_dw = nn.Conv2d(
                mid_chs,
                mid_chs,
                dw_kernel_size,
                stride=stride,
                padding=(dw_kernel_size - 1) // 2,
                groups=mid_chs,
                bias=False,
            )
            self.bn_dw = nn.BatchNorm2d(mid_chs)

        # Squeeze-and-excitation
        if has_se:
            self.se = SqueezeExcite(mid_chs, se_ratio=se_ratio)
        else:
            self.se = None

        # Point-wise linear projection
        self.ghost2 = GhostModule(mid_chs, out_chs, relu=False)

        # shortcut
        if in_chs == out_chs and self.stride == 1:
            self.shortcut = nn.Sequential()
        else:
            self.shortcut = nn.Sequential(
                nn.Conv2d(
                    in_chs,
                    in_chs,
                    dw_kernel_size,
                    stride=stride,
                    padding=(dw_kernel_size - 1) // 2,
                    groups=in_chs,
                    bias=False,
                ),
                nn.BatchNorm2d(in_chs),
                nn.Conv2d(in_chs, out_chs, 1, stride=1, padding=0, bias=False),
                nn.BatchNorm2d(out_chs),
            )
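
A sketch of the matching forward pass (a hedged reconstruction following the reference GhostNet code):

    def forward(self, x):
        residual = x
        x = self.ghost1(x)                   # point-wise expansion
        if self.stride > 1:
            x = self.bn_dw(self.conv_dw(x))  # depth-wise downsampling
        if self.se is not None:
            x = self.se(x)
        x = self.ghost2(x)                   # linear projection, no activation
        return x + self.shortcut(residual)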
Example #14
 def __init__(self, in_channels, out_channels):
     super().__init__()
     self.double_conv = nn.Sequential(
         nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
         nn.BatchNorm2d(out_channels),
         nn.ReLU(inplace=True),
         nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
         nn.BatchNorm2d(out_channels),
         nn.ReLU(inplace=True),
     )
Example #15: Bottleneck
 def __init__(self, in_planes, growth_rate):
     super(Bottleneck, self).__init__()
     self.bn1 = nn.BatchNorm2d(in_planes)
     self.conv1 = nn.Conv2d(in_planes,
                            4 * growth_rate,
                            kernel_size=1,
                            bias=False)
     self.bn2 = nn.BatchNorm2d(4 * growth_rate)
     self.conv2 = nn.Conv2d(4 * growth_rate,
                            growth_rate,
                            kernel_size=3,
                            padding=1,
                            bias=False)
Example #16
    def __init__(
        self,
        stages_repeats: List[int],
        stages_out_channels: List[int],
        num_classes: int = 1000,
        inverted_residual: Callable[..., nn.Module] = InvertedResidual,
    ) -> None:
        super().__init__()

        if len(stages_repeats) != 3:
            raise ValueError(
                "expected stages_repeats as list of 3 positive ints")
        if len(stages_out_channels) != 5:
            raise ValueError(
                "expected stages_out_channels as list of 5 positive ints")
        self._stage_out_channels = stages_out_channels

        input_channels = 3
        output_channels = self._stage_out_channels[0]
        self.conv1 = nn.Sequential(
            nn.Conv2d(input_channels, output_channels, 3, 2, 1, bias=False),
            nn.BatchNorm2d(output_channels),
            nn.ReLU(inplace=True),
        )
        input_channels = output_channels

        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        # Static annotations for mypy
        self.stage2: nn.Sequential
        self.stage3: nn.Sequential
        self.stage4: nn.Sequential
        stage_names = ["stage{}".format(i) for i in [2, 3, 4]]
        for name, repeats, output_channels in zip(
                stage_names, stages_repeats, self._stage_out_channels[1:]):
            seq = [inverted_residual(input_channels, output_channels, 2)]
            for i in range(repeats - 1):
                seq.append(
                    inverted_residual(output_channels, output_channels, 1))
            setattr(self, name, nn.Sequential(*seq))
            input_channels = output_channels

        output_channels = self._stage_out_channels[-1]
        self.conv5 = nn.Sequential(
            nn.Conv2d(input_channels, output_channels, 1, 1, 0, bias=False),
            nn.BatchNorm2d(output_channels),
            nn.ReLU(inplace=True),
        )

        self.fc = nn.Linear(output_channels, num_classes)
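
The forward pass for this network (consistent with the torchvision ShuffleNetV2 implementation this constructor matches):

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.conv1(x)
        x = self.maxpool(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.conv5(x)
        x = x.mean([2, 3])  # global average pool over H and W
        return self.fc(x)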
Example #17: Block
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride,
                 expand_ratio=1,
                 se_ratio=0.,
                 drop_rate=0.):
        super(Block, self).__init__()
        self.stride = stride
        self.drop_rate = drop_rate
        self.expand_ratio = expand_ratio

        # Expansion
        channels = expand_ratio * in_channels
        self.conv1 = nn.Conv2d(in_channels,
                               channels,
                               kernel_size=1,
                               stride=1,
                               padding=0,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(channels)

        # Depthwise conv
        self.conv2 = nn.Conv2d(channels,
                               channels,
                               kernel_size=kernel_size,
                               stride=stride,
                               padding=(1 if kernel_size == 3 else 2),
                               groups=channels,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(channels)

        # SE layers
        se_channels = int(in_channels * se_ratio)
        self.se = SE(channels, se_channels)

        # Output
        self.conv3 = nn.Conv2d(channels,
                               out_channels,
                               kernel_size=1,
                               stride=1,
                               padding=0,
                               bias=False)
        self.bn3 = nn.BatchNorm2d(out_channels)

        # Skip connection when input and output shapes match (MobileNetV2 style)
        self.has_skip = (stride == 1) and (in_channels == out_channels)
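
drop_rate is stored for stochastic depth on the skip connection; a self-contained sketch of the usual drop-connect helper (an assumption, since the source helper is not shown):

import torch

def drop_connect(x, drop_rate):
    # stochastic depth: zero out whole samples and rescale the survivors,
    # so the expected activation is unchanged; apply only during training
    keep_rate = 1.0 - drop_rate
    mask = torch.empty(x.size(0), 1, 1, 1, dtype=x.dtype,
                       device=x.device).bernoulli_(keep_rate)
    return x / keep_rate * mask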
Example #18: DPN
    def __init__(self, cfg):
        super(DPN, self).__init__()
        in_planes, out_planes = cfg['in_planes'], cfg['out_planes']
        num_blocks, dense_depth = cfg['num_blocks'], cfg['dense_depth']

        self.conv1 = nn.Conv2d(3,
                               64,
                               kernel_size=3,
                               stride=1,
                               padding=1,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.last_planes = 64
        self.layer1 = self._make_layer(in_planes[0],
                                       out_planes[0],
                                       num_blocks[0],
                                       dense_depth[0],
                                       stride=1)
        self.layer2 = self._make_layer(in_planes[1],
                                       out_planes[1],
                                       num_blocks[1],
                                       dense_depth[1],
                                       stride=2)
        self.layer3 = self._make_layer(in_planes[2],
                                       out_planes[2],
                                       num_blocks[2],
                                       dense_depth[2],
                                       stride=2)
        self.layer4 = self._make_layer(in_planes[3],
                                       out_planes[3],
                                       num_blocks[3],
                                       dense_depth[3],
                                       stride=2)
        self.linear = nn.Linear(
            out_planes[3] + (num_blocks[3] + 1) * dense_depth[3], 10)
Example #19: Block
 def __init__(self, in_planes, out_planes, stride=1):
     super(Block, self).__init__()
     self.conv1 = nn.Conv2d(in_planes,
                            in_planes,
                            kernel_size=3,
                            stride=stride,
                            padding=1,
                            groups=in_planes,
                            bias=False)
     self.bn1 = nn.BatchNorm2d(in_planes)
     self.conv2 = nn.Conv2d(in_planes,
                            out_planes,
                            kernel_size=1,
                            stride=1,
                            padding=0,
                            bias=False)
     self.bn2 = nn.BatchNorm2d(out_planes)
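
For intuition, a quick standalone parameter count comparing this depthwise-plus-pointwise factorization with a single dense 3x3 convolution (illustrative numbers, not from the source):

import torch.nn as nn

dense = nn.Conv2d(64, 128, kernel_size=3, padding=1, bias=False)
depthwise = nn.Conv2d(64, 64, kernel_size=3, padding=1, groups=64, bias=False)
pointwise = nn.Conv2d(64, 128, kernel_size=1, bias=False)

n_dense = sum(p.numel() for p in dense.parameters())  # 64*128*9 = 73728
n_sep = (sum(p.numel() for p in depthwise.parameters())
         + sum(p.numel() for p in pointwise.parameters()))  # 576 + 8192 = 8768
print(n_dense / n_sep)  # about 8.4x fewer parameters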
Example #20: Discriminator
 def __init__(self):
     super(Discriminator, self).__init__()
     self.net = nn.Sequential(
         nn.Conv2d(3, 64, kernel_size=3, padding=1),
         nn.LeakyReLU(0.2),
         nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1),
         nn.BatchNorm2d(64),
         nn.LeakyReLU(0.2),
         nn.Conv2d(64, 128, kernel_size=3, padding=1),
         nn.BatchNorm2d(128),
         nn.LeakyReLU(0.2),
         nn.Conv2d(128, 128, kernel_size=3, stride=2, padding=1),
         nn.BatchNorm2d(128),
         nn.LeakyReLU(0.2),
         nn.Conv2d(128, 256, kernel_size=3, padding=1),
         nn.BatchNorm2d(256),
         nn.LeakyReLU(0.2),
         nn.Conv2d(256, 256, kernel_size=3, stride=2, padding=1),
         nn.BatchNorm2d(256),
         nn.LeakyReLU(0.2),
         nn.Conv2d(256, 512, kernel_size=3, padding=1),
         nn.BatchNorm2d(512),
         nn.LeakyReLU(0.2),
         nn.Conv2d(512, 512, kernel_size=3, stride=2, padding=1),
         nn.BatchNorm2d(512),
         nn.LeakyReLU(0.2),
         nn.AdaptiveAvgPool2d(1),
         nn.Conv2d(512, 1024, kernel_size=1),
         nn.LeakyReLU(0.2),
         nn.Conv2d(1024, 1, kernel_size=1),
     )
Example #21: Root
 def __init__(self, in_channels, out_channels, kernel_size=1):
     super(Root, self).__init__()
     self.conv = nn.Conv2d(in_channels,
                           out_channels,
                           kernel_size,
                           stride=1,
                           padding=(kernel_size - 1) // 2,
                           bias=False)
     self.bn = nn.BatchNorm2d(out_channels)
Example #22: SepConv
 def __init__(self, in_planes, out_planes, kernel_size, stride):
     super(SepConv, self).__init__()
     self.conv1 = nn.Conv2d(in_planes,
                            out_planes,
                            kernel_size,
                            stride,
                            padding=(kernel_size - 1) // 2,
                            bias=False,
                            groups=in_planes)
     self.bn1 = nn.BatchNorm2d(out_planes)
Example #23: MobileNetV2
 def __init__(self, num_classes=10):
     super(MobileNetV2, self).__init__()
     # NOTE: change conv1 stride 2 -> 1 for CIFAR10
     self.conv1 = nn.Conv2d(3,
                            32,
                            kernel_size=3,
                            stride=1,
                            padding=1,
                            bias=False)
     self.bn1 = nn.BatchNorm2d(32)
     self.layers = self._make_layers(in_planes=32)
     self.conv2 = nn.Conv2d(320,
                            1280,
                            kernel_size=1,
                            stride=1,
                            padding=0,
                            bias=False)
     self.bn2 = nn.BatchNorm2d(1280)
     self.linear = nn.Linear(1280, num_classes)
Example #24: MNASNet
 def __init__(self, alpha, num_classes=1000, dropout=0.2):
     super(MNASNet, self).__init__()
     assert alpha > 0.0
     self.alpha = alpha
     self.num_classes = num_classes
     depths = _get_depths(alpha)
     layers = [
         # First layer: regular conv.
         nn.Conv2d(3, depths[0], 3, padding=1, stride=2, bias=False),
         nn.BatchNorm2d(depths[0], momentum=_BN_MOMENTUM),
         nn.ReLU(inplace=True),
         # Depthwise separable, no skip.
         nn.Conv2d(
             depths[0],
             depths[0],
             3,
             padding=1,
             stride=1,
             groups=depths[0],
             bias=False,
         ),
         nn.BatchNorm2d(depths[0], momentum=_BN_MOMENTUM),
         nn.ReLU(inplace=True),
         nn.Conv2d(depths[0], depths[1], 1, padding=0, stride=1,
                   bias=False),
         nn.BatchNorm2d(depths[1], momentum=_BN_MOMENTUM),
         # MNASNet blocks: stacks of inverted residuals.
         _stack(depths[1], depths[2], 3, 2, 3, 3, _BN_MOMENTUM),
         _stack(depths[2], depths[3], 5, 2, 3, 3, _BN_MOMENTUM),
         _stack(depths[3], depths[4], 5, 2, 6, 3, _BN_MOMENTUM),
         _stack(depths[4], depths[5], 3, 1, 6, 2, _BN_MOMENTUM),
         _stack(depths[5], depths[6], 5, 2, 6, 4, _BN_MOMENTUM),
         _stack(depths[6], depths[7], 3, 1, 6, 1, _BN_MOMENTUM),
         # Final mapping to classifier input.
         nn.Conv2d(depths[7], 1280, 1, padding=0, stride=1, bias=False),
         nn.BatchNorm2d(1280, momentum=_BN_MOMENTUM),
         nn.ReLU(inplace=True),
     ]
     self.layers = nn.Sequential(*layers)
     self.classifier = nn.Sequential(nn.Dropout(p=dropout, inplace=True),
                                     nn.Linear(1280, num_classes))
     self._initialize_weights()
Example #25: MobileNet
 def __init__(self, num_classes=10):
     super(MobileNet, self).__init__()
     self.conv1 = nn.Conv2d(3,
                            32,
                            kernel_size=3,
                            stride=1,
                            padding=1,
                            bias=False)
     self.bn1 = nn.BatchNorm2d(32)
     self.layers = self._make_layers(in_planes=32)
     self.linear = nn.Linear(1024, num_classes)
Example #26: EfficientNet
 def __init__(self, cfg, num_classes=10):
     super(EfficientNet, self).__init__()
     self.cfg = cfg
     self.conv1 = nn.Conv2d(3,
                            32,
                            kernel_size=3,
                            stride=1,
                            padding=1,
                            bias=False)
     self.bn1 = nn.BatchNorm2d(32)
     self.layers = self._make_layers(in_channels=32)
     self.linear = nn.Linear(cfg['out_channels'][-1], num_classes)
Example #27
    def __init__(self, in_channels, n_filters) -> None:
        super().__init__()

        self.relu = nn.ReLU(inplace=True)

        self.conv1 = nn.Conv2d(in_channels, in_channels // 4, 1)
        self.norm1 = nn.BatchNorm2d(in_channels // 4)

        self.deconv2 = nn.ConvTranspose2d(
            in_channels // 4,
            in_channels // 4,
            kernel_size=4,
            stride=2,
            padding=1,
            output_padding=0,
        )

        self.norm2 = nn.BatchNorm2d(in_channels // 4)

        self.conv3 = nn.Conv2d(in_channels // 4, n_filters, 1)
        self.norm3 = nn.BatchNorm2d(n_filters)
Example #28: _DenseLayer
 def __init__(
     self,
     num_input_features: int,
     growth_rate: int,
     bn_size: int,
     drop_rate: float,
 ) -> None:
     super(_DenseLayer, self).__init__()
     self.norm1: nn.BatchNorm2d
     self.add_module("norm1", nn.BatchNorm2d(num_input_features))
     self.relu1: nn.ReLU
     self.add_module("relu1", nn.ReLU(inplace=True))
     self.conv1: nn.Conv2d
     self.add_module(
         "conv1",
         nn.Conv2d(
             num_input_features,
             bn_size * growth_rate,
             kernel_size=1,
             stride=1,
             bias=False,
         ),
     )
     self.norm2: nn.BatchNorm2d
     self.add_module("norm2", nn.BatchNorm2d(bn_size * growth_rate))
     self.relu2: nn.ReLU
     self.add_module("relu2", nn.ReLU(inplace=True))
     self.conv2: nn.Conv2d
     self.add_module(
         "conv2",
         nn.Conv2d(
             bn_size * growth_rate,
             growth_rate,
             kernel_size=3,
             stride=1,
             padding=1,
             bias=False,
         ),
     )
     self.drop_rate = float(drop_rate)
Example #29: Conv2dLayer
    def __init__(
        self,
        input_size,
        in_channel,
        out_channel,
        kernel_size,
        stride,
        dropout=0.1,
        batch_norm=False,
        residual=False,
        act_func_type="relu",
    ):
        super(Conv2dLayer, self).__init__()

        self.input_size = input_size
        self.in_channel = in_channel
        self.out_channel = out_channel

        self.batch_norm = batch_norm
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = (
            0,
            kernel_size //
            2 if isinstance(self.kernel_size, int) else kernel_size[1] // 2,
        )

        self.residual = residual

        self.act_func_type = act_func_type

        self.conv_layer = nn.Conv2d(
            in_channels=in_channel,
            out_channels=out_channel,
            kernel_size=self.kernel_size,
            stride=self.stride,
            padding=self.padding,
        )

        self.output_size = cal_width_dim_2d(
            input_size,
            self.kernel_size
            if isinstance(self.kernel_size, int) else self.kernel_size[1],
            self.stride if isinstance(self.stride, int) else self.stride[1],
            padding=self.padding
            if isinstance(self.padding, int) else self.padding[1],
        )

        if self.batch_norm:
            self.norm = nn.BatchNorm2d(out_channel)

        self.dropout = nn.Dropout(dropout)
Example #30: GhostNet
    def __init__(self, cfgs, num_classes=1000, width=1.0, dropout=0.2):
        super(GhostNet, self).__init__()
        # setting of inverted residual blocks
        self.cfgs = cfgs
        self.dropout = dropout

        # building first layer
        output_channel = _make_divisible(16 * width, 4)
        self.conv_stem = nn.Conv2d(3, output_channel, 3, 2, 1, bias=False)
        self.bn1 = nn.BatchNorm2d(output_channel)
        self.act1 = nn.ReLU(inplace=True)
        input_channel = output_channel

        # building inverted residual blocks
        stages = []
        block = GhostBottleneck
        for cfg in self.cfgs:
            layers = []
            for k, exp_size, c, se_ratio, s in cfg:
                output_channel = _make_divisible(c * width, 4)
                hidden_channel = _make_divisible(exp_size * width, 4)
                layers.append(
                    block(
                        input_channel,
                        hidden_channel,
                        output_channel,
                        k,
                        s,
                        se_ratio=se_ratio,
                    ))
                input_channel = output_channel
            stages.append(nn.Sequential(*layers))

        # note: exp_size carries over from the last cfg entry of the loop above
        output_channel = _make_divisible(exp_size * width, 4)
        stages.append(
            nn.Sequential(ConvBnAct(input_channel, output_channel, 1)))
        input_channel = output_channel

        self.blocks = nn.Sequential(*stages)

        # building last several layers
        output_channel = 1280
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.conv_head = nn.Conv2d(input_channel,
                                   output_channel,
                                   1,
                                   1,
                                   0,
                                   bias=True)
        self.act2 = nn.ReLU(inplace=True)
        self.classifier = nn.Linear(output_channel, num_classes)
        self.dropout = nn.Dropout(p=self.dropout)