Example #1
    def __init__(self, in_channels, out_channels, stride=1, base_width=64):
        super().__init__()
        width = int(out_channels * (base_width / 64.))
        self.residual_function = nn.Sequential(
            layers.Conv2d(in_channels, width, kernel_size=1, bias=False),
            layers.BatchNorm2d(width),
            nn.ReLU(inplace=True),
            layers.Conv2d(width,
                          width,
                          stride=stride,
                          kernel_size=3,
                          padding=1,
                          bias=False),
            layers.BatchNorm2d(width),
            nn.ReLU(inplace=True),
            layers.Conv2d(width,
                          out_channels * BottleNeck.expansion,
                          kernel_size=1,
                          bias=False),
            layers.BatchNorm2d(out_channels * BottleNeck.expansion),
        )

        self.shortcut = layers.Identity2d(in_channels)

        if stride != 1 or in_channels != out_channels * BottleNeck.expansion:
            self.shortcut = nn.Sequential(
                layers.Conv2d(in_channels,
                              out_channels * BottleNeck.expansion,
                              stride=stride,
                              kernel_size=1,
                              bias=False),
                layers.BatchNorm2d(out_channels * BottleNeck.expansion))
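The snippet above only constructs the layers. As a minimal sketch (an assumption, not part of this example), the forward pass such a bottleneck block is typically paired with sums the residual branch with the shortcut and applies a final ReLU; it assumes nn is torch.nn:

    def forward(self, x):
        # Add the (possibly projected) shortcut to the residual branch,
        # then apply the final non-linearity.
        return nn.functional.relu(self.residual_function(x) + self.shortcut(x))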
Example #2
    def __init__(self, f_in: int, f_out: int, downsample=False):
        super(Block, self).__init__()

        stride = 2 if downsample else 1
        self.conv1 = layers.Conv2d(f_in,
                                   f_out,
                                   kernel_size=3,
                                   stride=stride,
                                   padding=1,
                                   bias=False)
        self.bn1 = layers.BatchNorm2d(f_out)
        self.conv2 = layers.Conv2d(f_out,
                                   f_out,
                                   kernel_size=3,
                                   stride=1,
                                   padding=1,
                                   bias=False)
        self.bn2 = layers.BatchNorm2d(f_out)

        # Shortcut connection: a parameter-free identity when the shape is
        # unchanged; otherwise a strided 1x1 convolution with BatchNorm.
        if downsample or f_in != f_out:
            self.shortcut = nn.Sequential(
                layers.Conv2d(f_in, f_out, kernel_size=1, stride=2,
                              bias=False), layers.BatchNorm2d(f_out))
        else:
            self.shortcut = layers.Identity2d(f_in)
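This variant keeps conv1/bn1 and conv2/bn2 as separate attributes, so a plausible forward pass (a sketch, not shown in the snippet; it assumes nn is torch.nn) applies ReLU after the first conv/BN pair and again after the shortcut is added:

    def forward(self, x):
        out = nn.functional.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        # Identity2d passes the input through unchanged; the strided 1x1
        # projection is used instead when the block changes shape.
        out += self.shortcut(x)
        return nn.functional.relu(out)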
Example #3
    def __init__(self, in_channels, out_channels, stride=1, base_width=64):
        super().__init__()

        # residual function
        self.residual_function = nn.Sequential(
            layers.Conv2d(in_channels,
                          out_channels,
                          kernel_size=3,
                          stride=stride,
                          padding=1,
                          bias=False), layers.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            layers.Conv2d(out_channels,
                          out_channels * BasicBlock.expansion,
                          kernel_size=3,
                          padding=1,
                          bias=False),
            layers.BatchNorm2d(out_channels * BasicBlock.expansion))

        # shortcut
        self.shortcut = layers.Identity2d(in_channels)

        # the shortcut output dimensions do not match the residual function's,
        # so use a 1x1 convolution to match the dimensions
        if stride != 1 or in_channels != BasicBlock.expansion * out_channels:
            self.shortcut = nn.Sequential(
                layers.Conv2d(in_channels,
                              out_channels * BasicBlock.expansion,
                              kernel_size=1,
                              stride=stride,
                              bias=False),
                layers.BatchNorm2d(out_channels * BasicBlock.expansion))
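A hypothetical usage sketch for the block above (the surrounding class is not shown; it assumes BasicBlock.expansion is 1, as in standard ResNet basic blocks): the 1x1 projection shortcut is created only when the stride or the channel count changes, otherwise the shortcut remains a parameter-free Identity2d.

# Identity shortcut: same channel count and stride 1.
block_a = BasicBlock(in_channels=64, out_channels=64, stride=1)
# Projection shortcut: stride 2 and a wider output trigger the 1x1 conv + BatchNorm path.
block_b = BasicBlock(in_channels=64, out_channels=128, stride=2)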
Example #4
    def __init__(self,
                 in_channels,
                 out_channels,
                 stride=1,
                 base_width=64,
                 batch_norm=True):
        super().__init__()

        self.batch_norm = batch_norm

        # residual function
        layer_list = [
            layers.Conv2d(
                in_channels,
                out_channels,
                kernel_size=3,
                stride=stride,
                padding=1,
                bias=False,
            ),
        ]
        if self.batch_norm:
            layer_list.append(layers.BatchNorm2d(out_channels))
        layer_list += [
            nn.ReLU(inplace=True),
            layers.Conv2d(
                out_channels,
                out_channels * BasicBlock.expansion,
                kernel_size=3,
                padding=1,
                bias=False,
            ),
        ]
        if self.batch_norm:
            layer_list.append(
                layers.BatchNorm2d(out_channels * BasicBlock.expansion))
        self.residual_function = nn.Sequential(*layer_list)

        # shortcut
        self.shortcut = layers.Identity2d(in_channels)

        # the shortcut output dimensions do not match the residual function's,
        # so use a 1x1 convolution to match the dimensions
        if stride != 1 or in_channels != BasicBlock.expansion * out_channels:
            layer_list = [
                layers.Conv2d(
                    in_channels,
                    out_channels * BasicBlock.expansion,
                    kernel_size=1,
                    stride=stride,
                    bias=False,
                )
            ]
            if self.batch_norm:
                layer_list.append(
                    layers.BatchNorm2d(out_channels * BasicBlock.expansion))
            self.shortcut = nn.Sequential(*layer_list)
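This version adds a batch_norm flag to the previous BasicBlock: when it is False, both the residual branch and the projection shortcut are assembled without BatchNorm2d layers. A hypothetical usage sketch:

# Same block shape with and without batch normalization.
plain_block = BasicBlock(in_channels=64, out_channels=128, stride=2, batch_norm=False)
bn_block = BasicBlock(in_channels=64, out_channels=128, stride=2)  # batch_norm=True by default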
Example #5
    def __init__(self,
                 in_channels,
                 out_channels,
                 stride=1,
                 base_width=64,
                 batch_norm=True):
        super().__init__()

        self.batch_norm = batch_norm

        width = int(out_channels * (base_width / 64.0))
        layer_list = [
            layers.Conv2d(in_channels, width, kernel_size=1, bias=False),
        ]
        if self.batch_norm:
            layer_list.append(layers.BatchNorm2d(width))
        layer_list += [
            nn.ReLU(inplace=True),
            layers.Conv2d(width,
                          width,
                          stride=stride,
                          kernel_size=3,
                          padding=1,
                          bias=False),
        ]
        if self.batch_norm:
            layer_list.append(layers.BatchNorm2d(width))
        layer_list += [
            nn.ReLU(inplace=True),
            layers.Conv2d(width,
                          out_channels * BottleNeck.expansion,
                          kernel_size=1,
                          bias=False),
        ]
        if self.batch_norm:
            layer_list.append(
                layers.BatchNorm2d(out_channels * BottleNeck.expansion))
        self.residual_function = nn.Sequential(*layer_list)

        self.shortcut = layers.Identity2d(in_channels)

        if stride != 1 or in_channels != out_channels * BottleNeck.expansion:
            layer_list = [
                layers.Conv2d(
                    in_channels,
                    out_channels * BottleNeck.expansion,
                    stride=stride,
                    kernel_size=1,
                    bias=False,
                ),
            ]
            if self.batch_norm:
                layer_list.append(
                    layers.BatchNorm2d(out_channels * BottleNeck.expansion))
            self.shortcut = nn.Sequential(*layer_list)
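To illustrate the width computation in this bottleneck variant (a hypothetical walk-through; it assumes BottleNeck.expansion is 4, as in standard ResNet bottlenecks): base_width only scales the channels of the 3x3 middle convolution, while the final 1x1 convolution always expands to out_channels * expansion.

# out_channels = 64, base_width = 64  -> width = int(64 * (64 / 64.0))  = 64
# out_channels = 64, base_width = 128 -> width = int(64 * (128 / 64.0)) = 128 (wider 3x3 conv)
# in_channels 256 == 64 * 4 and stride == 1, so the shortcut stays an Identity2d.
block = BottleNeck(in_channels=256, out_channels=64, stride=1, base_width=128)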