Example #1
 def __init__(self, channels):
     super(ELUBR, self).__init__()
     self.conv1 = conv3x3(channels, channels)
     self.bn1 = nn.BatchNorm2d(channels)
     self.relu = nn.ELU(inplace=True)
     self.conv2 = conv3x3(channels, channels)
     self.bn2 = nn.BatchNorm2d(channels)
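
All of these snippets assume a conv3x3 helper (and often conv1x1) already in scope. Most of the repositories follow the torchvision ResNet convention, reproduced below for reference; individual projects may define the helpers slightly differently.

    import torch.nn as nn

    def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
        # 3x3 convolution with padding; bias is omitted because a norm layer usually follows
        return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                         padding=dilation, groups=groups, bias=False, dilation=dilation)

    def conv1x1(in_planes, out_planes, stride=1):
        # 1x1 convolution, used for channel projection and downsample shortcuts
        return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)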
Example #2
    def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None, scale=4):
        super(BasicBlockV4, self).__init__()
        self.scale = scale
        self.each_scale_planes = planes//scale
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError('BasicBlock only supports groups=1 and base_width=64')
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
        # Both self.conv1 and self.downsample layers downsample the input when stride != 1

        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)

        self.shuffleblock = ShuffleBlockV1(scale)

        self.conv2 = nn.ModuleList()
        for i in range(scale):
            self.conv2.append(nn.Sequential(conv3x3(self.each_scale_planes, self.each_scale_planes),
                                              norm_layer(self.each_scale_planes)))

        self.downsample = downsample
        self.stride = stride

        self.seblock = SeBlock(planes)
Example #3
 def __init__(self, inplanes, planes, stride=1, downsample=None):
     super(BasicBlockNoBN, self).__init__()
     self.conv1 = resnet.conv3x3(inplanes, planes, stride)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = resnet.conv3x3(planes, planes)
     self.downsample = downsample
     self.stride = stride
Example #4
    def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None, input_dims=None, attn_params=None):
        super(BasicBlock, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        if groups != 1 or base_width != 64:
            raise ValueError('BasicBlock only supports groups=1 and base_width=64')
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")

        # attention
        if attn_params is not None:
            nh = attn_params['nh']
            dk = max(20*nh, int((attn_params['k'] * planes // nh)*nh))
            dv = int((attn_params['v'] * planes // nh)*nh)
            relative = attn_params['relative']
            # scale input dims to network HW outputs at this layer
            input_dims = int(attn_params['input_dims'][0] * 16 / planes), int(attn_params['input_dims'][1] * 16 / planes)
            print('BasicBlock attention: dk {}, dv {}, input_dims {}x{}'.format(dk, dv, *input_dims))

        # Both self.conv1 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv3x3(inplanes, planes, stride) if attn_params is None else \
                     AAConv2d(inplanes, planes, 3, stride, dk, dv, nh, relative, input_dims)
        self.bn1 = norm_layer(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = norm_layer(planes)
        self.downsample = downsample
        self.stride = stride
Example #5
 def __init__(
         self,
         inplanes: int,
         planes: int,
         stride: int = 1,
         downsample: Optional[nn.Module] = None,
         groups: int = 1,
         base_width: int = 64,
         dilation: int = 1,
         norm_layer: Optional[Callable[..., nn.Module]] = None) -> None:
     super(BasicBlock, self).__init__()
     if norm_layer is None:
         norm_layer = nn.BatchNorm2d
     if groups != 1 or base_width != 64:
         raise ValueError(
             'BasicBlock only supports groups=1 and base_width=64')
     if dilation > 1:
         raise NotImplementedError(
             "Dilation > 1 not supported in BasicBlock")
     # Both self.conv1 and self.downsample layers downsample the input when stride != 1
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = norm_layer(planes)
     self.relu = nn.ReLU(inplace=True)
     # self.relu = nn.LeakyReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = norm_layer(planes)
     self.downsample = downsample
     self.stride = stride
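
Example #5 matches the stock torchvision BasicBlock constructor. For context, torchvision pairs it with the following forward pass (condensed here), which is why the downsample argument is required whenever stride != 1 or the channel count changes:

    def forward(self, x):
        identity = x
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        if self.downsample is not None:
            identity = self.downsample(x)  # match identity to out's shape
        out += identity
        return self.relu(out)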
Example #6
    def __init__(self,
                 in_planes,
                 out_planes,
                 nb_compressions=0,
                 norm_layer=None):

        if norm_layer is None:
            norm_layer = nn.BatchNorm2d

        layers = [
            conv3x3(in_planes, out_planes),
            norm_layer(out_planes),
            nn.LeakyReLU(0.1, inplace=True)
        ]
        for _ in range(nb_compressions):
            layers.extend([
                conv1x1(out_planes, in_planes),
                norm_layer(in_planes),
                nn.LeakyReLU(0.1, inplace=True),
                conv3x3(in_planes, out_planes),
                norm_layer(out_planes),
                nn.LeakyReLU(0.1, inplace=True)
            ])

        super().__init__(*layers)
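
Unlike the residual blocks above, this one forwards its layer list to super().__init__(*layers), so the class subclasses nn.Sequential and needs no forward method. A minimal usage sketch, assuming the class is named ConvBlock (the snippet omits the class line, so the name is hypothetical) and the conv3x3/conv1x1 helpers shown earlier:

    import torch

    # class ConvBlock(nn.Sequential):  <- hypothetical name; __init__ as above
    block = ConvBlock(64, 128, nb_compressions=1)
    y = block(torch.randn(2, 64, 32, 32))  # -> (2, 128, 32, 32); all convs are stride 1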
Example #7
    def __init__(self, layout, num_classes=20, anchors=None):

        super().__init__()

        # Priors computed using K-means
        if anchors is None:
            anchors = torch.tensor([[1.08, 1.19], [3.42, 4.41], [6.63, 11.38],
                                    [9.42, 5.11], [16.62, 10.52]])
        self.num_classes = num_classes

        self.backbone = DarknetBodyV2(layout, passthrough=True)

        self.reorg_layer = ConcatDownsample2d(scale_factor=2)

        self.block5 = nn.Sequential(conv3x3(layout[-1][0], layout[-1][0]),
                                    nn.BatchNorm2d(layout[-1][0]),
                                    nn.LeakyReLU(0.1, inplace=True),
                                    conv3x3(layout[-1][0], layout[-1][0]),
                                    nn.BatchNorm2d(layout[-1][0]),
                                    nn.LeakyReLU(0.1, inplace=True))

        self.block6 = nn.Sequential(
            conv3x3(layout[-1][0] + layout[-2][0] * 2**2, layout[-1][0]),
            nn.BatchNorm2d(layout[-1][0]), nn.LeakyReLU(0.1, inplace=True))

        # Each box has P_objectness, 4 coords, and score for each class
        self.head = conv1x1(layout[-1][0],
                            anchors.shape[0] * (5 + num_classes))

        # Register losses
        self.register_buffer('anchors', anchors)

        init_module(self, 'leaky_relu')
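
The 1x1 head therefore emits anchors.shape[0] * (5 + num_classes) channels per grid cell. Downstream code typically views that tensor per anchor before decoding; a hedged sketch of that reshape (not taken from this repository):

    import torch

    B, H, W, num_classes = 2, 13, 13, 20
    num_anchors = 5  # matches the five K-means priors above
    out = torch.randn(B, num_anchors * (5 + num_classes), H, W)
    out = out.view(B, num_anchors, 5 + num_classes, H, W).permute(0, 3, 4, 1, 2)
    # out[..., :4] box regression, out[..., 4] objectness, out[..., 5:] class logits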
Example #8
 def __init__(self,
              inplanes,
              planes,
              stride=1,
              downsample=None,
              groups=1,
              base_width=64,
              dilation=1,
              norm_layer=None):
     super(BasicBlock, self).__init__()
     if norm_layer is None:
         norm_layer = nn.BatchNorm2d
     if groups != 1 or base_width != 64:
         raise ValueError(
             "BasicBlock only supports groups=1 and base_width=64")
     if dilation > 1:
         raise NotImplementedError(
             "Dilation > 1 not supported in BasicBlock")
     # Both self.conv1 and self.downsample layers downsample the input when stride != 1
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = norm_layer(planes)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = norm_layer(planes)
     self.downsample = downsample
     self.stride = stride
Example #9
    def __init__(self, layout, num_classes=20, num_anchors=2, lambda_noobj=0.5, lambda_coords=5.):

        super().__init__()

        self.backbone = DarknetBodyV1(layout)

        self.block4 = nn.Sequential(
            conv3x3(1024, 1024),
            nn.LeakyReLU(inplace=True),
            conv3x3(1024, 1024, stride=2),
            nn.LeakyReLU(inplace=True),
            conv3x3(1024, 1024),
            nn.LeakyReLU(inplace=True),
            conv3x3(1024, 1024),
            nn.LeakyReLU(inplace=True))

        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(1024 * 7 ** 2, 4096),
            nn.LeakyReLU(inplace=True),
            nn.Linear(4096, 7 ** 2 * (num_anchors * 5 + num_classes)))
        self.num_anchors = num_anchors
        self.num_classes = num_classes
        # Loss coefficients
        self.lambda_noobj = lambda_noobj
        self.lambda_coords = lambda_coords

        init_module(self, 'leaky_relu')
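
The final linear layer packs the whole 7x7 grid into one vector: each cell carries 5 numbers per anchor (objectness plus 4 coordinates) and one shared score per class, as in YOLOv1. A hedged sketch of unpacking it (the exact within-cell ordering varies by implementation):

    import torch

    B, S, num_anchors, num_classes = 2, 7, 2, 20
    flat = torch.randn(B, S * S * (num_anchors * 5 + num_classes))
    grid = flat.view(B, S, S, num_anchors * 5 + num_classes)
    boxes = grid[..., :num_anchors * 5].view(B, S, S, num_anchors, 5)
    class_scores = grid[..., num_anchors * 5:]  # (B, S, S, num_classes)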
Example #10
    def __init__(self, block, layers, context, aux=False):
        self.inplanes = 128
        super(ResNet, self).__init__()
        self.conv1 = conv3x3(3, 64, stride=2)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu1 = nn.ReLU(inplace=False)
        self.conv2 = conv3x3(64, 64)
        self.bn2 = nn.BatchNorm2d(64)
        self.relu2 = nn.ReLU(inplace=False)
        self.conv3 = conv3x3(64, 128)
        self.bn3 = nn.BatchNorm2d(128)
        self.relu3 = nn.ReLU(inplace=False)
        self.maxpool = nn.MaxPool2d(kernel_size=3,
                                    stride=2,
                                    padding=1,
                                    ceil_mode=False)

        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block,
                                       256,
                                       layers[2],
                                       stride=1,
                                       dilation=2)
        self.layer4 = self._make_layer(block,
                                       512,
                                       layers[3],
                                       stride=1,
                                       dilation=4,
                                       multi_grid=(1, 1, 1))

        self.up0 = nn.Sequential(BasicBlock(128, 128), )

        self.up1 = nn.Sequential(
            nn.Conv2d(256, 128, kernel_size=1, bias=False),
            BasicBlock(128, 128),
            BasicBlock(128, 128),
        )

        self.merge0 = nn.Sequential(
            BasicBlock(128, 128),
            BasicBlock(128, 128),
        )

        self.merge1 = nn.Sequential(
            BasicBlock(128, 128),
            BasicBlock(128, 128),
        )

        # extra added layers
        self.context = context
        self.cls = nn.Sequential(
            BasicBlock(128, 128), BasicBlock(128, 128),
            nn.Conv2d(128, 1, kernel_size=1, stride=1, padding=0, bias=True))

        self.aux = nn.Sequential(
            ASP_OC_Module(1024, 128), BasicBlock(128, 128),
            BasicBlock(128, 128),
            nn.Conv2d(128, 1, kernel_size=1, stride=1, padding=0, bias=True))
Example #11
 def __init__(self, channels):
     super(DualAttentation, self).__init__()
     self.cam = CAM_Module(channels)
     self.pam = PAM_Module(channels)
     self.conv_cam_1 = conv3x3(channels, channels)
     self.conv_cam_2 = conv3x3(channels, channels)
     self.conv_pam_1 = conv3x3(channels, channels)
     self.conv_pam_2 = conv3x3(channels, channels)
Example #12
 def __init__(self, inplanes, planes, stride=1):
     super(BasicBlock, self).__init__()
     # self.conv1 downsamples the input when stride != 1 (this variant has no downsample branch)
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = nn.BatchNorm2d(planes)
     self.relu = nn.ReLU(inplace=True)
     # conv2 keeps the shape: was conv3x3(inplanes, planes, stride), which would apply the stride twice and break the residual add
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = nn.BatchNorm2d(planes)
     self.stride = stride
Example #13
 def __init__(self, inplanes, planes, stride=1, downsample=None):
     super(BasicBlockV1, self).__init__()
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = nn.BatchNorm2d(planes)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = nn.BatchNorm2d(planes)
     self.downsample = downsample
     self.stride = stride
Example #14
 def __init__(self, in_channels, out_channels, stride=1, downsample=None, reduction=16):
     super(SERCU, self).__init__()
     self.conv1 = conv3x3(in_channels, out_channels, stride)
     self.bn1 = nn.BatchNorm2d(out_channels)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(out_channels, out_channels)
     self.bn2 = nn.BatchNorm2d(out_channels)
     self.downsample = downsample
     self.stride = stride
     self.se_module = SEModule(out_channels * self.multiplier, reduction=reduction)
Example #15
 def __init__(self, inplanes, outplanes, stride=1, downsample=None):
     super(IncrementalBlock, self).__init__()
     interm_planes = outplanes - inplanes
     self.conv1 = conv3x3(inplanes, interm_planes, stride)
     self.bn1 = nn.BatchNorm2d(interm_planes)
     self.relu1 = nn.ReLU(inplace=True)
     self.relu2 = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(interm_planes, interm_planes)
     self.bn2 = nn.BatchNorm2d(interm_planes)
     self.stride = stride
Example #16
 def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
              base_width=64, dilation=1, norm_layer=None):
     super(AttMap, self).__init__()
     if norm_layer is None:  # honor a caller-supplied norm layer instead of always overriding it
         norm_layer = nn.BatchNorm2d
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = norm_layer(planes)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = norm_layer(planes)
     self.downsample = downsample
     self.stride = stride
Example #17
    def init_head(self):
        self.sfs = [SaveFeature(self.backbone[i]) for i in [2, 4, 5, 6]]
        self.up_layer1 = UpLayer(self.block, 512, 256, self.layers[-1])
        self.up_layer2 = UpLayer(self.block, 256, 128, self.layers[-2])
        self.up_layer3 = UpLayer(self.block, 128, 64, self.layers[-3])

        self.map = conv3x3(64 * self.block.expansion, 64)  # 64e -> 64
        self.conv = conv3x3(128, 64)
        self.bn_conv = nn.BatchNorm2d(64)
        self.up_conv = nn.ConvTranspose2d(64, 1, 2, 2, 0)
        self.bn_up = nn.BatchNorm2d(1)
Example #18
 def __init__(self, inplanes, planes, groups=1, base_width=64, dilation=1):
     super(BasicBlock, self).__init__()
     if groups != 1 or base_width != 64:
         raise ValueError(
             'BasicBlock only supports groups=1 and base_width=64')
     if dilation > 1:
         raise NotImplementedError(
             "Dilation > 1 not supported in BasicBlock")
     # Both self.conv1 and self.downsample layers downsample the input when stride != 1
     self.conv1 = resnet.conv3x3(inplanes, planes)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = resnet.conv3x3(planes, planes)
Example #19
    def __init__(self, inplanes, planes, norm_layer, stride=1, downsample=None):
        super(BasicBlockCifar, self).__init__()
        self.downsample = downsample
        self.stride = stride

        self.bn1 = norm_layer(inplanes)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = conv3x3(inplanes, planes, stride)

        self.bn2 = norm_layer(planes)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
Example #20
 def __init__(self, inplanes, planes, stride=1, death_rate=0.,
              downsample=None):
     super(BasicBlockWithDeathRate, self).__init__()
     self.conv1 = conv3x3(inplanes, planes, stride)
     self.bn1 = nn.BatchNorm2d(planes)
     self.relu1 = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = nn.BatchNorm2d(planes)
     self.relu2 = nn.ReLU(inplace=True)
     self.downsample = downsample
     self.stride = stride
     self.death_rate = death_rate
Example #21
    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlockV2, self).__init__()
        self.relu = nn.ReLU(inplace=True)

        self.bn1 = nn.BatchNorm2d(inplanes)
        # By default dilation = 1 (padding of 1 on each side)
        self.conv1 = conv3x3(inplanes, planes, stride=stride)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv2 = conv3x3(planes, planes, stride=1)
        self.downsample = downsample

        self.stride = stride
Example #22
 def __init__(self, config_channels, prefix, channels, stride=1):
     nn.Module.__init__(self)
     channels_in = config_channels.channels
     self.conv1 = conv3x3(config_channels.channels, config_channels(channels, '%s.conv1.weight' % prefix), stride)
     self.bn1 = nn.BatchNorm2d(config_channels.channels)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(config_channels.channels, config_channels(channels, '%s.conv2.weight' % prefix))
     self.bn2 = nn.BatchNorm2d(config_channels.channels)
     if stride > 1 or channels_in != config_channels.channels:
         downsample = []
         downsample.append(nn.Conv2d(channels_in, config_channels.channels, kernel_size=1, stride=stride, bias=False))
         downsample.append(nn.BatchNorm2d(config_channels.channels))
         self.downsample = nn.Sequential(*downsample)
     else:
         self.downsample = None
Example #23
    def __init__(self, layout):

        super(DarknetBodyV3, self).__init__()

        self.conv1 = conv3x3(3, 32)
        self.bn1 = nn.BatchNorm2d(32)
        self.conv2 = conv3x3(32, 64, stride=2)
        self.bn2 = nn.BatchNorm2d(64)
        self.activation = nn.LeakyReLU(0.1, inplace=True)

        self.block1 = self._make_layer(*layout[0])
        self.block2 = self._make_layer(*layout[1])
        self.block3 = self._make_layer(*layout[2])
        self.block4 = self._make_layer(*layout[3])
        self.block5 = self._make_layer(*layout[4])
Example #24
 def __init__(self,
              inplanes: int,
              planes: int,
              stride: int = 1,
              dilation: int = 1) -> None:
     super().__init__()
     if dilation > 1:
         raise NotImplementedError(
             "Dilation > 1 not supported in BasicBlock")
     self.conv1 = conv3x3(inplanes, planes, stride=stride)
     self.bn1 = nn.BatchNorm2d(planes)
     self.relu = nn.ReLU(inplace=True)
     self.conv2 = conv3x3(planes, planes)
     self.bn2 = nn.BatchNorm2d(planes)
     self.stride = stride
Example #25
 def __init__(
         self,
         inplanes: int,
         planes: int,
         stride: int = 1,
         downsample: Optional[nn.Module] = None,
         groups: int = 1,
         base_width: int = 64,
         dilation: int = 1,
         norm_layer: Optional[Callable[..., nn.Module]] = None) -> None:
     super(Bottleneck, self).__init__()
     if norm_layer is None:
         norm_layer = nn.BatchNorm2d
     width = int(planes * (base_width / 64.)) * groups
     # Both self.conv2 and self.downsample layers downsample the input when stride != 1
     self.conv1 = conv1x1(inplanes, width)
     self.bn1 = norm_layer(width)
     self.conv2 = conv3x3(width, width, stride, groups, dilation)
     self.bn2 = norm_layer(width)
     self.conv3 = conv1x1(width, planes * self.expansion)
     self.bn3 = norm_layer(planes * self.expansion)
     self.relu = nn.ReLU(inplace=True)
     # self.relu = nn.LeakyReLU(inplace=True)
     self.downsample = downsample
     self.stride = stride
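
This is the stock torchvision Bottleneck constructor (it assumes the usual class attribute expansion = 4). The matching torchvision forward pass, condensed:

    def forward(self, x):
        identity = x
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        if self.downsample is not None:
            identity = self.downsample(x)
        out += identity
        return self.relu(out)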
Example #26
    def __init__(self, args):
        super(ResNet164, self).__init__()
        block = Bottleneck

        self.args = args
        self.depth = args.depth
        n = (self.depth - 2) // 9
        width = 16
        self.inplanes = width

        if args.data_train == 'CIFAR10':
            num_classes = 10
        elif args.data_train == 'CIFAR100':
            num_classes = 100
        else:
            raise NotImplementedError('The module is not designed for dataset ' + args.data_train)

        self.conv1 = conv3x3(3, width)
        self.bn1 = nn.BatchNorm2d(width)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self._make_layer(block, width, n)
        self.layer2 = self._make_layer(block, width * 2, n, stride=2)
        self.layer3 = self._make_layer(block, width * 4, n, stride=2)
        # self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        self.avgpool = nn.AvgPool2d(8, stride=1)
        self.fc = nn.Linear(self.inplanes, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
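
The weight loop above is the classic He (Kaiming) normal initialization written out by hand: std = sqrt(2 / n) with n = kernel_h * kernel_w * out_channels. In current PyTorch the same effect is usually obtained with the nn.init helpers:

    for m in self.modules():
        if isinstance(m, nn.Conv2d):
            # mode='fan_out' reproduces n = kernel_h * kernel_w * out_channels
            nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        elif isinstance(m, nn.BatchNorm2d):
            nn.init.ones_(m.weight)
            nn.init.zeros_(m.bias)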
Example #27
    def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None, input_dims=None, attn_params=None):
        super(Bottleneck, self).__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        width = int(planes * (base_width / 64.)) * groups

        # attention
        if attn_params is not None:
            nh = attn_params['nh']
            dk = max(20*nh, int((attn_params['k'] * width // nh)*nh))
            dv = int((attn_params['v'] * width // nh)*nh)
            relative = attn_params['relative']
            # scale input dims to network HW outputs at this layer
            input_dims = int(attn_params['input_dims'][0] * 16 / planes), int(attn_params['input_dims'][1] * 16 / planes)
            print('Bottleneck attention: dk {}, dv {}, input_dims {}x{}'.format(dk, dv, *input_dims))

        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)
        self.conv2 = conv3x3(width, width, stride, groups, dilation) if attn_params is None else \
                     AAConv2d(width, width, 3, stride, dk, dv, nh, relative, input_dims, groups=groups, dilation=dilation)
        self.bn2 = norm_layer(width)
        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
Example #28
    def __init__(self, block, layers, num_classes=1000, zero_init_residual=False,
                 groups=1, width_per_group=64, replace_stride_with_dilation=None,
                 norm_layer=None, out_dim=128):
        super().__init__(block, layers, num_classes, zero_init_residual, groups, width_per_group,
                         replace_stride_with_dilation, norm_layer)
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        '''
        # For reference:
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
                                       dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                       dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                       dilate=replace_stride_with_dilation[2])
        '''
        uplayers = []
        inplanes = 2048
        first = True
        for i in range(2):
            uplayers.append(ReverseBottleneck(inplanes, inplanes // 2, norm_layer=norm_layer, passthrough=not first))
            inplanes = inplanes // 2
            first = False
        self.uplayers = nn.ModuleList(uplayers)
        self.tail = nn.Sequential(conv1x1(1024, 512),
                                  norm_layer(512),
                                  nn.ReLU(),
                                  conv3x3(512, 512),
                                  norm_layer(512),
                                  nn.ReLU(),
                                  conv1x1(512, out_dim))

        del self.fc  # Not used in this implementation and just consumes a ton of GPU memory.
Example #29
 def __init__(self, inplanes, planes, groups=1, passthrough=False,
              base_width=64, dilation=1, norm_layer=None):
     super().__init__()
     if norm_layer is None:
         norm_layer = nn.BatchNorm2d
     width = int(planes * (base_width / 64.)) * groups
     self.passthrough = passthrough
     if passthrough:
         self.integrate = conv1x1(inplanes*2, inplanes)
         self.bn_integrate = norm_layer(inplanes)
     # No stride here: this reverse block grows spatial size via the upsample branches below
     self.conv1 = conv1x1(inplanes, width)
     self.bn1 = norm_layer(width)
     self.conv2 = conv3x3(width, width, groups=groups, dilation=dilation)  # keywords matter: positional args would pass groups as stride
     self.bn2 = norm_layer(width)
     self.residual_upsample = nn.Sequential(
         nn.Upsample(scale_factor=2, mode='nearest'),
         conv1x1(width, width),
         norm_layer(width),
     )
     self.conv3 = conv1x1(width, planes)
     self.bn3 = norm_layer(planes)
     self.relu = nn.ReLU(inplace=True)
     self.upsample = nn.Sequential(
         nn.Upsample(scale_factor=2, mode='nearest'),
         conv1x1(inplanes, planes),
         norm_layer(planes),
     )
Example #30
    def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1,
                 base_width=64, dilation=1, norm_layer=None, scale=4):
        super(BottleneckV4, self).__init__()

        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        width = int(planes * (base_width / 64.)) * groups
        # Both self.conv2 and self.downsample layers downsample the input when stride != 1
        self.conv1 = conv1x1(inplanes, width)
        self.bn1 = norm_layer(width)

        # self.conv2 = conv3x3(width, width, stride, groups, dilation)
        # self.bn2 = norm_layer(width)
        self.shuffleblock = ShuffleBlockV1(scale)

        self.scale = scale
        self.each_scale_planes = width // scale
        self.conv2 = nn.ModuleList()
        for i in range(scale):
            self.conv2.append(nn.Sequential(conv3x3(self.each_scale_planes, self.each_scale_planes, stride, groups, dilation),
                                            norm_layer(self.each_scale_planes),
                                            nn.ReLU(inplace=True)))

        self.conv3 = conv1x1(width, planes * self.expansion)
        self.bn3 = norm_layer(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

        self.seblock = SeBlock(planes * self.expansion)
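
The ModuleList of per-scale convolutions implies a forward pass that splits the width into scale chunks, convolves each chunk independently, and concatenates the results, group-convolution style. The repository's forward is not shown; the sketch below is a guess at its shape (ShuffleBlockV1 and SeBlock are project-specific modules):

    def forward(self, x):  # hedged sketch, not the repository's code
        identity = x
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.shuffleblock(out)                  # mix channels across groups
        chunks = torch.chunk(out, self.scale, dim=1)  # `scale` groups of each_scale_planes
        out = torch.cat([conv(c) for conv, c in zip(self.conv2, chunks)], dim=1)
        out = self.bn3(self.conv3(out))
        out = self.seblock(out)
        if self.downsample is not None:
            identity = self.downsample(x)
        return self.relu(out + identity)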