Example no. 1
    def _make_layer(self, planes, num_blocks, stride):
        layers = []
        for i in range(num_blocks):
            s = stride if i == 0 else 1
            downsample = None
            if self.in_planes != planes or s != 1:
                downsample = conv1x1(self.in_planes,
                                     planes * BasicBlock.expansion,
                                     stride=s)
            layers.append(
                BasicBlock(self.in_planes,
                           planes,
                           stride=s,
                           downsample=downsample,
                           norm_layer=nn.Identity))
            self.in_planes = planes * BasicBlock.expansion
        return nn.Sequential(*layers)
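For reference, the conv3x3 and conv1x1 helpers used throughout these examples follow the standard torchvision.models.resnet definitions:

import torch.nn as nn

def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
    # 3x3 convolution with padding (torchvision.models.resnet.conv3x3)
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=dilation, groups=groups, bias=False,
                     dilation=dilation)

def conv1x1(in_planes, out_planes, stride=1):
    # 1x1 convolution (torchvision.models.resnet.conv1x1)
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride,
                     bias=False)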
Example no. 2
    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = []
        layers.append(
            block(inplanes=self.inplanes,
                  planes=planes,
                  stride=stride,
                  downsample=downsample,
                  groups=self.groups,
                  base_width=self.base_width,
                  dilation=previous_dilation,
                  norm_layer=norm_layer))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups, base_width=self.base_width, dilation=self.dilation, norm_layer=norm_layer))

        return nn.Sequential(*layers)
Example no. 3
    def __init__(self, in_c, out_c, batchnorm=True, activation=True, k=3):
        super().__init__()
        if k == 3:
            self.conv = conv3x3(in_c, out_c)
        elif k == 1:
            self.conv = conv1x1(in_c, out_c)
        else:
            raise ValueError(f"unsupported kernel size k={k}")

        if batchnorm:
            self.bn = nn.BatchNorm2d(out_c)
        else:
            self.bn = nn.Identity()

        if activation:
            self.relu = nn.ReLU()
        else:
            self.relu = nn.Identity()
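The snippet above omits the forward pass; a minimal self-contained sketch, assuming the usual conv -> bn -> relu order (the ordering is an assumption, not confirmed by the source):

import torch
import torch.nn as nn
from torchvision.models.resnet import conv1x1, conv3x3

class ConvBnRelu(nn.Module):
    def __init__(self, in_c, out_c, batchnorm=True, activation=True, k=3):
        super().__init__()
        if k == 3:
            self.conv = conv3x3(in_c, out_c)
        elif k == 1:
            self.conv = conv1x1(in_c, out_c)
        else:
            raise ValueError(f"unsupported kernel size k={k}")
        self.bn = nn.BatchNorm2d(out_c) if batchnorm else nn.Identity()
        self.relu = nn.ReLU() if activation else nn.Identity()

    def forward(self, x):
        # Assumed order: convolution, then normalization, then activation
        return self.relu(self.bn(self.conv(x)))

print(ConvBnRelu(3, 64)(torch.randn(1, 3, 32, 32)).shape)  # torch.Size([1, 64, 32, 32])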
Example no. 4
    def __init__(self,
                 block,
                 layers,
                 num_classes=1000,
                 zero_init_residual=False,
                 groups=1,
                 width_per_group=64,
                 replace_stride_with_dilation=None,
                 norm_layer=None,
                 out_dim=128):
        super().__init__(block, layers, num_classes, zero_init_residual,
                         groups, width_per_group, replace_stride_with_dilation,
                         norm_layer)
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self.level_conv = ConvBnRelu(3, 64)
        '''
        # For reference:
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
                                       dilate=replace_stride_with_dilation[0])
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                       dilate=replace_stride_with_dilation[1])
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                       dilate=replace_stride_with_dilation[2])
        '''
        uplayers = []
        inplanes = 2048
        first = True
        div = [2, 2, 2, 4, 1]
        for i in range(5):
            uplayers.append(
                ReverseBottleneck(inplanes,
                                  inplanes // div[i],
                                  norm_layer=norm_layer,
                                  passthrough=not first))
            inplanes = inplanes // div[i]
            first = False
        self.uplayers = nn.ModuleList(uplayers)
        self.tail = nn.Sequential(conv3x3(128, 64), norm_layer(64), nn.ReLU(),
                                  conv1x1(64, out_dim))

        del self.fc  # Not used in this implementation and just consumes a ton of GPU memory.
Example no. 5
def get_network(nf, nf2, n_classes):
    """
    """
    ds = nn.Sequential(
        conv1x1(nf, nf2),
        nn.AvgPool2d(2),
    )
    resblock1 = BasicBlock(inplanes=nf, planes=nf2, stride=2, downsample=ds)
    resblock2 = BasicBlock(inplanes=nf2,
                           planes=nf2,
                           stride=2,
                           downsample=nn.AvgPool2d(2))
    fn = nn.Sequential(
        resblock1,
        resblock2,
        nn.AdaptiveAvgPool2d(1),
        Flatten(),
        nn.Linear(nf2, n_classes),
    )
    return fn
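A quick usage sketch for get_network (channel counts and input size are illustrative assumptions; any spatial size large enough for the two stride-2 blocks works):

import torch

net = get_network(nf=64, nf2=128, n_classes=10)
logits = net(torch.randn(2, 64, 32, 32))
print(logits.shape)  # torch.Size([2, 10])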
Example no. 6
    def _make_layer(self,
                    block,
                    planes: int,
                    blocks: int,
                    stride: int = 1,
                    dilate: bool = False) -> nn.Sequential:
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )
        layers = []

        sd_prob = 1.0 - self.prob_delta * self.layer_num
        layers.append(
            block(self.inplanes, planes, stride, downsample, self.groups,
                  self.base_width, previous_dilation, norm_layer,
                  self.dropout_prob, sd_prob))
        self.layer_num += 1

        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            sd_prob = 1.0 - self.prob_delta * self.layer_num
            layers.append(
                block(self.inplanes,
                      planes,
                      groups=self.groups,
                      base_width=self.base_width,
                      dilation=self.dilation,
                      norm_layer=norm_layer,
                      dropout_prob=self.dropout_prob,
                      stochastic_depth_prob=sd_prob))
            self.layer_num += 1

        return nn.Sequential(*layers)
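The sd_prob bookkeeping above implements a linearly decaying stochastic-depth survival probability across blocks. A standalone sketch of the schedule (total_blocks and final_keep_prob are assumed values for illustration):

# Block k survives with probability 1 - prob_delta * k.
total_blocks = 16        # e.g. a ResNet-50 has 16 bottleneck blocks
final_keep_prob = 0.5    # assumed survival probability of the deepest block
prob_delta = (1.0 - final_keep_prob) / (total_blocks - 1)
for layer_num in range(total_blocks):
    print(layer_num, round(1.0 - prob_delta * layer_num, 3))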
Example no. 7
    def _make_layer(
        self,
        block: Type[Union[BasicBlock, Bottleneck]],
        planes: int,
        blocks: int,
        stride: int = 1,
    ) -> nn.Sequential:
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                nn.BatchNorm2d(planes * block.expansion),
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        return nn.Sequential(*layers)
Example no. 8
    def _make_layer(self, block, planes, blocks, stride=1):
        """Creates a concatenation of blocks in the ResNet.

        This function is similar to the one in torchvision/resnets.
        https://pytorch.org/vision/0.8/_modules/torchvision/models/resnet.html

        Args:
            block: basic block to use (with one skip connection)
            planes: number of parallel planes
            blocks: number of sequential blocks
            stride: factor between input and output planes

        Returns:
            a sequence of blocks
        """
        norm_layer = BatchNorm2d
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = [
            block(self.inplanes, planes, stride, downsample, 1, 64, 1,
                  norm_layer)
        ]
        self.inplanes = planes * block.expansion
        layers += [
            block(
                self.inplanes,
                planes,
                groups=1,
                base_width=64,
                dilation=1,
                norm_layer=norm_layer,
            ) for _ in range(1, blocks)
        ]

        return Sequential(*layers)
Example no. 9
    def _make_layer(self,
                    block,
                    planes,
                    blocks,
                    stride=1,
                    dilate=False,
                    multi_grid=None):
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation

        if multi_grid is None:
            multi_grid = [1 for _ in range(blocks)]
        else:
            assert len(multi_grid) == blocks

        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = []
        layers.append(
            block(self.inplanes, planes, stride, downsample, self.groups,
                  self.base_width, previous_dilation * multi_grid[0],
                  norm_layer))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(
                block(self.inplanes,
                      planes,
                      groups=self.groups,
                      base_width=self.base_width,
                      dilation=self.dilation * multi_grid[i],
                      norm_layer=norm_layer))

        return nn.Sequential(*layers)
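The multi_grid argument mirrors the multi-grid dilation scheme used in DeepLab-style segmentation backbones: the last stage keeps stride 1 while each block's dilation is scaled by a different factor. An illustrative call from a model's __init__ (hypothetical, assuming a Bottleneck-based ResNet that defines this method):

# Build the final stage with stride folded into dilation and per-block
# dilation multipliers 1x, 2x, 4x.
self.layer4 = self._make_layer(Bottleneck, 512, 3, stride=1, dilate=True,
                               multi_grid=[1, 2, 4])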
Example no. 10
    def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(*[
                nn.AvgPool2d(2, 2, ceil_mode=True, count_include_pad=False),
                conv1x1(self.inplanes, planes * block.expansion, stride=1),
                norm_layer(planes * block.expansion),
            ])

        layers = []
        layers.append(
            block(
                self.inplanes,
                planes,
                stride,
                downsample,
                self.groups,
                self.base_width,
                previous_dilation,
                norm_layer,
            ))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(
                    self.inplanes,
                    planes,
                    groups=self.groups,
                    base_width=self.base_width,
                    dilation=self.dilation,
                    norm_layer=norm_layer,
                ))

        return nn.Sequential(*layers)
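This variant replaces the usual stride-2 1x1 projection with an average pool followed by a stride-1 1x1 conv, a tweak often called "ResNet-D" (from the "Bag of Tricks" paper): a strided 1x1 conv reads only one input pixel in four, while pooling first lets every pixel contribute. A self-contained shape check with assumed sizes:

import torch
import torch.nn as nn

x = torch.randn(1, 64, 56, 56)

# Plain torchvision shortcut: stride-2 1x1 conv skips 3 of every 4 pixels.
plain = nn.Conv2d(64, 128, kernel_size=1, stride=2, bias=False)

# Pool-then-project shortcut, as in the example above.
tweaked = nn.Sequential(
    nn.AvgPool2d(2, 2, ceil_mode=True, count_include_pad=False),
    nn.Conv2d(64, 128, kernel_size=1, stride=1, bias=False),
)

print(plain(x).shape, tweaked(x).shape)  # both torch.Size([1, 128, 28, 28])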
Example no. 11
    def __init__(self, layout, num_classes=20, anchors=None, lambda_noobj=0.5, lambda_coords=5.,
                 backbone_norm_layer=None):

        super().__init__()

        # Priors computed using K-means
        if anchors is None:
            anchors = torch.tensor([[1.08, 1.19], [3.42, 4.41], [6.63, 11.38], [9.42, 5.11], [16.62, 10.52]])
        self.num_classes = num_classes

        self.backbone = DarknetBodyV2(layout, True, backbone_norm_layer)

        self.reorg_layer = ConcatDownsample2d(scale_factor=2)

        self.block5 = nn.Sequential(
            conv3x3(layout[-1][0], layout[-1][0]),
            nn.BatchNorm2d(layout[-1][0]),
            nn.LeakyReLU(0.1, inplace=True),
            conv3x3(layout[-1][0], layout[-1][0]),
            nn.BatchNorm2d(layout[-1][0]),
            nn.LeakyReLU(0.1, inplace=True))

        self.block6 = nn.Sequential(
            conv3x3(layout[-1][0] + layout[-2][0] * 2 ** 2, layout[-1][0]),
            nn.BatchNorm2d(layout[-1][0]),
            nn.LeakyReLU(0.1, inplace=True))

        # Each box has P_objectness, 4 coords, and score for each class
        self.head = conv1x1(layout[-1][0], anchors.shape[0] * (5 + num_classes))

        # Register anchor priors as a persistent buffer
        self.register_buffer('anchors', anchors)

        # Loss coefficients
        self.lambda_noobj = lambda_noobj
        self.lambda_coords = lambda_coords

        init_module(self, 'leaky_relu')
Example no. 12
def _make_layer(block,
                in_dim,
                out_dim,
                blocks,
                stride=1,
                norm_layer=None,
                is_first=False,
                is_last=False):
    if norm_layer is None:
        norm_layer = nn.BatchNorm2d

    if is_last:
        return [
            nn.AvgPool2d(in_dim[1:]),
            Flatten(),
            nn.Linear(in_dim[0], out_dim)
        ]

    in_planes = in_dim[0]
    planes = out_dim[0]
    if is_first:
        return [conv3x3(in_planes, planes), norm_layer(planes), nn.ReLU()]

    downsample = None
    if stride != 1 or in_planes != planes * block.expansion:
        downsample = nn.Sequential(
            conv1x1(in_planes, planes * block.expansion, stride),
            norm_layer(planes * block.expansion),
        )

    layers = []
    layers.append(block(in_planes, planes, stride, downsample, norm_layer))
    in_planes = planes * block.expansion
    for _ in range(1, blocks):
        layers.append(block(in_planes, planes, norm_layer=norm_layer))

    return layers
Example no. 13
    def _make_layer(self, block, planes, blocks, stride=1):
        norm_layer = nn.BatchNorm2d
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = []
        layers.append(
            block(self.inplanes, planes, stride, downsample, self.groups,
                  self.base_width, self.dilation, norm_layer))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(
                block(self.inplanes,
                      planes,
                      groups=self.groups,
                      base_width=self.base_width,
                      dilation=self.dilation,
                      norm_layer=norm_layer))

        return nn.Sequential(*layers)
Example no. 14
    def _make_layer(self, block: Type[Union[BasicBlock, Bottleneck]], planes: int, blocks: int,
                    stride: int = 1, dilate: bool = False) -> nn.Sequential:
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample, self.groups,
                            self.base_width, previous_dilation, norm_layer))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, groups=self.groups,
                                base_width=self.base_width, dilation=self.dilation,
                                norm_layer=norm_layer))

        return nn.Sequential(*layers)
Example no. 15
    def _make_layer(self,
                    block,
                    cbamblock,
                    planes,
                    blocks,
                    withCAM,
                    withSAM,
                    stride=1,
                    dilate=False,
                    redr=16,
                    camflag='full',
                    samsize=7,
                    samflag='full',
                    samplanes=None):
        norm_layer = self._norm_layer
        downsample = None
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = torch.nn.Sequential(
                conv1x1(self.inplanes, planes * block.expansion, stride),
                norm_layer(planes * block.expansion),
            )

        layer = []

        if ((not withCAM) and (not withSAM)):
            layer.append(
                block(self.inplanes, planes, stride, downsample, self.groups,
                      self.base_width, previous_dilation, norm_layer))
        else:
            _block = cbamblock(self.inplanes, planes, withCAM, withSAM, stride,
                               downsample, self.groups, self.base_width,
                               previous_dilation, norm_layer, redr, camflag,
                               samsize, samflag, samplanes)
            layer.append(_block)
            self.cbamlayer.append(_block.cbamblock)

        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            if ((not withCAM) and (not withSAM)):
                layer.append(
                    block(self.inplanes,
                          planes,
                          groups=self.groups,
                          base_width=self.base_width,
                          dilation=self.dilation,
                          norm_layer=norm_layer))
            else:
                _block = cbamblock(self.inplanes,
                                   planes,
                                   withCAM,
                                   withSAM,
                                   groups=self.groups,
                                   base_width=self.base_width,
                                   dilation=self.dilation,
                                   norm_layer=norm_layer,
                                   redr=redr,
                                   camflag=camflag,
                                   samsize=samsize,
                                   samflag=samflag,
                                   samplanes=samplanes)
                layer.append(_block)
                self.cbamlayer.append(_block.cbamblock)

        return torch.nn.Sequential(*layer)
Example no. 16
def _make_layer(block,
                in_dim,
                out_dim,
                blocks,
                stride=1,
                pool=None,
                pool_size=None,
                norm_layer=None,
                is_first=False,
                is_last=False,
                end_act=True,
                n_blocks=None):

    if n_blocks is None:
        n_blocks = 1 if is_first or is_last else blocks
    if stride is None:
        stride = []
    elif isinstance(stride, int):
        stride = [stride] + [1] * (n_blocks - 1)
    if isinstance(in_dim[0], int):
        in_dim = [in_dim] + [out_dim] * (n_blocks - 1)
    if isinstance(out_dim, int):
        out_dim = [out_dim]
    elif isinstance(out_dim[0], int):
        out_dim = [out_dim] * n_blocks
    if pool_size is None or isinstance(pool_size, int):
        pool_size = [None] * (n_blocks - 1) + [pool_size]
    assert len(stride) == n_blocks or is_last and len(stride) == n_blocks - 1

    layers = []

    if is_first:
        in_size, in_dim = in_dim[0], in_dim[1:]
        in_planes = in_size[0]
        out_size, out_dim = out_dim[0], out_dim[1:]
        out_planes = out_size[0]
        s, stride = stride[0], stride[1:]
        p, pool_size = pool_size[0], pool_size[1:]
        layers.extend([
            conv3x3(in_planes, out_planes),
            get_norm_layer(norm_layer, out_size)
        ])
        if end_act:
            layers.append(nn.ReLU())
    assert len(stride) <= len(in_dim) and len(stride) <= len(out_dim)
    for l in range(len(stride)):
        in_s, in_dim = in_dim[0], in_dim[1:]
        out_s, out_dim = out_dim[0], out_dim[1:]
        s, stride = stride[0], stride[1:]
        downsample = None
        if s != 1 or in_s[0] != out_s[0]:
            downsample = nn.Sequential(
                Contiguousize(),
                conv1x1(in_s[0], out_s[0], s),
                get_norm_layer(norm_layer, out_s),
            )

        layers.append(
            block(in_s, out_s, s, downsample, norm_layer, end_act
                  or l < blocks - 1))

        p, pool_size = pool_size[0], pool_size[1:]
        if p is not None:
            layers.append(nn.ReLU())
            layers.append(get_pool_l(pool, p))

    assert not stride
    assert not any(pool_size)

    if is_last:
        in_size, in_dim = in_dim[0], in_dim[1:]
        in_planes = in_size[0]
        out_size, out_dim = out_dim[0], out_dim[1:]
        if not isinstance(out_size, int):
            assert len(out_size) == 1
            out_size = out_size[0]
        assert len(in_size) == 3
        layers.extend([Flatten(), nn.Linear(in_planes, out_size)])

    return layers
Example no. 17
def _downsample(inplanes, outplanes, stride):
    return torch.nn.Sequential(
        conv1x1(inplanes, outplanes, stride),
        torch.nn.BatchNorm2d(outplanes),
    )
Example no. 18
    def _downsample(self, in_planes, out_planes, stride):
        return nn.Sequential(conv1x1(in_planes, out_planes, stride),
                             nn.BatchNorm2d(out_planes))
Example no. 19
def get_downsample(channels_in, channels_out, stride=1):
    """Standart resnet normalziation. Same as in torchvision.resnet"""
    return nn.Sequential(resnet.conv1x1(channels_in, channels_out, stride),
                         nn.BatchNorm2d(channels_out))
Example no. 20
def make_downsample(inplanes, planes, stride=2):
    downsample = nn.Sequential(conv1x1(inplanes, planes, stride),
                               nn.BatchNorm2d(planes))
    return downsample
Example no. 21
    def __init__(self, inplanes, planes, stride):
        super(DownSample, self).__init__()
        self.downsample = nn.Sequential(
            resnet.conv1x1(inplanes, planes, stride),
            nn.BatchNorm2d(planes),
        )
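Examples 17-21 all build the same projection shortcut: a strided 1x1 convolution to match channels, followed by BatchNorm. A quick shape check (sizes are assumptions for illustration):

import torch
import torch.nn as nn
from torchvision.models.resnet import conv1x1

ds = nn.Sequential(conv1x1(64, 256, stride=2), nn.BatchNorm2d(256))
print(ds(torch.randn(1, 64, 56, 56)).shape)  # torch.Size([1, 256, 28, 28])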