Example #1
def _init_weights(self, m):
    # Linear: truncated-normal weights, zero bias (when present);
    # LayerNorm: weight = 1, bias = 0.
    if isinstance(m, nn.Linear):
        trunc_normal_(m.weight)
        if m.bias is not None:
            zeros_(m.bias)
    elif isinstance(m, nn.LayerNorm):
        zeros_(m.bias)
        ones_(m.weight)
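The helpers `trunc_normal_`, `zeros_`, and `ones_` are not defined in the snippet; in PaddlePaddle code they are usually module-level instances of `paddle.nn.initializer` classes, and the hook is registered with `self.apply(self._init_weights)` inside the model's `__init__`. A minimal sketch of the assumed helpers (the std value is a common default, not taken from this source):

    from paddle.nn.initializer import Constant, TruncatedNormal

    # Assumed helpers: each initializer instance is callable and
    # overwrites the given parameter in place.
    trunc_normal_ = TruncatedNormal(std=0.02)
    zeros_ = Constant(value=0.0)
    ones_ = Constant(value=1.0)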
Example #2
def _initialize(self):
    # Conv2D: Kaiming-normal weights; BatchNorm2D: weight = 1, bias = 0.
    for m in self.sublayers():
        if isinstance(m, nn.Conv2D):
            kaiming_normal_(m.weight)
        elif isinstance(m, nn.BatchNorm2D):
            ones_(m.weight)
            zeros_(m.bias)
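Here `kaiming_normal_` is likewise assumed to be a `paddle.nn.initializer.KaimingNormal` instance, and `_initialize` would typically be called at the end of the model's `__init__`. A sketch under that assumption:

    from paddle.nn.initializer import Constant, KaimingNormal

    # Assumed helpers for the loop above (Paddle's KaimingNormal
    # uses fan-in scaling by default)
    kaiming_normal_ = KaimingNormal()
    ones_ = Constant(value=1.0)
    zeros_ = Constant(value=0.0)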
Example #3
    def __init__(
        self,
        levels,
        channels,
        output_stride=32,
        in_chans=3,
        cardinality=1,
        base_width=64,
        block=DlaBottleneck,
        residual_root=False,
        drop_rate=0.0,
        global_pool="avg",
        class_dim=1000,
        with_pool=True,
    ):
        super(DLA, self).__init__()
        self.channels = channels
        self.class_dim = class_dim
        self.with_pool = with_pool
        self.cardinality = cardinality
        self.base_width = base_width
        self.drop_rate = drop_rate
        assert output_stride == 32  # FIXME support dilation

        # Stem: 7x7 conv (stride 1) + BN + ReLU
        self.base_layer = nn.Sequential(
            nn.Conv2D(
                in_chans,
                channels[0],
                kernel_size=7,
                stride=1,
                padding=3,
                bias_attr=False,
            ),
            nn.BatchNorm2D(channels[0]),
            nn.ReLU(),
        )
        # Levels 0 and 1 are plain conv stages (level1 downsamples by 2)
        self.level0 = self._make_conv_level(channels[0], channels[0],
                                            levels[0])
        self.level1 = self._make_conv_level(channels[0],
                                            channels[1],
                                            levels[1],
                                            stride=2)
        # Levels 2-5 are hierarchical DLA trees; shared kwargs:
        cargs = dict(cardinality=cardinality,
                     base_width=base_width,
                     root_residual=residual_root)
        self.level2 = DlaTree(levels[2],
                              block,
                              channels[1],
                              channels[2],
                              2,
                              level_root=False,
                              **cargs)
        self.level3 = DlaTree(levels[3],
                              block,
                              channels[2],
                              channels[3],
                              2,
                              level_root=True,
                              **cargs)
        self.level4 = DlaTree(levels[4],
                              block,
                              channels[3],
                              channels[4],
                              2,
                              level_root=True,
                              **cargs)
        self.level5 = DlaTree(levels[5],
                              block,
                              channels[4],
                              channels[5],
                              2,
                              level_root=True,
                              **cargs)
        self.feature_info = [
            # rare to have a meaningful stride 1 level
            dict(num_chs=channels[0], reduction=1, module="level0"),
            dict(num_chs=channels[1], reduction=2, module="level1"),
            dict(num_chs=channels[2], reduction=4, module="level2"),
            dict(num_chs=channels[3], reduction=8, module="level3"),
            dict(num_chs=channels[4], reduction=16, module="level4"),
            dict(num_chs=channels[5], reduction=32, module="level5"),
        ]

        self.num_features = channels[-1]

        # Optional global pooling and 1x1-conv classifier head
        if with_pool:
            self.global_pool = nn.AdaptiveAvgPool2D(1)

        if class_dim > 0:
            self.fc = nn.Conv2D(self.num_features, class_dim, 1)

        # He-style normal init for convs (std = sqrt(2 / fan_out),
        # fan_out = k*k*out_channels); BN affine set to identity
        for m in self.sublayers():
            if isinstance(m, nn.Conv2D):
                n = m._kernel_size[0] * m._kernel_size[1] * m._out_channels
                normal_ = Normal(mean=0.0, std=math.sqrt(2.0 / n))
                normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2D):
                ones_(m.weight)
                zeros_(m.bias)
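For orientation, a hypothetical construction call; the level/channel values below mirror a common DLA configuration and are illustrative, not taken from this snippet (`DlaTree` and `DlaBottleneck` come from the same module as `DLA`):

    # Hypothetical: roughly a DLA-60-style configuration
    model = DLA(levels=(1, 1, 1, 2, 3, 1),
                channels=(16, 32, 128, 256, 512, 1024),
                block=DlaBottleneck,
                class_dim=1000)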
Example #4
def _init_weights(self, m):
    # LayerNorm: weight = 1, bias = 0.
    if isinstance(m, nn.LayerNorm):
        zeros_(m.bias)
        ones_(m.weight)
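As in Example #1, the hook only takes effect once registered; a minimal sketch, assuming the model subclasses `paddle.nn.Layer`:

    # Inside the model's __init__, after all sublayers are built:
    self.apply(self._init_weights)  # visits self and every sublayer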