Example #1
 def __init__(self,
              in_channels,
              out_channels,
              expansion=0.5,
              pooling_kernel_size=(5, 9, 13),
              csp_act_cfg=dict(type='Mish'),
              **kwargs):
     super(SPPCSP, self).__init__()
     hidden_channels = int(2 * out_channels * expansion)  # hidden channels
     self.conv1 = Conv(in_channels,
                       hidden_channels,
                       kernel_size=1,
                       **kwargs)
     # cross-stage (shortcut) branch: plain conv without norm or activation
     self.conv2 = nn.Conv2d(in_channels, hidden_channels, 1, 1, bias=False)
     self.conv3 = Conv(hidden_channels,
                       hidden_channels,
                       kernel_size=3,
                       **kwargs)
     self.conv4 = Conv(hidden_channels,
                       hidden_channels,
                       kernel_size=1,
                       **kwargs)
     # SPP branches: parallel stride-1 max pooling; padding keeps spatial size
     self.maxpools = nn.ModuleList([
         nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2)
         for x in pooling_kernel_size
     ])
     self.conv5 = Conv(4 * hidden_channels,
                       hidden_channels,
                       kernel_size=1,
                       **kwargs)
     self.conv6 = Conv(hidden_channels,
                       hidden_channels,
                       kernel_size=3,
                       **kwargs)
     csp_norm_cfg = kwargs.get('norm_cfg', dict(type='BN')).copy()
     self.bn = build_norm_layer(csp_norm_cfg, 2 * hidden_channels)[-1]
     csp_act_cfg_ = csp_act_cfg.copy()
     # activations outside this list accept an `inplace` argument; default it on
     if csp_act_cfg_['type'] not in [
             'Tanh', 'PReLU', 'Sigmoid', 'HSigmoid', 'Swish'
     ]:
         csp_act_cfg_.setdefault('inplace', True)
     self.csp_act = build_activation_layer(csp_act_cfg_)
     self.conv7 = Conv(2 * hidden_channels,
                       out_channels,
                       kernel_size=1,
                       **kwargs)
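
A forward pass consistent with the channel arithmetic above (conv5 consumes the 4 * hidden_channels concatenation of conv4's output with the three pooled maps; bn and conv7 consume the 2 * hidden_channels concatenation of the two CSP branches) would plausibly look like the sketch below. It is inferred from the constructor, not taken from the source; torch is assumed to be imported in the module.

 def forward(self, x):
     # main branch: 1x1 -> 3x3 -> 1x1 convs, then SPP pooling and fusion
     x1 = self.conv4(self.conv3(self.conv1(x)))
     spp = torch.cat([x1] + [pool(x1) for pool in self.maxpools], dim=1)
     y1 = self.conv6(self.conv5(spp))
     # cross-stage branch: plain 1x1 conv on the input
     y2 = self.conv2(x)
     # fuse both branches, normalize, activate and project to out_channels
     return self.conv7(self.csp_act(self.bn(torch.cat((y1, y2), dim=1))))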
Example #2
 def __init__(self,
              in_channels,
              out_channels,
              repetition=1,
              shortcut=True,
              groups=1,
              expansion=0.5,
              csp_act_cfg=dict(type='Mish'),
              **kwargs):
     super(BottleneckCSP, self).__init__()
     hidden_channels = int(out_channels * expansion)  # hidden channels
     self.conv1 = Conv(in_channels,
                       hidden_channels,
                       kernel_size=1,
                       **kwargs)
     # plain 1x1 convs (no norm/activation): conv2 is the cross-stage shortcut,
     # conv3 projects the bottleneck output before the branches are concatenated
     self.conv2 = nn.Conv2d(in_channels, hidden_channels, 1, 1, bias=False)
     self.conv3 = nn.Conv2d(hidden_channels,
                            hidden_channels,
                            1,
                            1,
                            bias=False)
     self.conv4 = Conv(2 * hidden_channels,
                       out_channels,
                       kernel_size=1,
                       **kwargs)
     csp_norm_cfg = kwargs.get('norm_cfg', dict(type='BN')).copy()
     self.bn = build_norm_layer(csp_norm_cfg, 2 * hidden_channels)[-1]
     csp_act_cfg_ = csp_act_cfg.copy()
     if csp_act_cfg_['type'] not in [
             'Tanh', 'PReLU', 'Sigmoid', 'HSigmoid', 'Swish'
     ]:
         csp_act_cfg_.setdefault('inplace', True)
     self.csp_act = build_activation_layer(csp_act_cfg_)
     # stack of residual bottlenecks forming the main branch
     self.bottlenecks = nn.Sequential(*[
         Bottleneck(hidden_channels,
                    hidden_channels,
                    shortcut,
                    groups,
                    expansion=1.0,
                    **kwargs) for _ in range(repetition)
     ])
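
The matching forward pass would plausibly follow the usual CSP pattern (the 2 * hidden_channels fed to bn and conv4 come from concatenating the two branches). The following is a sketch inferred from the layer definitions, not the original implementation; torch is assumed to be imported.

 def forward(self, x):
     # main branch: 1x1 reduce, stacked bottlenecks, plain 1x1 conv
     y1 = self.conv3(self.bottlenecks(self.conv1(x)))
     # cross-stage branch: plain 1x1 conv on the input
     y2 = self.conv2(x)
     # concatenate branches, apply norm + activation, project to out_channels
     return self.conv4(self.csp_act(self.bn(torch.cat((y1, y2), dim=1))))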
Example #3
    def __init__(self,
                 *args,
                 equalized_lr_cfg=dict(gain=1., lr_mul=1.),
                 bias=True,
                 bias_init=0.,
                 act_cfg=None,
                 **kwargs):
        super().__init__()
        self.with_activation = act_cfg is not None
        # w/o bias in linear layer
        self.linear = EqualizedLRLinearModule(
            *args, bias=False, equalized_lr_cfg=equalized_lr_cfg, **kwargs)

        if equalized_lr_cfg is not None:
            self.lr_mul = equalized_lr_cfg.get('lr_mul', 1.)
        else:
            self.lr_mul = 1.

        # define bias outside linear layer
        if bias:
            self.bias = nn.Parameter(
                torch.zeros(self.linear.out_features).fill_(bias_init))
        else:
            self.bias = None

        if self.with_activation:
            act_cfg = deepcopy(act_cfg)
            if act_cfg['type'] == 'fused_bias':
                self.act_type = act_cfg.pop('type')
                assert self.bias is not None
                self.activate = partial(fused_bias_leakyrelu, **act_cfg)
            else:
                self.act_type = 'normal'
                self.activate = build_activation_layer(act_cfg)
        else:
            self.act_type = None
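
For context, a forward pass consistent with this constructor could look like the sketch below: the bias is kept outside the linear layer so that it can be scaled by lr_mul and, in the 'fused_bias' case, passed to fused_bias_leakyrelu together with the input. This is an inferred illustration, not the original code.

    def forward(self, x):
        x = self.linear(x)
        if self.with_activation and self.act_type == 'fused_bias':
            # fused op applies the (scaled) bias and leaky ReLU in one step
            x = self.activate(x, self.bias * self.lr_mul)
        elif self.bias is not None and self.with_activation:
            x = self.activate(x + self.bias * self.lr_mul)
        elif self.bias is not None:
            x = x + self.bias * self.lr_mul
        elif self.with_activation:
            x = self.activate(x)
        return x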