def __init__(self,
              channels,
              reduction=16,
              act_layer=nn.ReLU,
              divisor=1,
              reduction_channels=None,
              gate_layer='sigmoid'):
     super(SEWithNorm, self).__init__()
     self.avg_pool = nn.AdaptiveAvgPool2d(1)
     reduction_channels = reduction_channels or make_divisible(
         channels // reduction, divisor=divisor)
     self.fc1 = nn.Conv2d(channels,
                          reduction_channels,
                          kernel_size=1,
                          padding=0,
                          bias=True)
     self.bn = nn.BatchNorm2d(reduction_channels)
     self.act = act_layer(inplace=True)
     self.fc2 = nn.Conv2d(reduction_channels,
                          channels,
                          kernel_size=1,
                          padding=0,
                          bias=True)
     self.gate = create_act_layer(gate_layer)
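The constructor above builds a squeeze-and-excitation block that inserts a BatchNorm between the two 1x1 convolutions. The forward pass is not part of the example; below is a minimal, self-contained sketch of how such a block is typically wired (squeeze by global average pooling, excite through fc1 -> bn -> act -> fc2 -> gate, then rescale the input channel-wise). The `SESketch` name, the plain `nn.ReLU`/`nn.Sigmoid` stand-ins for `create_act_layer`, and the `max(..., 1)` stand-in for `make_divisible` are assumptions for illustration, not the original module's forward.

import torch
import torch.nn as nn

class SESketch(nn.Module):
    """Hedged stand-in for an SE-with-norm block; not the original SEWithNorm."""
    def __init__(self, channels, reduction=16):
        super().__init__()
        reduction_channels = max(channels // reduction, 1)  # stand-in for make_divisible
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc1 = nn.Conv2d(channels, reduction_channels, kernel_size=1, bias=True)
        self.bn = nn.BatchNorm2d(reduction_channels)
        self.act = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(reduction_channels, channels, kernel_size=1, bias=True)
        self.gate = nn.Sigmoid()  # stand-in for create_act_layer('sigmoid')

    def forward(self, x):
        s = self.avg_pool(x)                # squeeze: (N, C, 1, 1)
        s = self.act(self.bn(self.fc1(s)))  # excite: reduce, normalize, activate
        s = self.gate(self.fc2(s))          # expand back to C channels, gate to (0, 1)
        return x * s                        # channel-wise rescaling of the input

x = torch.randn(2, 64, 8, 8)
print(SESketch(64)(x).shape)  # torch.Size([2, 64, 8, 8])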
Example 2
 def _run(x, act_layer=''):
     # `m` (the module under test) and `inplace` are captured from the
     # enclosing scope; they are not defined inside this helper.
     if act_layer:
         # replace act layer if set
         m.act = create_act_layer(act_layer, inplace=inplace)
     out = m(x)
     # reduce the output to a scalar so it can serve as a loss for backward()
     l = (out - 0).pow(2).sum()
     return l
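`_run` is a test helper: it optionally swaps the activation on a module `m` from the enclosing scope, runs a forward pass, and squares-and-sums the output into a scalar loss. The sketch below is a hedged guess at a surrounding harness that would make it runnable, assuming `_run`, `m`, and `inplace` live in the same scope; the `_ActWrapper` module, the input shape, the import location, and the 'swish' activation name are illustrative assumptions.

import torch
import torch.nn as nn
from timm.models.layers import create_act_layer  # assumed import location

inplace = True

class _ActWrapper(nn.Module):
    # minimal module exposing an `act` attribute that _run can swap out
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(8, 8)
        self.act = nn.ReLU(inplace=inplace)
    def forward(self, x):
        return self.act(self.fc(x))

m = _ActWrapper()
x = torch.randn(4, 8)
_run(x).backward()                     # default activation
_run(x, act_layer='swish').backward()  # swap the activation, then re-run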
Example 3
 def __init__(self,
              in_channels,
              out_channels,
              kernel_size,
              padding=0,
              stride=1,
              use_batchnorm=True,
              act_layer='relu'):
     conv = nn.Conv2d(
         in_channels,
         out_channels,
         kernel_size,
         stride=stride,
         padding=padding,
          bias=not use_batchnorm,
     )
     if act_layer == 'frelu':
         act = FReLU(out_channels)
     elif act_layer == 'dy_relu_b':
         act = DyReLUB(out_channels)
     elif act_layer == 'dy_relu_c':
         act = DyReLUC(out_channels)
     else:
         act = create_act_layer(act_layer)
     if use_batchnorm:
         bn = nn.BatchNorm2d(out_channels)
     else:
         bn = nn.Identity()
     super(Conv2dAct, self).__init__(conv, bn, act)
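Since the constructor hands `conv`, `bn`, and `act` to `super().__init__` in that order, `Conv2dAct` is presumably an `nn.Sequential` subclass that applies convolution, normalization, and activation in sequence. A hedged usage sketch under that assumption (`create_act_layer` and, for the other branches, `FReLU`/`DyReLUB`/`DyReLUC` must be importable in the surrounding module):

import torch

block = Conv2dAct(3, 16, kernel_size=3, padding=1, act_layer='relu')  # conv -> bn -> ReLU
x = torch.randn(1, 3, 32, 32)
print(block(x).shape)  # torch.Size([1, 16, 32, 32])

# disabling batchnorm enables the conv bias and swaps bn for nn.Identity
plain = Conv2dAct(3, 16, kernel_size=3, padding=1, use_batchnorm=False)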
Example 4
 def __init__(self, act_layer="relu", inplace=True):
     super(MLP, self).__init__()
     self.fc1 = nn.Linear(1000, 100)
     self.act = create_act_layer(act_layer, inplace=inplace)
     self.fc2 = nn.Linear(100, 10)
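The example stops at the constructor; the forward pass is not shown. Below is a minimal, hedged completion assuming the obvious wiring fc1 -> act -> fc2, plus a usage call ('leaky_relu' is one of the activation names `create_act_layer` accepts in timm):

import torch

class MLPSketch(MLP):
    # hedged: supplies the forward the example omits (fc1 -> act -> fc2)
    def forward(self, x):
        return self.fc2(self.act(self.fc1(x)))

mlp = MLPSketch(act_layer='leaky_relu')
print(mlp(torch.randn(4, 1000)).shape)  # torch.Size([4, 10])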