Example #1
 def __init__(self, act_func: str, **kwargs):
     super(Activation, self).__init__()
     with self.name_scope():
         if act_func in ('relu', 'sigmoid', 'softrelu', 'softsign', 'tanh'):
             self.act = nn.Activation(act_func)
         elif act_func == 'leaky':
             self.act = nn.LeakyReLU(**kwargs)
         elif act_func == 'prelu':
             self.act = nn.PReLU(**kwargs)
         elif act_func == 'selu':
             self.act = nn.SELU()
         elif act_func == 'elu':
             self.act = nn.ELU(**kwargs)
         elif act_func == 'gelu':
             self.act = nn.GELU()
         elif act_func == 'relu6':
             self.act = ReLU6()
         elif act_func == 'hard_sigmoid':
             self.act = HardSigmoid()
         elif act_func == 'swish':
             self.act = nn.Swish()
         elif act_func == 'hard_swish':
             self.act = HardSwish()
         elif act_func == 'mish':
             self.act = Mish()
         else:
             raise NotImplementedError(
                 f"Not implemented activation: {act_func}")
Example #2
 def __init__(self, act_func, **kwargs):
     super(Activation, self).__init__(**kwargs)
     if act_func == "relu":
         self.act = nn.Activation('relu')
     elif act_func == "relu6":
         self.act = ReLU6()
     elif act_func == "hard_sigmoid":
         self.act = HardSigmoid()
     elif act_func == "swish":
         self.act = nn.Swish()
     elif act_func == "hard_swish":
         self.act = HardSwish()
     elif act_func == "leaky":
         self.act = nn.LeakyReLU(alpha=0.375)
     else:
         raise NotImplementedError
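Both examples also reference custom blocks (ReLU6, HardSigmoid, HardSwish, Mish) whose definitions are not part of the snippets. Below is a hedged sketch built from the standard formulas relu6(x) = min(max(x, 0), 6), hard_sigmoid(x) = relu6(x + 3) / 6, hard_swish(x) = x * hard_sigmoid(x), and mish(x) = x * tanh(softplus(x)); the projects' actual implementations may differ in detail:

from mxnet.gluon import nn

class ReLU6(nn.HybridBlock):
    def hybrid_forward(self, F, x):
        # Clamp activations into [0, 6].
        return F.clip(x, 0, 6)

class HardSigmoid(nn.HybridBlock):
    def __init__(self, **kwargs):
        super(HardSigmoid, self).__init__(**kwargs)
        self.relu6 = ReLU6()

    def hybrid_forward(self, F, x):
        # Piecewise-linear approximation of the sigmoid.
        return self.relu6(x + 3.0) / 6.0

class HardSwish(nn.HybridBlock):
    def __init__(self, **kwargs):
        super(HardSwish, self).__init__(**kwargs)
        self.hard_sigmoid = HardSigmoid()

    def hybrid_forward(self, F, x):
        # x * hard_sigmoid(x), as used in MobileNetV3.
        return x * self.hard_sigmoid(x)

class Mish(nn.HybridBlock):
    def hybrid_forward(self, F, x):
        # x * tanh(softplus(x)); MXNet calls softplus 'softrelu'.
        return x * F.tanh(F.Activation(x, act_type='softrelu'))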
Example #3
    def __init__(self,
                 cfg,
                 cls_ch_squeeze,
                 cls_ch_expand,
                 multiplier=1.,
                 classes=1000,
                 norm_kwargs=None,
                 last_gamma=False,
                 final_drop=0.,
                 use_global_stats=False,
                 name_prefix='',
                 norm_layer=BatchNorm):
        super(_MobileNetV3, self).__init__(prefix=name_prefix)
        norm_kwargs = norm_kwargs if norm_kwargs is not None else {}
        if use_global_stats:
            norm_kwargs['use_global_stats'] = True
        # initialize residual networks
        k = multiplier
        self.last_gamma = last_gamma
        self.norm_kwargs = norm_kwargs
        self.inplanes = 16

        with self.name_scope():
            self.features = nn.HybridSequential(prefix='')
            self.features.add(nn.Conv2D(channels=make_divisible(k * self.inplanes),
                                        kernel_size=3, padding=1, strides=2,
                                        use_bias=False, prefix='first-3x3-conv-conv2d_'))
            self.features.add(norm_layer(prefix='first-3x3-conv-batchnorm_',
                                         **norm_kwargs))
            self.features.add(HardSwish())
            for i, layer_cfg in enumerate(cfg):
                layer = self._make_layer(
                    kernel_size=layer_cfg[0],
                    exp_ch=make_divisible(k * layer_cfg[1]),
                    out_channel=make_divisible(k * layer_cfg[2]),
                    use_se=layer_cfg[3],
                    act_func=layer_cfg[4],
                    stride=layer_cfg[5],
                    prefix='seq-%d' % i,
                )
                self.features.add(layer)
            self.features.add(nn.Conv2D(channels=make_divisible(k * cls_ch_squeeze),
                                        kernel_size=1, padding=0, strides=1,
                                        use_bias=False, prefix='last-1x1-conv1-conv2d_'))
            self.features.add(norm_layer(prefix='last-1x1-conv1-batchnorm_',
                                         **norm_kwargs))
            self.features.add(HardSwish())
            self.features.add(nn.GlobalAvgPool2D())
            self.features.add(
                nn.Conv2D(channels=cls_ch_expand,
                          kernel_size=1,
                          padding=0,
                          strides=1,
                          use_bias=False,
                          prefix='last-1x1-conv2-conv2d_'))
            self.features.add(HardSwish())

            if final_drop > 0:
                self.features.add(nn.Dropout(final_drop))
            self.output = nn.HybridSequential(prefix='output_')
            with self.output.name_scope():
                self.output.add(
                    nn.Conv2D(in_channels=cls_ch_expand,
                              channels=classes,
                              kernel_size=1,
                              prefix='fc_'), nn.Flatten())
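Example #3 additionally depends on a make_divisible helper that is not shown. A hedged sketch, assuming the common MobileNet-style rounding of channel counts to a multiple of 8, together with an illustrative cfg row in the field order that the _make_layer call above reads:

import numpy as np

def make_divisible(x, divisible_by=8):
    # Assumed helper: round a width-scaled channel count up to a multiple of 8.
    # Exact rounding rules vary between MobileNet implementations.
    return int(np.ceil(x * 1.0 / divisible_by) * divisible_by)

# One cfg row per bottleneck block, consumed by _make_layer as
# [kernel_size, exp_size, out_channels, use_se, act_func, stride]:
example_cfg = [
    [3, 16, 16, True, 'relu', 2],    # illustrative values only
    [3, 72, 24, False, 'relu', 2],
]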