Example #1
 def __init__(self, act_func: str, **kwargs):
     super(Activation, self).__init__()
     with self.name_scope():
         if act_func in ('relu', 'sigmoid', 'softrelu', 'softsign', 'tanh'):
             self.act = nn.Activation(act_func)
         elif act_func == 'leaky':
             self.act = nn.LeakyReLU(**kwargs)
         elif act_func == 'prelu':
             self.act = nn.PReLU(**kwargs)
         elif act_func == 'selu':
             self.act = nn.SELU()
         elif act_func == 'elu':
             self.act = nn.ELU(**kwargs)
         elif act_func == 'gelu':
             self.act = nn.GELU()
         elif act_func == 'relu6':
             self.act = ReLU6()
         elif act_func == 'hard_sigmoid':
             self.act = HardSigmoid()
         elif act_func == 'swish':
             self.act = nn.Swish()
         elif act_func == 'hard_swish':
             self.act = HardSwish()
         elif act_func == 'mish':
             self.act = Mish()
         else:
             raise NotImplementedError(
                 f"Not implemented activation: {act_func}")
Example #2
 def __init__(self, output, drop_rate=0, activation='relu'):
     super(Dense, self).__init__()
     self.net = nn.Dense(units=output, flatten=False)
     if activation is None:
         self.act = None
     elif activation.lower() == 'selu':
         self.act = nn.SELU()
     else:
         self.act = nn.Activation(activation)
     self.drop = nn.Dropout(drop_rate) if drop_rate > 0 else None
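Assuming this wrapper's forward applies the dense layer, then the activation, then dropout (that method is not shown), a rough stand-in built only from stock Gluon blocks would be:

from mxnet import nd
from mxnet.gluon import nn

# Hypothetical equivalent of Dense(output=16, drop_rate=0.1, activation='selu').
net = nn.HybridSequential()
net.add(nn.Dense(16, flatten=False),
        nn.SELU(),
        nn.Dropout(0.1))
net.initialize()
out = net(nd.random.uniform(shape=(2, 8, 32)))  # (batch, seq, 32) -> (2, 8, 16)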
Example #3
    def __init__(self, act_type, r, skernel, dilation, channels, useReLU, useGlobal, asBackbone,
                 stride, downsample=False, in_channels=0, norm_layer=BatchNorm,
                 norm_kwargs=None, **kwargs):
        super(ResBlockV2ATAC, self).__init__(**kwargs)
        self.bn1 = norm_layer(**({} if norm_kwargs is None else norm_kwargs))
        self.conv1 = _conv3x3(channels, stride, in_channels)
        self.bn2 = norm_layer(**({} if norm_kwargs is None else norm_kwargs))
        self.conv2 = _conv3x3(channels, 1, channels)
        if downsample:
            self.downsample = nn.Conv2D(channels, 1, stride, use_bias=False, in_channels=in_channels)
        else:
            self.downsample = None

        if act_type == 'relu':
            self.msAA1 = nn.Activation('relu')
            self.msAA2 = nn.Activation('relu')
        elif act_type == 'prelu':
            self.msAA1 = nn.PReLU()
            self.msAA2 = nn.PReLU()
        elif act_type == 'elu':
            self.msAA1 = nn.ELU()
            self.msAA2 = nn.ELU()
        elif act_type == 'selu':
            self.msAA1 = nn.SELU()
            self.msAA2 = nn.SELU()
        elif act_type == 'gelu':
            self.msAA1 = nn.GELU()
            self.msAA2 = nn.GELU()
        elif act_type == 'swish':
            self.msAA1 = nn.Swish()
            self.msAA2 = nn.Swish()
        elif act_type == 'ChaATAC':
            self.msAA1 = ChaATAC(channels=in_channels, r=r, useReLU=useReLU, useGlobal=useGlobal)
            self.msAA2 = ChaATAC(channels=channels, r=r, useReLU=useReLU, useGlobal=useGlobal)
        else:
            raise ValueError("Unknown act_type in ResBlockV2ATAC")
Example #4
    def __init__(self, ctx=mx.cpu(), warmup=10, runs=50, inputs=None):
        # Set the default inputs.
        # Default data is (32, 3, 256, 256): batch_size=32 and a sample image of size 3*256*256.
        default_parameters = {"data": (32, 3, 256, 256),
                              "data_initializer": nd.normal,
                              "run_backward": True,
                              "dtype": "float32"}

        super().__init__(ctx=ctx, warmup=warmup, runs=runs, default_parameters=default_parameters,
                         custom_parameters=inputs)

        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=self.inputs["data"],
                                   dtype=self.inputs["dtype"],
                                   initializer=self.inputs["data_initializer"],
                                   attach_grad=self.inputs["run_backward"])

        self.block = nn.SELU()

        self.block.initialize(ctx=self.ctx)
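The timed body of such a benchmark is not shown here. A self-contained sketch of the forward and backward pass it would measure, using only stock MXNet calls:

import mxnet as mx
from mxnet import autograd, nd
from mxnet.gluon import nn

block = nn.SELU()
block.initialize(ctx=mx.cpu())
data = nd.normal(shape=(32, 3, 256, 256), ctx=mx.cpu())
data.attach_grad()
with autograd.record():
    out = block(data)
out.backward()      # gradient of SELU with respect to the input
mx.nd.waitall()     # wait for the asynchronous engine before timing stops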
Example #5
def test_activations_selu():
    act_layer = nn.SELU()
    out = act_layer(mx.np.random.uniform(size=(10,)))
    out.asnumpy()
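The test above only checks that the layer executes. A slightly stronger, hypothetical variant compares the output against the SELU definition selu(x) = scale*x for x > 0 and scale*alpha*(exp(x) - 1) otherwise, with alpha ≈ 1.6733 and scale ≈ 1.0507 (constants from the SELU paper), written against the legacy mx.nd API for simplicity:

import numpy as np
import mxnet as mx
from mxnet.gluon import nn

def test_activations_selu_formula():
    alpha, scale = 1.6732632423543772, 1.0507009873554805
    act = nn.SELU()
    act.initialize()
    x = mx.nd.random.uniform(-3, 3, shape=(10,))
    xn = x.asnumpy()
    ref = scale * np.where(xn > 0, xn, alpha * (np.exp(xn) - 1))
    assert np.allclose(act(x).asnumpy(), ref, atol=1e-5)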
Example #6
    train_x = (-5, 5)
    test_x = np.arange(-20, 20, 0.1)

    n_model = 20
    n_batch = 5000
    batch_size = 64

    activations = {
        'ReLU': mx.nd.relu,
        'Sigmoid': mx.nd.sigmoid,
        'Tanh': mx.nd.tanh,
        'Relu6': lambda x: mx.nd.clip(mx.nd.relu(x), 0, 6),
        'LeakyRelu': mx.nd.LeakyReLU,
        'ELU': nn.ELU(),
        'SELU': nn.SELU(),
        'PReLU': nn.PReLU(),
        'Swish': nn.Swish(),
    }

    legends = []
    for act in activations:
        test_err = np.zeros_like(test_x)

        for i in range(n_model):
            print("Train:  %s %d/%d" % (act, i + 1, n_model))
            net = Net(act=activations[act])
            net.collect_params().initialize(mx.init.Xavier(), ctx=ctx)

            train(net, train_x[0], train_x[1], batch_size, n_batch)
            err = evaluate(net, test_x)
Example #7
 def __init__(self):
     super(SELUTest, self).__init__()
     from mxnet.gluon import nn
     with self.name_scope():
         self.conv1 = nn.Conv2D(3, 32)
         self.relu = nn.SELU()
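Only the constructor is shown; the omitted forward would presumably just chain the two layers. A sketch of that assumption (note that nn.Conv2D(3, 32) here means 3 output channels with a 32x32 kernel, so inputs must be at least 32 pixels per side):

 def forward(self, x):
     # Assumed composition: the 32x32 convolution followed by the SELU activation.
     return self.relu(self.conv1(x))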
Example #8
    def __init__(self, layers, channels, classes,
                 act_type, r, skernel, dilation, useReLU, useGlobal, act_layers, replace_act,
                 act_order, asBackbone, norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
        super(ResNet20V2ATAC, self).__init__(**kwargs)
        assert len(layers) == len(channels) - 1
        with self.name_scope():
            self.features = nn.HybridSequential(prefix='')
            self.features.add(norm_layer(scale=False, center=False,
                                         **({} if norm_kwargs is None else norm_kwargs)))
            self.features.add(nn.Conv2D(channels[0], 3, 1, 1, use_bias=False))

            in_channels = channels[0]
            for i, num_layer in enumerate(layers):
                stride = 1 if i == 0 else 2
                if act_order == 'bac':
                    if i + act_layers < len(channels):
                        tmp_act_type = replace_act
                    else:
                        tmp_act_type = act_type
                elif act_order == 'pre':
                    if i + 1 > act_layers:
                        tmp_act_type = replace_act
                    else:
                        tmp_act_type = act_type
                else:
                    raise ValueError('Unknown act_order')
                self.features.add(self._make_layer(
                    layers=num_layer, channels=channels[i+1], in_channels=in_channels,
                    stride=stride, stage_index=i+1, act_type=tmp_act_type, r=r, skernel=skernel,
                    dilation=dilation, useReLU=useReLU, useGlobal=useGlobal,
                    asBackbone=asBackbone, norm_layer=norm_layer, norm_kwargs=norm_kwargs
                ))
                in_channels = channels[i+1]

            self.features.add(norm_layer(**({} if norm_kwargs is None else norm_kwargs)))

            if act_order == 'bac':
                if act_layers <= 0:
                    tmp_act_type = replace_act
                else:
                    tmp_act_type = act_type
            elif act_order == 'pre':
                if act_layers >= 4:
                    tmp_act_type = act_type
                else:
                    tmp_act_type = replace_act
            else:
                raise ValueError('Unknown act_order')

            if tmp_act_type == 'relu':
                self.features.add(nn.Activation('relu'))
            elif tmp_act_type == 'prelu':
                self.features.add(nn.PReLU())
            elif tmp_act_type == 'elu':
                self.features.add(nn.ELU())
            elif tmp_act_type == 'selu':
                self.features.add(nn.SELU())
            elif tmp_act_type == 'gelu':
                self.features.add(nn.GELU())
            elif tmp_act_type == 'swish':
                self.features.add(nn.Swish())
            elif tmp_act_type == 'ChaATAC':
                self.features.add(ChaATAC(channels=in_channels, r=r, useReLU=useReLU,
                                          useGlobal=useGlobal))
            else:
                raise ValueError("Unknown act_type in ResBlockV2ATAC")

            self.features.add(nn.GlobalAvgPool2D())
            self.features.add(nn.Flatten())
            self.output = nn.Dense(classes, in_units=in_channels)