Example #1
from mxnet import autograd, np, npx

def test_18934_empty_leaky_relu():
    # Regression test (MXNet issue #18934): backward through leaky_relu
    # must not crash on a zero-size input.
    arr = np.random.rand(0, 2)
    arr_grad = np.empty_like(arr)

    autograd.mark_variables([arr], [arr_grad])
    with autograd.record():
        res = npx.leaky_relu(arr)
    res.backward()
Example #2
import math

from mxnet import np, npx

def forward(self, x):
    # Method of a GELU activation block: three interchangeable variants,
    # selected by self._mode.
    if self._mode == 'erf':
        # Exact GELU via MXNet's built-in operator (act_type='gelu').
        return npx.leaky_relu(x, act_type='gelu')
    elif self._mode == 'tanh':
        # Tanh approximation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))).
        return 0.5 * x * (1.0 + np.tanh(
            math.sqrt(2.0 / math.pi) * (x + 0.044715 * (x ** 3))))
    elif self._mode == 'sigmoid':
        # Sigmoid approximation: x * sigmoid(1.702 * x).
        return x * npx.sigmoid(1.702 * x)
    else:
        raise NotImplementedError(f'unknown GELU mode: {self._mode!r}')
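
The three branches above are the exact (erf-based) GELU and two standard closed-form approximations. As a quick plain-NumPy sanity check (independent of MXNet; the helper names here are mine, not from the snippet), the approximations can be compared against the exact form on a small grid:

import math

import numpy as np

def gelu_erf(x):
    # Exact GELU: x * Phi(x), where Phi is the standard normal CDF.
    return 0.5 * x * (1.0 + np.vectorize(math.erf)(x / math.sqrt(2.0)))

def gelu_tanh(x):
    # Tanh approximation, matching the 'tanh' branch of forward().
    return 0.5 * x * (1.0 + np.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x ** 3)))

def gelu_sigmoid(x):
    # Sigmoid approximation, matching the 'sigmoid' branch: x * sigmoid(1.702 * x).
    return x / (1.0 + np.exp(-1.702 * x))

x = np.linspace(-4.0, 4.0, 101)
print(np.abs(gelu_tanh(x) - gelu_erf(x)).max())     # tanh form tracks erf very closely
print(np.abs(gelu_sigmoid(x) - gelu_erf(x)).max())  # sigmoid form is coarser but cheaper

The tanh variant is the tighter fit; the sigmoid variant trades accuracy for a cheaper elementwise op, which is presumably why the block exposes the mode as a switch.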