Example #1
    def check_backward(self, x_data, y_grad):
        # Forward and backward pass through ELU with the configured alpha.
        x = chainer.Variable(x_data)
        y = elu(x, alpha=self.alpha)
        y.grad = y_grad
        y.backward()

        # Compare the analytic gradient in x.grad against a numerical one
        # computed from the forward function alone.
        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, = gradient_check.numerical_grad(f, (x.data,), (y.grad,))

        gradient_check.assert_allclose(gx, x.grad)
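
The numerical gradient above is, in essence, a central-difference estimate contracted with the upstream gradient. A minimal NumPy-only sketch of that idea (numerical_grad_sketch is a hypothetical helper, not the chainer.gradient_check implementation), assuming an elementwise forward function:

import numpy as np

def numerical_grad_sketch(f, x, gy, eps=1e-3):
    # grad[i] ~= sum(gy * (f(x + eps*e_i) - f(x - eps*e_i))) / (2 * eps)
    grad = np.zeros_like(x)
    for i in np.ndindex(x.shape):
        orig = x[i]
        x[i] = orig + eps
        y_plus = f(x).copy()  # copy in case f returns a view of x
        x[i] = orig - eps
        y_minus = f(x)
        x[i] = orig
        grad[i] = ((y_plus - y_minus) * gy).sum() / (2 * eps)
    return grad

# Example: the derivative of x**2 is 2*x.
x = np.array([-1.0, 0.5, 2.0])
print(numerical_grad_sketch(lambda v: v ** 2, x, np.ones_like(x)))  # ~ [-2., 1., 4.]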
Example #2
    def check_forward(self, x_data):
        x = chainer.Variable(x_data)
        y = elu(x, alpha=self.alpha)
        self.assertEqual(y.data.dtype, numpy.float32)

        # ELU keeps non-negative inputs unchanged and maps each negative
        # input x to alpha * (exp(x) - 1).
        expected = self.x.copy()
        for i in numpy.ndindex(self.x.shape):
            if self.x[i] < 0:
                expected[i] = self.alpha * (numpy.exp(expected[i]) - 1)

        gradient_check.assert_allclose(expected, y.data)
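
The loop above spells out the ELU definition under test: f(x) = x for x >= 0 and f(x) = alpha * (exp(x) - 1) for x < 0. A vectorized NumPy sketch of that same formula, for reference only (elu_reference is a hypothetical name, not the elu module imported by the tests):

import numpy as np

def elu_reference(x, alpha=1.0):
    # Identity for non-negative inputs; alpha * (exp(x) - 1) for negative ones.
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1))

x = np.array([-2.0, -0.5, 0.0, 1.5], dtype=np.float32)
print(elu_reference(x))  # ~ [-0.8647, -0.3935, 0., 1.5]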
Example #3
import numpy as np
from chainer import Variable
import matplotlib.pyplot as plt

from elu import elu


# Plot the ELU curve over [-10, 10) with the default alpha.
xs = np.arange(-10, 10, 0.01, dtype=np.float32)
ys = elu(Variable(xs)).data

plt.plot(xs, ys)
plt.show()