def __call__(self, x):
    """Residual block with an identity skip: SELU(branch2b(SELU(branch2a(x))) + x)."""
    out = self.res_branch2a(x)
    out = selu(out)
    out = self.res_branch2b(out)
    # Identity shortcut: add the block input back before the final activation.
    out += x
    return selu(out)
def fwd(self, x):
    """Forward pass of the MLP: ReLU on the input layer, SELU on the four
    hidden layers, and a linear (no activation) output layer."""
    h = F.relu(self.l_input(x))
    # The hidden stack applies the same selu(layer(h)) step four times.
    for hidden_layer in (self.l0, self.l1, self.l2, self.l3):
        h = F.selu(hidden_layer(h))
    return self.l_output(h)
def __call__(self, x):
    """Residual block with a projection shortcut (res_branch1) on the skip path."""
    shortcut = self.res_branch1(x)
    out = selu(self.res_branch2a(x))
    out = self.res_branch2b(out)
    # Combine projected shortcut with the main branch, then activate.
    return selu(shortcut + out)
def check_backward(self, x_data, y_grad):
    """Numerically verify SELU gradients, forcing float64 for the check."""
    def apply_selu(x):
        return functions.selu(x, alpha=self.alpha, scale=self.scale)

    gradient_check.check_backward(
        apply_selu, x_data, y_grad, dtype=numpy.float64,
        **self.check_backward_options)
def __call__(self, state):
    """Compute Q-values for all actions from a batch of state vectors.

    Note on activation choice (translated from the original Japanese
    comment): when the reward scheme can make Q-values negative, F.relu
    is unsuitable because it cannot output negative values; pick an
    activation that can (e.g. F.leaky_relu), which also helps against
    vanishing gradients.

    state: 2-D array, one state vector per row, laid out along axis 1 as
        [twn_status | ray | eb_status]  -- assumed from the slicing below;
        TODO confirm against the caller.
    Returns a chainer.Variable holding the Q-values of all actions.
    """
    state32 = state.astype(np.float32)
    nrow = state32.shape[0]
    # Column boundaries of the three concatenated sections, computed once.
    ray_begin = self.n_size_twn_status
    ray_end = ray_begin + self.num_ray
    eb_end = ray_end + self.n_size_eb_status
    # state32 is already float32, so no further astype on the slices is needed.
    twn_status = chainer.Variable(state32[:, 0:ray_begin])
    x_ray = state32[:, ray_begin:ray_end]
    eb_status = chainer.Variable(state32[:, ray_end:eb_end])
    x = x_ray.reshape(nrow, self.num_ray)
    h1 = self.ml1(x)
    h3_c = F.concat((twn_status, h1, eb_status), axis=1)
    h4 = F.selu(self.l4(h3_c))
    h7 = self.ml5(h4)
    # Keep intermediates for inspection/debugging by callers.
    self.debug_info = (h7, h1, h3_c, h4)
    return h7
def check_forward(self, x_data):
    """Compare functions.selu against a vectorized NumPy reference."""
    y = functions.selu(
        chainer.Variable(x_data), alpha=self.alpha, scale=self.scale)
    self.assertEqual(y.data.dtype, self.dtype)
    # SELU definition: scale * (x if x >= 0 else alpha * (exp(x) - 1)).
    negative_branch = self.alpha * (numpy.exp(self.x) - 1)
    expected = numpy.where(self.x >= 0, self.x, negative_branch)
    expected *= self.scale
    testing.assert_allclose(expected, y.data, **self.check_forward_options)
def check_forward(self, x_data):
    """Forward test: SELU output must match the elementwise definition."""
    x = chainer.Variable(x_data)
    y = functions.selu(x, alpha=self.alpha, scale=self.scale)
    self.assertEqual(y.data.dtype, self.dtype)
    # Positive inputs pass through; negatives follow alpha * (exp(x) - 1).
    is_positive = self.x >= 0
    expected = numpy.where(
        is_positive, self.x, self.alpha * (numpy.exp(self.x) - 1))
    expected *= self.scale
    testing.assert_allclose(
        expected, y.data, **self.check_forward_options)
def check_forward(self, x_data):
    """Forward test against a scalar (per-element) SELU reference."""
    y = functions.selu(
        chainer.Variable(x_data), alpha=self.alpha, scale=self.scale)
    self.assertEqual(y.data.dtype, self.dtype)
    expected = self.x.copy()
    # Walk the flattened copy; mutations through the ravel view update
    # `expected` in place. Scale is applied to every element; the
    # exponential branch only to negative ones.
    flat = expected.ravel()
    for k, value in enumerate(self.x.ravel()):
        if value < 0:
            flat[k] = self.alpha * (numpy.exp(flat[k]) - 1)
        flat[k] *= self.scale
    testing.assert_allclose(expected, y.data, **self.check_forward_options)
def check_forward(self, x_data):
    """Forward test: elementwise SELU reference computed index by index."""
    var = chainer.Variable(x_data)
    out = functions.selu(var, alpha=self.alpha, scale=self.scale)
    self.assertEqual(out.data.dtype, self.dtype)
    ref = self.x.copy()
    for pos in numpy.ndindex(*self.x.shape):
        # Negative entries take the exponential branch first; every entry
        # is then multiplied by the SELU scale.
        if self.x[pos] < 0:
            ref[pos] = self.alpha * (numpy.exp(ref[pos]) - 1)
        ref[pos] *= self.scale
    testing.assert_allclose(
        ref, out.data, **self.check_forward_options)
def check_backward(self, x_data, y_grad):
    """Numerically validate SELU gradients with the configured options."""
    selu_fn = lambda v: functions.selu(v, alpha=self.alpha, scale=self.scale)
    gradient_check.check_backward(
        selu_fn, x_data, y_grad, **self.check_backward_options)
def selu(self, x):
    """Apply the SELU activation (thin wrapper around F.selu)."""
    activated = F.selu(x)
    return activated
def __call__(self, x):
    """Convolution followed by the SELU activation."""
    return selu(self.conv(x))
def __call__(self, x):
    """Three-layer head: SELU on l1, ReLU on l2, linear output from l3."""
    hidden = F.selu(self.l1(x))
    hidden = F.relu(self.l2(hidden))
    return self.l3(hidden)
def forward(self, inputs, device):
    """Test-harness forward: apply SELU to the single input and return a
    one-element tuple, as the function test framework expects."""
    (x,) = inputs
    y = functions.selu(x, alpha=self.alpha, scale=self.scale)
    return y,