def forward(self, inputs, device):
    x, = inputs
    # Cast the pre-sampled slope to the input dtype and move it to the
    # device under test, then evaluate rrelu with that fixed r.
    r = self.r.astype(x.dtype)
    r = device.send(r)
    with chainer.using_config('train', self.train):
        y = functions.rrelu(x, l=self.l, u=self.u, r=r)
    return y,
def f(x):
    return functions.rrelu(
        x, self.l, self.u,
        r=r.astype(x.dtype))  # check_backward casts only x
def check_forward(self, x_data):
    x = chainer.Variable(x_data)
    xp = backend.get_array_module(x)
    with chainer.using_config('train', self.train):
        y, r = functions.rrelu(x, l=self.l, u=self.u, return_r=True)
    self.assertEqual(y.data.dtype, self.dtype)
    # Negative inputs are scaled by the returned slope r; non-negative
    # inputs pass through unchanged.
    expected = xp.where(x_data >= 0, x_data, x_data * r)
    testing.assert_allclose(
        expected, y.data, **self.check_forward_options)
def _check(self):
    r = self.r if self.specify_r else None
    with chainer.using_config('train', self.train):
        out, out_r = functions.rrelu(
            self.x, self.l, self.u, r=r, return_r=True)
        assert isinstance(out_r, type(out.array))
        if r is None:
            # r was sampled internally; it must match the output shape.
            assert out_r.shape == out.array.shape
        else:
            if chainer.config.train:
                # In training mode the user-supplied r is used as-is.
                assert out_r is r
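# A minimal, self-contained sketch (an illustration, not part of the test
# suite above) of the two config modes these checks exercise; the input
# values are assumptions chosen for the example.
import numpy as np
import chainer
from chainer import functions

x = np.array([[-1.0, 0.5], [2.0, -0.25]], dtype=np.float32)

# Training mode: each negative element is scaled by a slope r drawn
# uniformly from [l, u); return_r=True also returns the sampled slopes.
with chainer.using_config('train', True):
    y_train, r = functions.rrelu(x, l=0.125, u=1. / 3., return_r=True)

# Test mode: the slope is fixed to the midpoint (l + u) / 2, so the
# output is deterministic.
with chainer.using_config('train', False):
    y_test = functions.rrelu(x, l=0.125, u=1. / 3.)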
def prediction(self, x):
    x = Variable(x)
    # Build the two fingerprint representations and run both attention
    # branches over their concatenation.
    ecfp = self.build_ecfp(x)
    fcfp = self.build_fcfp(x)
    ecfp_fcfp = F.concat((ecfp, fcfp), axis=1)
    h1 = self.attention_layer1(ecfp_fcfp)
    attentions_1 = F.rrelu(self.attention_layer2(h1))
    attentions_2 = F.rrelu(self.attention_layer3(h1))
    attentions = F.concat((attentions_1, attentions_2), axis=1)
    attentions = F.softmax(attentions)
    # Split the normalized weights back into one attention map per branch.
    attention_ecfp, attention_fcfp = F.split_axis(attentions, 2, axis=1)
    attentioned_features = F.concat(
        (attention_ecfp * ecfp, attention_fcfp * fcfp), axis=1)
    pred = self.dnn(attentioned_features)
    return pred, attention_ecfp, attention_fcfp
def f(x):
    return functions.rrelu(x, self.l, self.u, r=r)
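# Hedged sketch of how a closure like f above is typically driven by
# chainer.gradient_check.check_backward: fixing r makes rrelu
# deterministic, so numerical and analytic gradients can be compared.
# The data setup below is an assumption for illustration.
import numpy as np
import chainer
from chainer import functions, gradient_check

l, u = 0.125, 1. / 3.
x_data = np.random.uniform(-1, 1, (3, 2)).astype(np.float32)
# Keep samples away from the kink at 0, where rrelu is not differentiable
# and numerical differentiation is unreliable.
x_data[np.abs(x_data) < 0.05] = 0.5
r = np.random.uniform(l, u, x_data.shape).astype(np.float32)
gy = np.random.uniform(-1, 1, x_data.shape).astype(np.float32)


def f(x):
    return functions.rrelu(x, l, u, r=r)


with chainer.using_config('train', True):
    gradient_check.check_backward(f, x_data, gy, atol=1e-4, rtol=1e-4)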