def test_008_rnn_no_input(self):
    """With zero input weights and no bias, the layer output is all zeros.

    The recurrent (state) weights are random, but since the state starts at
    zero and no input signal gets through, the output must stay zero.
    """
    input_w = np.zeros((self._SIZE, self._NFEATURES), dtype=sloika_dtype)
    state_w = np.random.normal(size=(self._SIZE, self._SIZE)).astype(sloika_dtype)
    network = nn.Recurrent(self._NFEATURES, self._SIZE)
    network.set_params({'iW': input_w, 'sW': state_w})
    run = network.compile()
    output = run(self.x)
    np.testing.assert_almost_equal(output, 0.0)
def test_007_rnn_no_state(self):
    """With zero state weights, the RNN degenerates to a feed-forward layer.

    A linear recurrent layer whose state-to-state weights are all zero should
    reproduce the plain affine result stored in self.res (iW @ x + b).
    """
    state_w = np.zeros((self._SIZE, self._SIZE), dtype=sloika_dtype)
    network = nn.Recurrent(self._NFEATURES, self._SIZE,
                           has_bias=True, fun=activation.linear)
    network.set_params({'iW': self.W, 'sW': state_w, 'b': self.b})
    run = network.compile()
    output = run(self.x)
    np.testing.assert_almost_equal(output, self.res, decimal=5)
def test_010_birnn_no_input_with_bias(self):
    """Forward and backward halves of a biRNN agree when input is ignored.

    Both directions use identical parameters and zero input weights, so the
    forward half of the output must equal the backward half read in reverse
    time order (first axis is presumably time — TODO confirm against nn.birnn).
    """
    input_w = np.zeros((self._SIZE, self._NFEATURES), dtype=sloika_dtype)
    state_w = np.random.normal(size=(self._SIZE, self._SIZE)).astype(sloika_dtype)
    params = {'iW': input_w, 'sW': state_w, 'b': self.b}

    fwd = nn.Recurrent(self._NFEATURES, self._SIZE,
                       has_bias=True, fun=activation.linear)
    fwd.set_params(params)
    bwd = nn.Recurrent(self._NFEATURES, self._SIZE,
                       has_bias=True, fun=activation.linear)
    bwd.set_params(params)

    network = nn.birnn(fwd, bwd)
    run = network.compile()
    output = run(self.x)
    np.testing.assert_almost_equal(output[:, :, :self._SIZE],
                                   output[::-1, :, self._SIZE:])
def test_009_rnn_no_input_with_bias(self):
    """Output follows the pure state recurrence when input weights are zero.

    With zero input weights, a linear layer with bias evolves as
    h[t] = h[t-1] @ sW.T + b; each timestep of the network output is checked
    against that recurrence computed by hand.
    """
    input_w = np.zeros((self._SIZE, self._NFEATURES), dtype=sloika_dtype)
    state_w = rvs(self._SIZE).astype(sloika_dtype)
    network = nn.Recurrent(self._NFEATURES, self._SIZE,
                           has_bias=True, fun=activation.linear)
    network.set_params({'iW': input_w, 'sW': state_w, 'b': self.b})
    run = network.compile()
    output = run(self.x)

    expected = np.zeros((self._NBATCH, self._SIZE), dtype=sloika_dtype)
    for step in range(self._NSTEP):
        expected = expected.dot(state_w.T) + self.b
        np.testing.assert_almost_equal(output[step], expected)
def setUp(self):
    """Create a biased recurrent layer (12 features in, 64 units) under test."""
    self.layer = nn.Recurrent(12, 64, has_bias=True)
def setUp(self):
    """Create a bias-free recurrent layer (12 features in, 64 units) under test."""
    self.layer = nn.Recurrent(12, 64)