def test_bidirectional_rnn_returns_result_with_correct_shape(self):
    """Bidirectional wrapper output has shape (batch, steps, 2 * units)."""
    rnn = self.create_mdrnn(direction=Direction(1))
    rnn = MultiDirectional(rnn)
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; it was an
    # alias for the builtin float, so pass float directly.
    a = rnn.call(self.x, initial_state=self.initial_state, dtype=float)
    self.assertEqual((1, 3, 2), a.shape)
def test_returns_list_of_correct_length(self):
    """A multi-directional 2D RNN yields one output plus one state per direction."""
    setup = Rnn2dTestSetup(direction=Direction.south_east())
    multi_rnn = MultiDirectional(setup.make_rnn())
    result = multi_rnn.call(setup.make_input())
    # 1 element for output of RNN and 4 elements for states, 1 state per each direction
    self.assertEqual(5, len(result))
def test_bidirectional_rnn_returns_correct_result(self):
    """Bidirectional wrapper produces the known concatenated forward/backward values."""
    rnn = self.create_mdrnn(direction=Direction(1))
    rnn = MultiDirectional(rnn)
    # np.float was deprecated in NumPy 1.20 and removed in 1.24; it was an
    # alias for the builtin float, so pass float directly.
    a = rnn.call(self.x, initial_state=self.initial_state, dtype=float)
    expected_result = np.array([
        [2, 8],
        [4, 4],
        [8, 2]
    ]).reshape((1, 3, 2))
    np.testing.assert_almost_equal(expected_result, a.numpy(), 8)
def test_results(self):
    """Output and all 4 per-direction states match the setup's expected values."""
    setup = Rnn2dTestSetup(direction=Direction.south_east())
    multi_rnn = MultiDirectional(setup.make_rnn())
    expected = setup.get_expected_result_for_multi_directional_rnn()
    actual = multi_rnn.call(setup.make_input())
    # element 0 is the RNN output; elements 1-4 are the per-direction states
    for index in range(5):
        np.testing.assert_almost_equal(
            expected[index], actual[index].numpy(), 6)
def test_feed_multi_directional_rnn(self):
    """Feeding a 4-directional MDRNN concatenates features from all directions."""
    inner = MDRNN(units=16,
                  input_shape=(5, 4, 10),
                  activation='tanh',
                  return_sequences=True)
    rnn = MultiDirectional(inner)
    output = rnn(np.zeros((1, 5, 4, 10)))
    # 16 units per direction * 4 directions in the last axis
    self.assertEqual((1, 5, 4, 16 * 4), output.shape)
def make_rnns(self, return_sequences, return_state, go_backwards=False):
    """Build a (MultiDirectional MDRNN, Keras Bidirectional RNN) comparison pair."""
    base_rnn, base_keras_rnn = make_rnns(
        return_sequences, return_state, go_backwards)
    wrapped = MultiDirectional(base_rnn)
    # concat merge mirrors MultiDirectional's concatenated output
    wrapped_keras = tf.keras.layers.Bidirectional(
        base_keras_rnn, merge_mode='concat')
    return wrapped, wrapped_keras
def make_rnn(self, return_sequences, return_state):
    """Create a multi-directional MDRNN shaped to match self.x (batch dim dropped)."""
    inner = MDRNN(units=self.units,
                  input_shape=self.x.shape[1:],
                  return_sequences=return_sequences,
                  return_state=return_state,
                  activation='tanh')
    return MultiDirectional(inner)
def test_with_functor(self):
    """Calling the wrapper as a functor gives the same known expected result."""
    rnn = MultiDirectional(self.create_mdrnn(direction=Direction(1)))
    result = rnn(self.x, initial_state=self.initial_state)
    expected = np.array([[2, 8],
                         [4, 4],
                         [8, 2]]).reshape((1, 3, 2))
    np.testing.assert_almost_equal(expected, result.numpy(), 8)
def make_rnns(self, return_sequences, return_state):
    """Build an MDRNN / Keras SimpleRNN pair sharing identical seeded weights."""
    seed = 1
    # same seeded initializer instances are given to both layers so their
    # starting weights coincide
    common = {
        'units': 3,
        'input_shape': (None, 5),
        'kernel_initializer': initializers.glorot_uniform(seed),
        'recurrent_initializer': initializers.he_normal(seed),
        'bias_initializer': initializers.Constant(2),
        'return_sequences': return_sequences,
        'return_state': return_state,
        'activation': 'relu',
    }
    rnn = MultiDirectional(MDRNN(**common))
    keras_rnn = tf.keras.layers.Bidirectional(
        tf.keras.layers.SimpleRNN(**common))
    return rnn, keras_rnn
def test_fit_multi_directional_MDLSTM(self):
    """Smoke test: a multi-directional MDLSTM model compiles and fits one epoch."""
    x = np.zeros((10, 2, 3, 6))
    # 4 directions * 10 units = 40 output features per example
    y = np.zeros((10, 40))
    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Input(shape=(2, 3, 6)))
    model.add(MultiDirectional(MDLSTM(10, input_shape=[2, 3, 6])))
    # 'lr' is a deprecated alias removed in newer Keras; use 'learning_rate'.
    # clipnorm guards against gradient explosion in the recurrent layer.
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, clipnorm=100),
        loss='categorical_crossentropy',
        metrics=['acc'])
    model.summary()
    model.fit(x, y, epochs=1)
def fit_mdrnn(target_image_size=10, rnn_units=128, epochs=30, batch_size=32):
    """Train a multi-directional MDRNN classifier on down-sampled MNIST.

    Args:
        target_image_size: side length images are down-sampled to.
        rnn_units: number of units per direction in the MDRNN layer.
        epochs: number of training epochs.
        batch_size: mini-batch size for training.
    """
    # get MNIST examples
    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # down sample images to speed up the training and graph building process for mdrnn
    x_train = down_sample(x_train, target_image_size)
    x_test = down_sample(x_test, target_image_size)

    inp = tf.keras.layers.Input(
        shape=(target_image_size, target_image_size, 1))

    # create multi-directional MDRNN layer
    rnn = MultiDirectional(
        MDRNN(units=rnn_units,
              input_shape=[target_image_size, target_image_size, 1]))
    dense = tf.keras.layers.Dense(units=10, activation='softmax')

    # build a model
    x = inp
    x = rnn(x)
    outputs = dense(x)
    model = tf.keras.Model(inp, outputs)

    # choose Adam optimizer, set gradient clipping to prevent gradient explosion,
    # set a categorical cross-entropy loss function.
    # 'lr' is a deprecated alias removed in newer Keras; use 'learning_rate'.
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, clipnorm=100),
        loss='categorical_crossentropy',
        metrics=['acc'])
    model.summary()

    # fit the model
    model.fit(x_train, tf.keras.utils.to_categorical(y_train),
              epochs=epochs,
              validation_data=(x_test, tf.keras.utils.to_categorical(y_test)),
              batch_size=batch_size)
def create_default_mdrnn(self, **kwargs):
    """Return a 4-directional MDLSTM (6 units) accepting variable-size 2D input."""
    inner = MDLSTM(units=6, input_shape=(None, None, 5), **kwargs)
    return MultiDirectional(inner)