Example #1
    def test_stacked_lstm(self):
        x_train, x_test, y_train, y_test = self.data
        network = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                # only_return_final=False keeps the full output sequence,
                # so the second LSTM still receives one vector per step
                layers.LSTM(
                    10, only_return_final=False, weights=init.Normal(0.1)),
                layers.LSTM(2, weights=init.Normal(0.1)),
                layers.Sigmoid(1),
            ],
            step=0.05,
            verbose=False,
            batch_size=1,
            error='binary_crossentropy',
        )
        network.train(x_train, y_train, x_test, y_test, epochs=10)

        y_predicted = network.predict(x_test).round()
        accuracy = (y_predicted.T == y_test).mean()

        self.assertGreaterEqual(accuracy, 0.9)
Example #2
    def test_lstm_modify_only_one_weight_parameter(self):
        lstm_layer = layers.LSTM(
            2, weights=dict(weight_in_to_ingate=init.Constant(0)))

        layers.join(
            layers.Input((5, 3)),
            lstm_layer,
        )

        for key, value in lstm_layer.weights.items():
            if key == 'weight_in_to_ingate':
                self.assertIsInstance(value, init.Constant)
            else:
                self.assertIsInstance(value, init.XavierUniform)
Example #3
    def test_stacked_lstm_with_enabled_backwards_option(self):
        x_train, x_test, y_train, y_test = self.data
        x_train = x_train[:, ::-1]
        x_test = x_test[:, ::-1]

        network = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                # backwards=True processes each sequence in reverse,
                # matching the x[:, ::-1] flip applied above
                layers.LSTM(10, only_return_final=False, backwards=True),
                layers.LSTM(2, backwards=True),
                layers.Sigmoid(1),
            ],
            step=0.1,
            verbose=False,
            batch_size=1,
            error='binary_crossentropy',
        )
        network.train(x_train, y_train, x_test, y_test, epochs=20)

        y_predicted = network.predict(x_test).round()
        accuracy = (y_predicted.T == y_test).mean()

        self.assertGreaterEqual(accuracy, 0.9)
Example #4
    def train_lstm(self, data, **lstm_options):
        x_train, x_test, y_train, y_test = data
        network = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                layers.LSTM(20, **lstm_options),
                layers.Sigmoid(1),
            ],
            step=0.05,
            verbose=False,
            batch_size=16,
            error='binary_crossentropy',
        )
        network.train(x_train, y_train, x_test, y_test, epochs=20)

        y_predicted = network.predict(x_test).round()
        accuracy = (y_predicted.T == y_test).mean()
        return accuracy
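
The helper above only trains a network and returns its accuracy; a hedged sketch of how a test might call it, forwarding the LSTM keyword names that appear in the other examples (backwards, unroll_scan, only_return_final). The test name and threshold below are illustrative assumptions, not part of the original suite.

    # Illustrative usage sketch (hypothetical test name and threshold);
    # any LSTM keyword shown in the other examples can be forwarded.
    def test_lstm_with_options(self):
        accuracy = self.train_lstm(self.data, unroll_scan=True)
        self.assertGreaterEqual(accuracy, 0.9)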
Example #5
    def test_lstm_with_4d_input(self):
        x_train, x_test, y_train, y_test = self.data
        network = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                # Make 4D input
                layers.Reshape((self.n_time_steps, 5, 2), name='reshape'),
                layers.LSTM(10),
                layers.Sigmoid(1),
            ],
            step=0.1,
            verbose=False,
            batch_size=1,
            error='binary_crossentropy',
        )
        network.train(x_train, y_train, x_test, y_test, epochs=2)

        reshape = network.connection.end('reshape')
        # +1 for batch size
        output_dimension = len(reshape.output_shape) + 1
        self.assertEqual(4, output_dimension)
Example #6

if __name__ == '__main__':
    window_size = 40

    print("Loading Shakespeare's text ...")
    preprocessor = TextPreprocessing(filepath=TEXT_FILE)
    n_characters = preprocessor.n_characters

    x_train, x_test, y_train, y_test = preprocessor.load_samples(
        window_size, stride=10)

    network = algorithms.RMSProp(
        [
            layers.Input((window_size, n_characters)),
            layers.LSTM(128),
            layers.Softmax(n_characters),
        ],
        step=0.01,
        verbose=True,
        batch_size=128,
        loss='categorical_crossentropy',
    )
    network.train(x_train, y_train, x_test, y_test, epochs=10)

    # Number of symbols that will be generated
    n_new_symbols = 1000
    # Which samples to use from the test data
    test_sample_id = 0

    test_sample = x_test[test_sample_id]
Example #7
    def test_lstm_connection_exceptions(self):
        with self.assertRaises(LayerConnectionError):
            layers.Input(1) > layers.LSTM(10)
Example #8
    def test_lstm_connection_exceptions(self):
        network = layers.join(layers.LSTM(10), layers.Reshape())

        with self.assertRaises(LayerConnectionError):
            layers.join(layers.Input(1), network)
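
Both exception tests fail because an LSTM layer expects each sample to be a sequence (time steps plus per-step features), so a one-dimensional Input cannot feed it directly. A minimal sketch of a connection that should be accepted instead, mirroring the input shape already used in Example #2 (the concrete sizes are illustrative):

    # Valid counterpart to the failing connections above: each sample
    # is a (time_steps, features) matrix, which the LSTM can consume.
    layers.join(
        layers.Input((5, 3)),
        layers.LSTM(10),
        layers.Sigmoid(1),
    )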
Example #9

if __name__ == '__main__':
    window_size = 40

    print("Loading Shakespeare's text ...")
    preprocessor = TextPreprocessing(filepath=TEXT_FILE)
    n_characters = preprocessor.n_characters

    x_train, x_test, y_train, y_test = preprocessor.load_samples(window_size,
                                                                 stride=10)

    network = algorithms.RMSProp(
        [
            layers.Input((window_size, n_characters)),
            # unroll_scan=True unrolls the recurrence into explicit
            # per-step operations instead of using a symbolic scan loop
            layers.LSTM(128, unroll_scan=True),
            layers.Softmax(n_characters),
        ],
        step=0.01,
        verbose=True,
        batch_size=128,
        error='categorical_crossentropy',
    )
    network.train(x_train, y_train, x_test, y_test, epochs=10)

    # Number of symbols that will be generated
    n_new_symbols = 1000
    # Which samples to use from the test data
    test_sample_id = 0

    test_sample = x_test[test_sample_id]
Example #10
    def test_lstm_initialization_exceptions(self):
        with self.assertRaisesRegexp(ValueError, 'callable'):
            layers.LSTM(1, activation_functions=dict(ingate=10))

        with self.assertRaises(TypeError):
            layers.LSTM(1, activation_functions=lambda x: x)
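
These assertions imply that activation_functions must be a dictionary whose values are callables: a bare callable for the whole argument, or a non-callable value for a gate, is rejected. A hedged sketch of a configuration the layer should accept, assuming the ingate key from this test and using a plain Python function as a stand-in for a real gate activation:

    # Illustrative only: custom_gate is a placeholder; a real gate
    # activation would normally be a sigmoid-like function rather
    # than the identity used here.
    def custom_gate(x):
        return x

    layers.LSTM(1, activation_functions=dict(ingate=custom_gate))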