Example #1
0
    def test_stacked_gru(self):
        """Two stacked GRU layers should learn the binary sequence
        classification task to at least 80% test accuracy."""
        x_train, x_test, y_train, y_test = self.data

        optimizer = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                # First GRU keeps the whole output sequence so the
                # second GRU receives one value per time step.
                layers.GRU(10, only_return_final=False),
                layers.GRU(1),
                layers.Sigmoid(1),
            ],
            step=0.01,
            verbose=False,
            batch_size=1,
            error='binary_crossentropy',
        )
        optimizer.train(x_train, y_train, x_test, y_test, epochs=10)

        predictions = optimizer.predict(x_test).round()
        accuracy = (predictions.T == y_test).mean()
        self.assertGreaterEqual(accuracy, 0.8)
    def test_gru_modify_only_one_weight_parameter(self):
        """Overriding one GRU weight initializer must leave the
        default initializer on every other weight."""
        gru = layers.GRU(
            2, weights=dict(weight_in_to_updategate=init.Constant(0)))

        layers.join(
            layers.Input((5, 3)),
            gru,
        )

        for name, initializer in gru.weights.items():
            # Only the explicitly overridden weight keeps Constant;
            # everything else falls back to the XavierUniform default.
            expected_type = (
                init.Constant if name == 'weight_in_to_updategate'
                else init.XavierUniform
            )
            self.assertIsInstance(initializer, expected_type)
Example #3
0
    def test_stacked_gru_with_enabled_backwards_option(self):
        """Stacked GRUs with ``backwards=True`` should solve the task
        when the input sequences are reversed along the time axis."""
        x_train, x_test, y_train, y_test = self.data
        # Reverse time so the backwards GRUs effectively read the
        # sequences in their original order.
        x_train, x_test = x_train[:, ::-1], x_test[:, ::-1]

        optimizer = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                layers.GRU(10, only_return_final=False, backwards=True),
                layers.GRU(2, backwards=True),
                layers.Sigmoid(1),
            ],
            step=0.01,
            verbose=False,
            batch_size=10,
            loss='binary_crossentropy',
        )
        optimizer.train(x_train, y_train, x_test, y_test, epochs=20)

        predictions = optimizer.predict(x_test).round()
        accuracy = (predictions.T == y_test).mean()
        self.assertGreaterEqual(accuracy, 0.8)
    def train_gru(self, data, **gru_options):
        """Train a small GRU network configured with ``gru_options``
        and return the resulting test-set accuracy."""
        x_train, x_test, y_train, y_test = data

        optimizer = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                layers.GRU(20, **gru_options),
                layers.Sigmoid(1),
            ],
            step=0.05,
            verbose=False,
            batch_size=16,
            error='binary_crossentropy',
        )
        optimizer.train(x_train, y_train, x_test, y_test, epochs=20)

        predictions = optimizer.predict(x_test).round()
        return (predictions.T == y_test).mean()
    def test_gru_with_4d_input(self):
        """GRU should accept input with more than three dimensions;
        the reshape layer's output (plus batch axis) must be 4-D."""
        x_train, x_test, y_train, y_test = self.data

        optimizer = algorithms.RMSProp(
            [
                layers.Input(self.n_time_steps),
                layers.Embedding(self.n_categories, 10),
                # Split the embedding axis to produce a 4-D tensor.
                layers.Reshape((self.n_time_steps, 5, 2), name='reshape'),
                layers.GRU(10),
                layers.Sigmoid(1),
            ],
            step=0.1,
            verbose=False,
            batch_size=1,
            error='binary_crossentropy',
        )
        optimizer.train(x_train, y_train, x_test, y_test, epochs=2)

        reshape_layer = optimizer.connection.end('reshape')
        # output_shape excludes the batch axis, hence the +1.
        n_dimensions = len(reshape_layer.output_shape) + 1
        self.assertEqual(4, n_dimensions)
Example #6
0
# Hold out 20% of the sequences for evaluation.
x_train, x_test, y_train, y_test = train_test_split(
    data, labels, train_size=0.8)

# One extra category is reserved for the zero-padding value.
n_categories = len(reber.avaliable_letters) + 1
n_time_steps = x_train.shape[1]

network = algorithms.RMSProp(
    [
        # shape: (n_samples, n_time_steps)
        layers.Input(n_time_steps),
        # shape: (n_samples, n_time_steps, 10)
        layers.Embedding(n_categories, 10),
        # unroll_scan speeds up calculation for short sequences
        # shape: (n_samples, 20)
        layers.GRU(20, unroll_scan=True),
        # shape: (n_samples, 1)
        layers.Sigmoid(1),
    ],
    step=0.05,
    verbose=True,
    batch_size=64,
    error='binary_crossentropy',
)
network.train(x_train, y_train, x_test, y_test, epochs=20)

y_predicted = network.predict(x_test).round()
accuracy = (y_predicted.T == y_test).mean()
print("Test accuracy: {:.2%}".format(accuracy))
 def test_gru_connection_exceptions(self):
     # Connecting a flat 1-dimensional Input directly to a GRU must be
     # rejected — presumably because GRU requires a sequence-shaped
     # input; verify against the layer's shape validation.
     with self.assertRaises(LayerConnectionError):
         layers.Input(1) > layers.GRU(10)
Example #8
0
    def test_gru_connection_exceptions(self):
        """Joining a 1-dimensional input to a GRU-based network should
        raise ``LayerConnectionError``."""
        gru_network = layers.join(layers.GRU(10), layers.Reshape())

        with self.assertRaises(LayerConnectionError):
            layers.join(layers.Input(1), gru_network)
Example #9
0
    def test_gru_initialization_exceptions(self):
        """Invalid ``activation_functions`` arguments must be rejected
        at layer construction time."""
        # assertRaisesRegex replaces the deprecated assertRaisesRegexp
        # alias, which was removed in Python 3.12.
        # A non-callable gate activation triggers a ValueError whose
        # message mentions 'callable'.
        with self.assertRaisesRegex(ValueError, 'callable'):
            layers.GRU(1, activation_functions=dict(ingate=10))

        # Passing a bare callable instead of a per-gate mapping is a
        # TypeError.
        with self.assertRaises(TypeError):
            layers.GRU(1, activation_functions=lambda x: x)