Example #1
    def test_to_model_params_conv_net_values_are_close_to_original(self):
        model = conv_model(mnist_single_items())
        sut, model_weights, is_bias, biases = self.get_sut_and_test_input(
            model)
        # Flatten all layer weights into a single parameter vector.
        flat_weights = self.weights_as_single_vector(model_weights)

        recreated_params = sut.to_model_params(flat_weights)

        # The round trip should reproduce each layer's weights and biases.
        recreated_params = self.unwrap_list(recreated_params)
        for weights_matrix, is_b, bias, recreated_weights in zip(
                model_weights, is_bias, biases, recreated_params):
            self.is_close(recreated_weights, is_b, bias, weights_matrix)
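The helpers used above (`weights_as_single_vector`, `to_model_params`) are project-specific and not shown in this excerpt. As a minimal sketch of the underlying idea, assuming plain NumPy arrays and hypothetical helper names, the flatten/reconstruct round trip the test checks can be written as:

    import numpy as np

    def flatten_weights(weight_arrays):
        """Concatenate a list of weight arrays into one flat vector."""
        return np.concatenate([w.ravel() for w in weight_arrays])

    def unflatten_weights(flat_vector, shapes):
        """Split a flat vector back into arrays with the given shapes."""
        arrays, offset = [], 0
        for shape in shapes:
            size = int(np.prod(shape))
            arrays.append(flat_vector[offset:offset + size].reshape(shape))
            offset += size
        return arrays

    # Round trip: every reconstructed array should match the original.
    original = [np.random.rand(3, 4), np.random.rand(4), np.random.rand(4, 2)]
    flat = flatten_weights(original)
    recreated = unflatten_weights(flat, [w.shape for w in original])
    assert all(np.allclose(a, b) for a, b in zip(original, recreated))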
Example #2
def test_two_plots():
    model = conv_model(mnist_single_items())
    _, _, x_test, y_test = mnist_single_items()
    # Request both plot styles ("levels" and "3d") in a single call.
    plot_loss_3D(model, ("levels", "3d"), x_test, y_test)


def test_unlearned_conv_model():
    model = conv_model(mnist_single_items())
    _, _, x_test, y_test = mnist_single_items()
    # Request only the "levels" plot style for an untrained model.
    plot_loss_3D(model, "levels", x_test, y_test)
Example #4
def test_unlearned_conv_model():
    model = conv_model(mnist_single_items())
    _, _, x_test, y_test = mnist_single_items()
    # plot_loss takes the evaluation data as a single (x, y) tuple.
    plot_loss(model, (x_test, y_test))
Example #5
    def setup(cls):
        if not hasattr(
                cls, 'initialized'):  # avoid learning networks multiple times
            cls.initialized = True
            cls.dense = dense_model(toy_dataset())
            cls.conv = conv_model(mnist_dataset())
Example #6
def test_conv_mnist_model():
    model = conv_model(mnist_dataset())
    _, _, x_test, y_test = mnist_dataset()
    # Same "levels" plot as in Example #2, but on the full MNIST dataset.
    plot_loss_3D(model, "levels", x_test, y_test)
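`plot_loss_3D` and `plot_loss` are the project's own plotting helpers, and their internals are not shown in these examples. As a rough, hypothetical sketch of the general technique such helpers typically rely on (evaluating the loss on a 2D grid spanned by two perturbation directions around the current parameters and rendering it as contour levels), here is a self-contained toy version using NumPy and Matplotlib with a quadratic stand-in for the model loss:

    import numpy as np
    import matplotlib.pyplot as plt

    def toy_loss(theta):
        """Stand-in loss: a simple quadratic bowl around (1, -2)."""
        return (theta[0] - 1.0) ** 2 + 0.5 * (theta[1] + 2.0) ** 2

    # Centre point and two perturbation directions in parameter space.
    center = np.array([0.0, 0.0])
    d1 = np.array([1.0, 0.0])
    d2 = np.array([0.0, 1.0])

    # Evaluate the loss on a grid of perturbed parameters.
    alphas = np.linspace(-3, 3, 61)
    betas = np.linspace(-3, 3, 61)
    losses = np.array([[toy_loss(center + a * d1 + b * d2) for a in alphas]
                       for b in betas])

    # "levels"-style view: contour plot of the loss surface.
    A, B = np.meshgrid(alphas, betas)
    plt.contourf(A, B, losses, levels=20)
    plt.colorbar(label="loss")
    plt.xlabel("direction 1")
    plt.ylabel("direction 2")
    plt.title("Loss surface around the current parameters (toy example)")
    plt.show()

For a real model, `toy_loss` would be replaced by the network's loss evaluated on `(x_test, y_test)` with the perturbed weights loaded into the model; the grid and contour rendering stay the same.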