    def test_bug_issue_1(self):
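        """Regression test: grow and shrink the hidden layer of a converged network and retrain."""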
        non_liniarity = tf.nn.relu
        regularizer_coeff = 0.01
        last_layer = InputLayer(self.mnist_data.features_shape,
                                # drop_out_prob=.5,
                                layer_noise_std=1.
                                )

        last_layer = HiddenLayer(last_layer, 100, self.session, non_liniarity=non_liniarity,
                                 batch_normalize_input=True)

        output = CategoricalOutputLayer(last_layer, self.mnist_data.labels_shape, self.session,
                                        batch_normalize_input=True,
                                        regularizer_weighting=regularizer_coeff)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)

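        # grow the hidden layer from 100 to 110 units and retrain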
        last_layer.resize(110)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)

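        # shrink the hidden layer down to 90 units and retrain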
        last_layer.resize(90)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)

    def test_bug_issue_2(self):
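        """Regression test: activate_predict should still run after resizing a batch-normalized hidden layer."""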
        last_layer = InputLayer((1,))
        last_layer = HiddenLayer(last_layer, 2, self.session,
                                 batch_normalize_input=True)

        output = CategoricalOutputLayer(last_layer, (1,), self.session,
                                        batch_normalize_input=True)

        print(output.activate_predict([[0.]]))

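        # grow the hidden layer from 2 to 4 units; prediction should still work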
        last_layer.resize(4)

        print(output.activate_predict([[0.]]))

    def test_adding_hidden_layer_with_resize(self):
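        """Clone the trained hidden layer into a new intermediate layer, resize it,
        and check the prediction op still evaluates."""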
        non_liniarity = tf.nn.relu
        regularizer_coeff = None
        layer = InputLayer(self.mnist_data.features_shape)

        layer = HiddenLayer(layer, 100, self.session, non_liniarity=non_liniarity,
                            batch_normalize_input=False)

        output = CategoricalOutputLayer(layer, self.mnist_data.labels_shape, self.session,
                                        batch_normalize_input=True,
                                        regularizer_weighting=regularizer_coeff)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)

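        # insert a cloned copy of the hidden layer, then grow it to 110 units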
        layer.add_intermediate_cloned_layer()
        layer.resize(110)

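        # a single forward pass should still run against the modified graph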
        self.session.run(output.activation_predict,
                         feed_dict={output.input_placeholder: self.mnist_data.train.features[:3],
                                    output.target_placeholder: self.mnist_data.train.labels[:3]})

    def test_bug_issue_with_state(self):
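        """Regression test: restoring a network state captured before a resize should leave the network trainable."""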
        non_liniarity = tf.nn.relu
        regularizer_coeff = 0.01
        layer = InputLayer(self.mnist_data.features_shape, layer_noise_std=1.)

        layer = HiddenLayer(layer, 6, self.session, non_liniarity=non_liniarity,
                            batch_normalize_input=True)

        output = CategoricalOutputLayer(layer, self.mnist_data.labels_shape, self.session,
                                        batch_normalize_input=True,
                                        regularizer_weighting=regularizer_coeff)

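        # snapshot the network state before any resizing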
        state = output.get_network_state()

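        # grow the hidden layer from 6 to 10 units and train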
        layer.resize(10)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)

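        # restore the pre-resize state and check training still converges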
        output.set_network_state(state)

        output.train_till_convergence(self.mnist_data.train, self.mnist_data.validation,
                                      learning_rate=.1)