Example no. 1
0
    def test(self):
        """Fit a one-unit linear model on X = y*a_s + eps*a_d and verify
        that PatternComputer's "linear" pattern recovers the signal
        direction a_s, while the learned weight vector is perpendicular
        to the distractor direction a_d.
        """
        np.random.seed(234354346)
        # need many samples to get close to optimum and stable numbers
        n = 1000

        # a_s: signal direction (carries y); a_d: distractor direction
        # (carries the noise eps).
        a_s = np.asarray([1, 0]).reshape((1, 2))
        a_d = np.asarray([1, 1]).reshape((1, 2))
        y = np.random.uniform(size=(n, 1))
        eps = np.random.rand(n, 1)

        X = y * a_s + eps * a_d

        model = keras.models.Sequential([
            keras.layers.Dense(1, input_shape=(2, ), use_bias=True),
        ])
        # lr=1 is deliberately large: the problem is linear, so big steps
        # converge quickly within 20 epochs.
        model.compile(optimizer=keras.optimizers.Adam(lr=1), loss="mse")
        model.fit(X, y, epochs=20, verbose=0)
        # The model must fit the data well before patterns are meaningful.
        self.assertTrue(model.evaluate(X, y, verbose=0) < 0.05)

        pc = PatternComputer(model, pattern_type="linear")
        A = pc.compute(X)[0]
        W = model.get_weights()[0]

        def allclose(a, b):
            # loose tolerances: W and A are estimated from random samples
            return np.allclose(a, b, rtol=0.05, atol=0.05)

        # W should be perpendicular to a_d; with a_s=[1,0], a_d=[1,1] the
        # solution is W ∝ [1,-1], so abs(W) ≈ a_d elementwise.
        self.assertTrue(allclose(a_d.ravel(), abs(W.ravel())))
        # estimated pattern close to true signal direction
        self.assertTrue(allclose(a_s.ravel(), A.ravel()))
Example no. 2
0
 def method(model):
     """Build a PatternComputer using "relu.positive" patterns for *model*."""
     computer = PatternComputer(model, pattern_type="relu.positive")
     return computer
Example no. 3
0
 def method(model):
     """Build a PatternComputer using "linear" patterns for *model*."""
     computer = PatternComputer(model, pattern_type="linear")
     return computer
Example no. 4
0
 def method(model):
     """Build a PatternComputer with "dummy" patterns, computing layers serially."""
     options = {
         "pattern_type": "dummy",
         "compute_layers_in_parallel": False,
     }
     return PatternComputer(model, **options)