Example #1
File: test_layer.py  Project: jwoz/j2learn
    def test_jacobian(self):
        mndata = MNISTData(path='../mnist')
        images, labels = mndata.training()

        r = 49  # index of a training image that is a nicely drawn three
        image_layer = Image(image_data=images[r], label=labels[r])
        dense = Dense(reLU(), (1, 1))

        model = Model(layers=[image_layer, dense])
        model.compile(build=True)
        weight_count = model.weight_count()

        # bump each weight (finite differences) and compare to the analytic jacobian
        gradients = finite_differences(model, False)
        jacobian = list(flatten(dense.jacobian()))
        mod_jacb = list(flatten(model.jacobian()))
        assert len(gradients) == len(jacobian)
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            print(f'{g[0]:8.6f} {j:8.6f} {m:8.6f}')
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            self.assertAlmostEqual(g[0], j, delta=0.001)
            self.assertAlmostEqual(j, m, delta=0.000001)
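The test above checks the analytic jacobian entries against numerically bumped weights. As a rough illustration of that bump-and-compare idea (this is not j2learn's actual finite_differences helper; loss_fn, weights, and epsilon are assumed names), a central-difference gradient check can be sketched like this:

import numpy as np

def finite_difference_gradients(loss_fn, weights, epsilon=1e-4):
    # Central-difference estimate of d(loss)/d(weights[i]) for every weight.
    # loss_fn: callable taking a flat weight vector and returning a scalar loss.
    # weights: 1-D numpy array of the current weights.
    grads = np.zeros_like(weights, dtype=float)
    for i in range(len(weights)):
        up = weights.astype(float)
        down = weights.astype(float)
        up[i] += epsilon
        down[i] -= epsilon
        # the symmetric bump gives an O(epsilon^2) accurate estimate
        grads[i] = (loss_fn(up) - loss_fn(down)) / (2.0 * epsilon)
    return grads

Each numeric estimate can then be compared to the corresponding analytic entry with a tolerance, as the assertAlmostEqual calls above do with delta=0.001.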
Example #2
r = 49  # index of a training image that is a nicely drawn three
image_layer = Image(image_data=images[r], label=labels[r])
cnn = CNN(reLU(), (3, 3), (0, 0))
dense = Dense(reLU(), (10, 1))
category = Category([i for i in range(10)])

model = Model(layers=[image_layer, cnn, dense, category])
model.compile(build=True)

cat = model.predict()
print(cat)
prob = model.probability()
print(prob)

n_weights = model.weight_count()

gradient = 0
i = 0
while gradient == 0 and i < n_weights:
    gradient = finite_difference(model, i, epsilon=0.01)
    i += 1
print(gradient)

weights = model.weights()
print(weights)

weight_counts = model.weight_counts()
print(weight_counts)

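The while loop above scans for the first weight whose bumped loss actually moves; with reLU activations a forward-difference estimate of exactly zero is common when a unit's input is negative. A minimal generic sketch of such a single-weight forward difference (not j2learn's finite_difference; loss_fn and weights are assumed names):

def finite_difference(loss_fn, weights, index, epsilon=0.01):
    # Forward-difference estimate of d(loss)/d(weights[index]).
    base = loss_fn(weights)
    bumped = list(weights)
    bumped[index] += epsilon
    return (loss_fn(bumped) - base) / epsilon

Increasing index from 0 until this estimate is non-zero reproduces the search in the snippet above.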