Example #1
    def test_mnist_image(self):
        image_data = [
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 44.25, 122.75, 126.5, 70.5,
            0.0, 0.0, 0.0, 0.0, 0.0, 6.5, 1.5, 0.0, 16.75, 203.75, 249.5,
            252.0, 252.0, 246.5, 214.75, 37.5, 0.0, 0.0, 0.0, 25.25, 6.0, 0.0,
            7.0, 123.5, 166.75, 162.25, 201.25, 252.5, 181.5, 9.25, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 30.0, 167.5, 248.75, 240.5, 129.0, 27.5,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 212.75, 252.5, 208.75,
            26.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 34.25,
            71.5, 214.75, 202.0, 31.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 13.25, 169.5, 167.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 148.25, 190.5, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 12.75, 210.0, 159.5, 65.75, 167.5, 209.75, 252.5, 78.25,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.25, 168.0, 252.5, 251.25, 238.25,
            163.75, 58.75, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 15.75,
            106.5, 60.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
        ]

        model = Model(layers=[
            Image(image_data=image_data, maximum=256),
            CNN(reLU(), (3, 3)),
            Dense(reLU(), (3, 1)),
            Category([1, 2, 3]),
        ])
        model.compile(build=True)
        # compute the analytic Jacobian, then read each weight's derivative back out
        model.jacobian()
        weights = model.weights()
        derivatives = [w.derivative() for w in weights]
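
The reLU activation passed to the CNN and Dense layers above is plain max(0, x), and its derivative, which feeds the Jacobian computed by model.jacobian(), is a step function. A minimal standalone sketch in plain Python, independent of the library's reLU class (whose internals are not shown here):

def relu(x):
    # reLU(x) = max(0, x)
    return x if x > 0.0 else 0.0

def relu_derivative(x):
    # d/dx reLU(x): 1 for positive inputs, 0 otherwise
    # (the value at exactly x == 0 is a convention; 0 is used here)
    return 1.0 if x > 0.0 else 0.0
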
Example #2
    def test_jacobian(self):
        image_layer = Image(image_data=[0.1, 0.3, 0.7], shape=(3, 1))
        softmax = SoftMax([1, 2])

        model = Model(layers=[image_layer, softmax])
        model.compile(build=True)

        # numerical gradient check: bump each weight and measure the change in output
        gradients = finite_differences(model, False)
        gradients = list(flatten(gradients.values()))
        jacobian = list(flatten(softmax.jacobian(cache={})))
        mod_jacb = list(flatten(model.jacobian()))
        self.assertEqual(len(gradients), len(jacobian))
        for g, j, m in zip(gradients, jacobian, mod_jacb):
            print(f'{g:8.6f} {j:8.6f} {m:8.6f}')
        for g, j, m in zip(gradients, jacobian, mod_jacb):
            self.assertAlmostEqual(g, j, delta=1e-3)
            self.assertAlmostEqual(j, m, delta=1e-6)
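
The analytic quantity being checked in Example #2 is the standard softmax Jacobian, J[i][j] = s_i * (delta_ij - s_j), where s is the softmax output vector. A small NumPy sketch of that formula, separate from the SoftMax layer class whose internals are not shown here:

import numpy as np

def softmax(z):
    # numerically stable softmax: shift by the max before exponentiating
    e = np.exp(z - np.max(z))
    return e / e.sum()

def softmax_jacobian(z):
    # J[i, j] = s_i * (delta_ij - s_j)
    s = softmax(z)
    return np.diag(s) - np.outer(s, s)

# e.g. softmax_jacobian(np.array([0.1, 0.3, 0.7])) is a symmetric 3 x 3 matrix
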
Example #3
    def test_jacobian(self):
        mndata = MNISTData(path='../mnist')
        images, labels = mndata.training()

        r = 49  # index of a training image that shows a handwritten three
        image_layer = Image(image_data=images[r], label=labels[r])
        dense = Dense(reLU(), (1, 1))

        model = Model(layers=[image_layer, dense])
        model.compile(build=True)
        weight_count = model.weight_count()

        # numerical gradient check: bump each weight and measure the change in output
        gradients = finite_differences(model, False)
        jacobian = list(flatten(dense.jacobian()))
        mod_jacb = list(flatten(model.jacobian()))
        self.assertEqual(len(gradients), len(jacobian))
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            print(f'{g[0]:8.6f} {j:8.6f} {m:8.6f}')
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            self.assertAlmostEqual(g[0], j, delta=1e-3)
            self.assertAlmostEqual(j, m, delta=1e-6)
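
The finite_differences helper itself is not shown in these examples; the usual recipe behind this kind of check is a central difference, (f(w + h) - f(w - h)) / (2 * h), applied to one weight at a time. The sketch below is a generic, hypothetical version that operates on a plain callable and a flat NumPy parameter vector rather than on the library's Model and weight objects:

import numpy as np

def central_difference_gradient(f, params, h=1e-5):
    # estimate df/dw_k for each entry of a flat parameter vector
    params = np.asarray(params, dtype=float)
    grad = np.zeros_like(params)
    for k in range(params.size):
        bumped = params.copy()
        bumped[k] += h          # bump up
        up = f(bumped)
        bumped[k] -= 2.0 * h    # bump down
        down = f(bumped)
        grad[k] = (up - down) / (2.0 * h)
    return grad
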
Example #4
pixels = 81
image = Image(image_data=[59] * pixels)  # constant-valued test image: every pixel is 59
cnn_a = CNN(reLU(), (3, 3), name='a')
cnn_b = CNN(reLU(), (3, 3), name='b')
cnn_c = CNN(reLU(), (3, 3), name='c')
dense = Dense(reLU(), (10, 1), name='d')
category = Category([i for i in range(10)])
model = Model(layers=[
    image,
    cnn_a,
    cnn_b,
    cnn_c,
    dense,
    category,
])

model.compile(build=True)

v = model.value()                      # evaluate the model on the current image
# v = model.probability()
analytic_jacobian = model.jacobian()   # analytic derivative for every weight
# finite_differences(model, False)
print(v)

model.update_data_layer([199] * pixels)  # swap in a different constant-valued image
v = model.value()
# analytic_jacobian = model.jacobian()
finite_differences(model, False, nmax=500)
print(v)
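
Example #4 computes both the analytic Jacobian and a finite-difference run, but never compares the two. A common way to make that comparison, assuming both sides have already been flattened to plain lists of floats as in Examples #2 and #3, is an element-wise relative error; the helper below is hypothetical and not part of the library:

def max_relative_error(analytic, numeric, eps=1e-12):
    # largest |a - n| / max(|a|, |n|, eps) over matching entries
    worst = 0.0
    for a, n in zip(analytic, numeric):
        err = abs(a - n) / max(abs(a), abs(n), eps)
        worst = max(worst, err)
    return worst

Values around 1e-6 or smaller generally mean the analytic derivatives agree with the numerical estimate; values near 1 point at a bug or a poorly chosen perturbation size.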