Example #1
    def test_mnist_image(self):
        image_data = [
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 44.25, 122.75, 126.5, 70.5,
            0.0, 0.0, 0.0, 0.0, 0.0, 6.5, 1.5, 0.0, 16.75, 203.75, 249.5,
            252.0, 252.0, 246.5, 214.75, 37.5, 0.0, 0.0, 0.0, 25.25, 6.0, 0.0,
            7.0, 123.5, 166.75, 162.25, 201.25, 252.5, 181.5, 9.25, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 30.0, 167.5, 248.75, 240.5, 129.0, 27.5,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 212.75, 252.5, 208.75,
            26.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 34.25,
            71.5, 214.75, 202.0, 31.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 13.25, 169.5, 167.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 148.25, 190.5, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 12.75, 210.0, 159.5, 65.75, 167.5, 209.75, 252.5, 78.25,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.25, 168.0, 252.5, 251.25, 238.25,
            163.75, 58.75, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 15.75,
            106.5, 60.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
        ]

        model = Model(layers=[
            Image(image_data=image_data, maximum=256),
            CNN(reLU(), (3, 3)),
            Dense(reLU(), (3, 1)),
            Category([1, 2, 3]),
        ])
        model.compile(build=True)
        model.jacobian()
        weights = model.weights()
        derivatives = [w.derivative() for w in weights]
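The test stops after collecting the per-weight derivatives. A small hedged sanity check one could append (not part of the original test): the number of derivative entries should match the model's weight count, using the weight_count() method seen in Example #11 below.

        # Hypothetical extra assertion: one derivative per trainable weight.
        # weight_count() is assumed to behave as in Example #11.
        assert len(derivatives) == model.weight_count()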
Example #2
 def test_jacobian_dense_11_cnn_11(self):
     image = [random.randint(0, 255) for _ in range(1)]
     small_image = Image(image_data=image)
     dense = Dense(reLU(), (1, 1))
     cnn = CNN(reLU(), (1, 1), (0, 0))
     model = Model(layers=[small_image, cnn, dense])
     self._run_derivatives_test(model)
Example #3
 def test_jacobian_cnn_31_dense_11(self):
     random.seed(44009)
     image = [random.randint(0, 255) for _ in range(3)]
     model = Model(layers=[
         Image(image_data=image, shape=(3, 1)),
         CNN(reLU(), (3, 1), name='a'),
         Dense(reLU(), (1, 1), name='d'),
     ])
     self._run_derivatives_test(model)
Example #4
 def test_jacobian_dense_12_11(self):
     image = Image(image_data=[random.randint(0, 255) for _ in range(2)],
                   shape=(1, 2))
     dense_a = Dense(reLU(), (1, 2))
     dense_b = Dense(reLU(), (1, 1))
     model = Model(layers=[
         image,
         dense_a,
         dense_b,
     ])
     self._run_derivatives_test(model)
Example #5
 def test_jacobian_cnn_11_11(self):
     image = [random.randint(0, 255) for _ in range(2)]
     image = Image(image_data=image, shape=(1, 2))
     cnn_a = CNN(reLU(), (1, 1), (0, 0), name='a')
     cnn_b = CNN(reLU(), (1, 1), (0, 0), name='b')
     model = Model(layers=[
         image,
         cnn_a,
         cnn_b,
     ])
     self._run_derivatives_test(model)
Example #6
 def test_jacobian_dense_101_51_softmax_5(self):
     random.seed(42)
     image = Image(image_data=[random.randint(0, 255) for _ in range(2)],
                   shape=(1, 2))
     dense_a = Dense(reLU(), (10, 1), name='a')
     dense_b = Dense(reLU(), (5, 1), name='b')
     softmax = SoftMax([1, 2, 3, 4, 5], name='c')
     model = Model(layers=[
         image,
         dense_a,
         dense_b,
         softmax,
     ])
     self._run_derivatives_test(model)
Example #7
 def test_jacobian_dense_12_12_11(self):
     random.seed(42)
     image = Image(image_data=[random.randint(0, 255) for _ in range(2)],
                   shape=(1, 2))
     dense_a = Dense(reLU(), (1, 2), name='a')
     dense_b = Dense(reLU(), (1, 2), name='b')
     dense_c = Dense(reLU(), (1, 1), name='c')
     model = Model(layers=[
         image,
         dense_a,
         dense_b,
         dense_c,
     ])
     self._run_derivatives_test(model)
Example #8
 def test_jacobian_cnn_33_33_33_dense_11(self):
     image = [random.randint(0, 255) for _ in range(49)]
     image = Image(image_data=image)
     cnn_a = CNN(reLU(), (3, 3), name='a')
     cnn_b = CNN(reLU(), (3, 3), name='b')
     cnn_c = CNN(reLU(), (3, 3), name='c')
     dense = Dense(reLU(), (1, 1), name='d')
     model = Model(layers=[
         image,
         cnn_a,
         cnn_b,
         cnn_c,
         dense,
     ])
     self._run_derivatives_test(model)
Example #9
 def test_jacobian_cnn_31(self):
     image = [random.randint(0, 255) for _ in range(3)]
     model = Model(layers=[
         Image(image_data=image, shape=(3, 1)),
         CNN(reLU(), (3, 1), (0, 0), name='a'),
     ])
     weights = self._run_derivatives_test(model)
     self.assertEqual(len(weights), 7)
Example #10
 def test_softmax(self):
     image_data = [0.2, 0.5, 0.3]
     categories = [1, 2, 4]
     model = Model(layers=[
         Image(image_data=image_data, shape=(3, 1), maximum=1),
         Dense(reLU(), (3, 1)),
         SoftMax(categories),
     ])
     self._run_derivatives_test(model)
Example #11
    def test_jacobian(self):
        mndata = MNISTData(path='../mnist')
        images, labels = mndata.training()

        r = 49  # image 49 is a nice example of the digit three
        image_layer = Image(image_data=images[r], label=labels[r])
        dense = Dense(reLU(), (1, 1))

        model = Model(layers=[image_layer, dense])
        model.compile(build=True)
        weight_count = model.weight_count()

        # bump each weight: numerical finite differences vs the analytic jacobian
        gradients = finite_differences(model, False)
        jacobian = list(flatten(dense.jacobian()))
        mod_jacb = list(flatten(model.jacobian()))
        assert len(gradients) == len(jacobian)
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            print(f'{g[0]:8.6f} {j:8.6f} {m:8.6f}')
        for g, j, m in zip(gradients.values(), jacobian, mod_jacb):
            self.assertAlmostEqual(g[0], j, delta=0.001)
            self.assertAlmostEqual(j, m, delta=0.000001)
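The finite_differences helper used above comes from j2learn.etc.tools (see Example #16); its implementation is not shown in these snippets. Purely for intuition, a generic central-difference gradient for a scalar function of a parameter vector looks like the sketch below. This is an illustration of the technique, not j2learn's code.

import numpy as np

def central_differences(f, params, eps=1e-5):
    # Estimate df/dp_i by bumping each parameter up and down by eps.
    params = np.asarray(params, dtype=float)
    grads = np.empty_like(params)
    for i in range(params.size):
        up, down = params.copy(), params.copy()
        up[i] += eps
        down[i] -= eps
        grads[i] = (f(up) - f(down)) / (2.0 * eps)
    return grads

# Usage: central_differences(lambda p: p[0] * p[1], [2.0, 3.0]) ~ [3.0, 2.0]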
Example #12
    def test_jacobian_dense_31_category_3(self):
        image_data = [0.2, 0.5, 0.3]
        categories = [1, 2, 4]
        model = Model(layers=[
            Image(image_data=image_data, shape=(3, 1), maximum=1),
            Dense(reLU(), (3, 1)),
            Category(categories),
        ])
        self._run_derivatives_test(model)
        v = model.value()

        # compute value manually for this image_data:
        manual_dense_layer = []
        for i in range(3):
            weights = model._layers[1]._nodes[i]._weights
            manual_dense_layer.append(
                sum([w.weight() * d for w, d in zip(weights, image_data)]))
        max_value = max(manual_dense_layer)
        imax_value = int(np.argmax(np.array(manual_dense_layer)))
        self.assertEqual(v, [max_value])

        p = model.predict()
        self.assertEqual(p, [categories[imax_value]])
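The manual check above walks each node's private _weights list; since numpy is already used for argmax, the same arithmetic can be written as one matrix-vector product. A hedged restatement (same private attributes, same arithmetic as the loop above, not an independent check):

        # Hypothetical numpy restatement of the manual dense-layer check above.
        W = np.array([[w.weight() for w in model._layers[1]._nodes[i]._weights]
                      for i in range(3)])
        manual = W @ np.array(image_data)   # pre-activation node sums
        self.assertAlmostEqual(v[0], float(manual.max()))
        self.assertEqual(p, [categories[int(manual.argmax())]])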
Example #13
from j2learn.data.mnist_images import MNISTData
from j2learn.function.function import reLU
from j2learn.layer.category import Category
from j2learn.layer.cnn import CNN
from j2learn.layer.dense import Dense
from j2learn.layer.image import Image

mndata = MNISTData(path='../mnist')

images, labels = mndata.training()

r = 49  # image 49 is a nice example of the digit three
image_layer = Image(image_data=images[r], label=labels[r])
print(image_layer.display())
### test a 1x1 "identity" CNN: its output must equal the underlying image layer
identity_cnn = CNN(reLU(), (1, 1), (0, 0), image_layer, weight=1)
for i in range(identity_cnn.shape()[0] * identity_cnn.shape()[1]):
    assert identity_cnn.node(i).value() == identity_cnn._underlying_layer.node(
        i).value()
print(identity_cnn.display())

### test convolution
first_cnn = CNN(reLU(), (3, 3), (0, 0), identity_cnn)
print(first_cnn.display(threshold=0.5))
second_cnn = CNN(reLU(), (3, 3), (0, 0), first_cnn)
print(second_cnn.display(threshold=0.5))

### more layers
dense = Dense(reLU(), (10, 1), second_cnn)
category = Category([i for i in range(10)], dense)
print(category.node(0).value())
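Why the 1x1 CNN above acts as an identity: with the single kernel weight initialised to 1 and non-negative pixel values (and assuming no bias term is added here), reLU(1 * x) = x for every pixel, so each output node equals the corresponding input node. A minimal standalone illustration of that identity (plain Python, independent of j2learn):

def relu(x):
    # ReLU leaves non-negative values unchanged.
    return x if x > 0 else 0.0

pixel_values = [0.0, 37.5, 252.0]  # non-negative, like MNIST pixel data
assert [relu(1.0 * p) for p in pixel_values] == pixel_values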
Example #14
 def test_jacobian_dense_21_cnn_12(self):
     image = Image(image_data=[random.randint(0, 255) for _ in range(4)])
     dense = Dense(reLU(), (2, 1))
     cnn = CNN(reLU(), (1, 2), (0, 0))
     model = Model(layers=[image, cnn, dense])
     self._run_derivatives_test(model)
Example #15
 def test_jacobian_cnn_11(self):
     image = [random.randint(0, 255) for _ in range(2)]
     image = Image(image_data=image, shape=(2, 1))
     cnn = CNN(reLU(), (1, 1), (0, 0))
     model = Model(layers=[image, cnn])
     self._run_derivatives_test(model)
Example #16
import random

from j2learn.etc.tools import finite_differences
from j2learn.function.function import reLU
from j2learn.layer.cnn import CNN
from j2learn.layer.dense import Dense
from j2learn.layer.category import Category
from j2learn.layer.image import Image
from j2learn.model.model import Model

pixels = 81
image = Image(image_data=[random.randint(59, 59) for _ in range(pixels)])  # randint(59, 59) always returns 59, i.e. a constant-valued 9x9 image
cnn_a = CNN(reLU(), (3, 3), name='a')
cnn_b = CNN(reLU(), (3, 3), name='b')
cnn_c = CNN(reLU(), (3, 3), name='c')
dense = Dense(reLU(), (10, 1), name='d')
category = Category([i for i in range(10)])
model = Model(layers=[
    image,
    cnn_a,
    cnn_b,
    cnn_c,
    dense,
    category,
])

model.compile(build=True)

v = model.value()
# v = model.probability()
analytic_jacobian = model.jacobian()
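finite_differences is imported above but the script stops after computing the analytic Jacobian. A hedged continuation, following the comparison pattern from Example #11; it assumes the Jacobian is a one-level nested list (hence the small local flatten helper) and that finite_differences iterates the weights in the same order as the flattened Jacobian:

# Hypothetical continuation: numerical check of the analytic Jacobian,
# mirroring Example #11.
def flatten_rows(nested):
    for row in nested:
        yield from row

numerical = finite_differences(model, False)
analytic = list(flatten_rows(analytic_jacobian))
assert len(numerical) == len(analytic)
for g, j in zip(numerical.values(), analytic):
    assert abs(g[0] - j) < 1e-3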
Example #17
from j2learn.data.mnist_images import MNISTData
from j2learn.function.function import reLU
from j2learn.layer.category import Category
from j2learn.layer.cnn import CNN
from j2learn.layer.dense import Dense
from j2learn.layer.image import Image
from j2learn.model.model import Model
from j2learn.etc.tools import finite_difference

mndata = MNISTData(path='../mnist')

images, labels = mndata.training()

r = 49  # image 49 is a nice example of the digit three
image_layer = Image(image_data=images[r], label=labels[r])
cnn = CNN(reLU(), (3, 3), (0, 0))
dense = Dense(reLU(), (10, 1))
category = Category([i for i in range(10)])

model = Model(layers=[image_layer, cnn, dense, category])
model.compile(build=True)

cat = model.predict()
print(cat)
prob = model.probability()
print(prob)

n_weights = model.weight_count()

gradient = 0
i = 0
Example #18
from j2learn.data.mnist_images import MNISTData
from j2learn.etc.tools import reduce as reduce_image
from j2learn.function.function import reLU
from j2learn.layer.dense import Dense
from j2learn.layer.image import Image
from j2learn.layer.softmax import SoftMax
from j2learn.model.model import Model
from j2learn.regression.gradient_descent import GradientDescent

# ## Use reduced image size?
reduce = True
if reduce:
    nx = ny = 14
else:
    nx = ny = 28

activation = reLU()

# ## Define model
predict_labels = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
model = Model(layers=[
    Image(shape=(nx, ny)),
    Dense(activation, (100, 1), name='d1'),
    SoftMax(predict_labels, name='s1'),
])
model.compile(build=True)

# ## Prepare images for training
mndata = MNISTData(path='../test/mnist')
images, labels = mndata.training()
train_images = []
train_labels = []
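The script ends before the training loop, and the signature of reduce_image is not shown in these examples. As a generic illustration of what reducing a 28x28 MNIST image to 14x14 involves, here is a self-contained 2x2 average-pooling sketch over the flat pixel list; it is not necessarily how reduce_image itself works.

def pool_2x2(flat_image, nx=28, ny=28):
    # Average each non-overlapping 2x2 block, halving both dimensions.
    reduced = []
    for y in range(0, ny, 2):
        for x in range(0, nx, 2):
            block = [flat_image[(y + dy) * nx + (x + dx)]
                     for dy in (0, 1) for dx in (0, 1)]
            reduced.append(sum(block) / 4.0)
    return reduced

# Example: a 28x28 zero image reduces to a 14x14 zero image.
assert len(pool_2x2([0] * (28 * 28))) == 14 * 14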