Example #1
    def test_variable_dict(self):
        xv = np.array([0.5, 0.1, 0.5])
        yv = np.array([0.2, 0.4, 0.5])
        valin = ['x', 'y']
        x = Input(valin, 'x')
        y = Input(valin, 'y')
        xyv = [xv, yv]
        Wxv = np.array([[2.1, 3.1, 2.2], [2.2, 3.2, 4.2], [2.2, 5.2, 4.2]])
        Wyv = np.array([[2.1, 2.1, 2.2], [1.6, 1.2, 6.2], [2.1, 3.1, 2.2]])
        Wx = MWeight(3, 3, weights=Wxv)
        Wy = MWeight(3, 3, weights=Wyv)

        net = ComputationalGraphLayer(Sigmoid(Wx.dot(x)) + Tanh(Wy.dot(y)))
        netDict = Sequential(VariableDictLayer(valin), net)
        out = net.forward(xyv)
        self.assertEqual(out.shape, (3, ))
        assert_almost_equal(out, sigmoid(Wxv.dot(xv)) + np.tanh(Wyv.dot(yv)))
        dJdy = net.backward(np.array([1.0, 1.0, 1.0]))

        self.assertEqual(len(dJdy), 2)
        for ind in range(len(dJdy)):
            self.assertEqual(dJdy[ind].shape, xyv[ind].shape)
            assert_almost_equal(dJdy[ind],
                                np.sum(net.numeric_gradient(xyv)[ind], 0))

        auxdict = {'x': 0, 'y': 1}
        out = netDict.forward({'x': xv, 'y': yv})
        self.assertEqual(out.shape, (3, ))
        assert_almost_equal(out, sigmoid(Wxv.dot(xv)) + np.tanh(Wyv.dot(yv)))
        dJdy = netDict.backward(np.array([1.0, 1.0, 1.0]))
        self.assertEqual(len(dJdy), 2)
        for key in dJdy:
            self.assertEqual(dJdy[key].shape, xyv[auxdict[key]].shape)
            assert_almost_equal(
                dJdy[key], np.sum(net.numeric_gradient(xyv)[auxdict[key]], 0))
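The dict-based variant is equivalent to the positional one: the assertions above imply that VariableDictLayer only reorders the named inputs into the positional order given by valin and returns gradients keyed by name. A minimal stand-in sketch of that adapter role (DictToList is hypothetical, for illustration only):

import numpy as np

class DictToList:
    def __init__(self, names):
        self.names = names  # positional order, e.g. ['x', 'y']

    def forward(self, d):
        # dict in, list out, in the declared order
        return [d[k] for k in self.names]

    def backward(self, grads):
        # list of gradients in, dict keyed by input name out
        return {k: g for k, g in zip(self.names, grads)}

adapter = DictToList(['x', 'y'])
print(adapter.forward({'x': np.array([1.0]), 'y': np.array([2.0])}))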
Example #2
    def test_calc_delta(self):
        l1 = SoftMaxLayer()
        n = Sequential([l1])
        x = np.array([15.0, 10.0, 2.0])
        y = n.forward(x)
        self.assertEqual(y.shape, (3, ))
        nll = NegativeLogLikelihoodLoss()
        t = np.array([0.0, 0.0, 1.0])
        self.assertEqual(y.shape, t.shape)
        J1 = nll.loss(y, t)
        self.assertEqual(J1.shape, (3, ))
        assert_almost_equal(J1, [0.0, 0.0, 13.0067176], decimal=5)

        cel = CrossEntropyLoss()
        t = np.array([0.0, 0.0, 1.0])
        J2 = cel.loss(x, t)
        self.assertEqual(J2.shape, (3, ))
        assert_almost_equal(J2, [0.0, 0.0, 13.0067176], decimal=5)

        delta_in = -nll.dJdy_gradient(y, t)
        assert_almost_equal(delta_in, [0.0, 0.0, 445395.349996])
        delta_out1 = n.backward(delta_in)
        assert_almost_equal(delta_out1, [-0.9933049, -0.0066928, 0.9999978],
                            decimal=5)

        delta_out2 = -cel.dJdy_gradient(x, t)
        assert_almost_equal(delta_out2, [-0.9933049, -0.0066928, 0.9999978],
                            decimal=5)
Example #3
    def test_SigmoidLayer(self):
        l1 = SigmoidLayer()
        n = Sequential([l1])
        y = n.forward(np.array([0]))
        self.assertEqual(y.shape, (1, ))
        assert_array_equal(y, np.array([0.5]))

        d = n.backward(np.array([1]))
        self.assertEqual(d.shape, (1, ))
        assert_array_equal(d, np.array([0.25]))
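The expected values follow from sigmoid(0) = 0.5 and sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)), so the backward value at zero is 0.5 * 0.5 = 0.25. A quick finite-difference confirmation in plain numpy:

import numpy as np

sig = lambda z: 1.0 / (1.0 + np.exp(-z))
eps = 1e-6
print((sig(eps) - sig(-eps)) / (2 * eps))  # ~0.25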
Example #4
 def test_calc_loss(self):
     l1 = SoftMaxLayer()
     n = Sequential([l1])
     x = np.array([15.0, 10.0, 2.0])
     y = n.forward(x)
     self.assertEqual(y.shape, (3, ))
     nll = NegativeLogLikelihoodLoss()
     t = np.array([0.0, 0.0, 1.0])
     self.assertEqual(y.shape, t.shape)
     J = nll.loss(y, t)
     self.assertEqual(J.shape, (3, ))
     assert_almost_equal(J, [0.0, 0.0, 13.0067176], decimal=5)
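The expected loss has a closed form: for a one-hot target at index k, -log(softmax(x)[k]) = logsumexp(x) - x[k], which here is logsumexp([15, 10, 2]) - 2 ≈ 13.0067176:

import numpy as np

x = np.array([15.0, 10.0, 2.0])
print(np.log(np.sum(np.exp(x))) - x[2])  # 13.0067176...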
Example #5
    def test_complex(self):
        xv = np.array([0.5, 0.1])
        hv = np.array([0.2, 0.4, 0.5])
        cv = np.array([1.3, 2.4, 0.2])
        vars2 = ['xh', 'c']
        xsize = 2
        hcsize = 3
        xh = Input(vars2, 'xh')
        c = Input(vars2, 'c')
        Wf = MWeight(xsize + hcsize, hcsize)
        bf = VWeight(hcsize)
        net = Sequential(VariableDictLayer(vars2),
                         ComputationalGraphLayer(Sigmoid(Wf.dot(xh) + bf) * c))
        xhc = {'xh': np.hstack([xv, hv]), 'c': cv}
        out = net.forward(xhc)
        self.assertEqual(out.shape, (3, ))
        assert_almost_equal(
            out,
            sigmoid(Wf.net.W.get().dot(np.hstack([xv, hv])) + bf.net.W.get()) *
            cv)

        vars3 = ['x', 'h', 'c']
        x = Input(vars3, 'x')  # xsize
        h = Input(vars3, 'h')  # hcsize
        c = Input(vars3, 'c')  # hcsize
        Wf = MWeight(xsize + hcsize, hcsize)
        bf = VWeight(hcsize)
        net = Sequential(
            VariableDictLayer(vars3),
            ComputationalGraphLayer(Sigmoid(Wf.dot(Concat([x, h])) + bf) * c))
        print(net)
        xhc = {'x': xv, 'h': hv, 'c': cv}
        out = net.forward(xhc)
        self.assertEqual(out.shape, (3, ))
        assert_almost_equal(
            out,
            sigmoid(Wf.net.W.get().dot(np.hstack([xv, hv])) + bf.net.W.get()) *
            cv)
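Both graphs compute the same LSTM-style forget gate, sigmoid(Wf . [x; h] + bf) * c; the second variant only moves the concatenation inside the graph via Concat. The same formula restated in plain numpy, with random stand-ins for the layer weights (the test reads the real, randomly initialized ones through Wf.net.W.get() and bf.net.W.get()):

import numpy as np

xv = np.array([0.5, 0.1])
hv = np.array([0.2, 0.4, 0.5])
cv = np.array([1.3, 2.4, 0.2])
W = np.random.rand(3, 5)  # stand-in for Wf.net.W.get()
b = np.random.rand(3)     # stand-in for bf.net.W.get()
gate = 1.0 / (1.0 + np.exp(-(W.dot(np.hstack([xv, hv])) + b))) * cv
print(gate.shape)         # (3,)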
Example #6
    def test_LinearLayer(self):
        l1 = LinearLayer(5, 6, 'ones')
        n = Sequential([l1])
        y = n.forward(np.array([2.0, 1.0, 2.0, 3.0, 4.0]))
        self.assertEqual(y.shape, (6, ))
        assert_array_equal(y, np.array([
            13.0,
            13.0,
            13.0,
            13.0,
            13.0,
            13.0,
        ]))

        l2 = LinearLayer(6, 2, 'ones')
        n.add(l2)
        y = n.forward(np.array([2.0, 1.0, 2.0, 3.0, 4.0]))
        self.assertEqual(y.shape, (2, ))
        assert_array_equal(y, np.array([79.0, 79.0]))

        d = n.backward(np.array([2.0, 3.0]))
        self.assertEqual(d.shape, (5, ))
        assert_array_equal(d, np.array([30., 30., 30., 30., 30.]))
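With 'ones' initialization every weight and bias equals 1, so each first-layer output is sum(x) + 1 = 13, each second-layer output is 6 * 13 + 1 = 79, and the input gradient is W1.T @ (W2.T @ delta) = 6 * (2 + 3) = 30 per component. The same arithmetic in plain numpy:

import numpy as np

x = np.array([2.0, 1.0, 2.0, 3.0, 4.0])
W1, W2 = np.ones((6, 5)), np.ones((2, 6))
print(W2.dot(W1.dot(x) + 1.0) + 1.0)             # [79. 79.]
print(W1.T.dot(W2.T.dot(np.array([2.0, 3.0]))))  # [30. 30. 30. 30. 30.]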
Example #7
    def test_neuron_one_input(self):
        xv = np.array([0.5, 0.1, 0.5])
        x = Input(['x'], 'x')
        Wv = np.array([[0.1, 0.1, 0.2], [0.5, 0.2, 0.2]])
        W = MWeight(3, 2, weights=Wv)
        bv = np.array([0.3, 0.1])
        b = VWeight(2, weights=bv)
        net = ComputationalGraphLayer(Sigmoid(W.dot(x) + b))
        out = net.forward(xv)
        self.assertEqual(out.shape, (2, ))
        check_out = 1.0 / (1.0 + np.exp(-Wv.dot(xv) - bv))
        assert_almost_equal(out, check_out)
        dJdy = net.backward(np.array([1.0, 1.0]))
        self.assertEqual(dJdy.shape, (3, ))
        assert_almost_equal(dJdy, np.sum(net.numeric_gradient(xv), 0))
        assert_almost_equal(dJdy, (check_out * (1 - check_out)).dot(Wv))

        net2 = Sequential(
            LinearLayer(3, 2, weights=np.hstack([Wv, bv.reshape(2, 1)])),
            SigmoidLayer())
        out2 = net2.forward(xv)
        assert_almost_equal(out, out2)
        dJdy2 = net2.backward(np.array([1.0, 1.0]))
        assert_almost_equal(dJdy, dJdy2)
Example #8
#     #SigmoidLayer(),
#     LinearLayer(5, 4, weights='random'),
#     # Parallel([
#     #     LinearLayer(5, 1, weights='random'),
#     #     LinearLayer(5, 1, weights='random'),
#     #     LinearLayer(5, 1, weights='random'),
#     #     LinearLayer(5, 1, weights='random'),
#     # ]),
#     # SigmoidLayer(),
#     SoftMaxLayer()
# ])

y1 = []
for i, (x, target) in enumerate(train):
    y1.append(model.forward(x))
y2 = []
for i, (x, target) in enumerate(test):
    y2.append(model.forward(x))

# p.compare_data(1, train_data, train_targets, y1, num_classes, classes)
# p.compare_data(1, test_data, test_targets, y2, num_classes, classes)
# plt.title('Before Training')

# print_data(1, train_data, train_targets, ['gray','gray','gray','gray'], classes)
# print_data(1, test_data, test_targets, ['gray','gray','gray','gray'], classes)
# print_data(1, train_data, y1, colors, ['x','x','x','x'])
# print_data(1, train_data, y2, colors, ['x','x','x','x'])
Example #9
mean_val = [np.zeros(784) for i in range(10)]
tot_val = np.zeros(10)
for x, t in train:
    mean_val[np.argmax(t)] += x
    tot_val[np.argmax(t)] += 1

normalization_net = Sequential(
    NormalizationLayer(0, 255, -1, 1),
    SignLayer(),
)

for i in range(10):
    mean_val[i] = mean_val[i] / tot_val[i]
    num = mean_val[i].reshape(28, 28)
    plt.imshow(normalization_net.forward(num), cmap=plt.get_cmap('Greys'))
    # plt.imshow(num)
    plt.show()

hop_net = Hopfield(784)

stored_numbers = [0, 1]  # digits stored in the network

for i in stored_numbers:
    hop_net.store(normalization_net.forward(mean_val[i]))

model = Sequential(normalization_net, hop_net)

for (x, t) in train:
    y = model.forward(x)
    plot_compare(x.reshape(28, 28), y.reshape(28, 28))
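A minimal sketch of the recall mechanism this pipeline relies on, assuming Hopfield.store uses the standard Hebbian rule W += p p^T with a zeroed diagonal and forward iterates y = sign(W y); the real class may differ in details such as update order and stopping criteria:

import numpy as np

class TinyHopfield:
    def __init__(self, n):
        self.W = np.zeros((n, n))

    def store(self, p):
        # Hebbian storage: reinforce pairwise agreements, no self-connections
        self.W += np.outer(p, p)
        np.fill_diagonal(self.W, 0.0)

    def forward(self, x, steps=5):
        # synchronous recall: repeatedly snap the state to +/-1
        y = x.copy()
        for _ in range(steps):
            y = np.sign(self.W.dot(y))
        return y

net = TinyHopfield(4)
net.store(np.array([1.0, -1.0, 1.0, -1.0]))
print(net.forward(np.array([1.0, 1.0, 1.0, -1.0])))  # recovers [1, -1, 1, -1]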
Example #10
    # TanhLayer
)

# from computationalgraph import Input, HotVect
# x = Input(['x','a'],'x')
# a = Input(['x','a'],'a')
# c=ComputationalGraphLayer(x*HotVect(a))
# c.forward([[10,10],1])

printer = ShowTraining(epochs_num=epochs, weights_list={'Q': W1})
trainer = Trainer(show_training=True, show_function=printer.show)

data_train = np.random.rand(1000, 2) * 5
train = []
for x in data_train:
    out = Q_hat.forward(x)
    train.append(out * utils.to_one_hot_vect(np.argmax(out), out.size))

data_test = np.random.rand(1000, 2) * 5
test = []
for x in data_test:
    out = Q_hat.forward(x)
    test.append(out * utils.to_one_hot_vect(np.argmax(out), out.size))

J_list, dJdy_list, J_test = trainer.learn(
    model=Q,
    train=zip(data_train, train),
    test=zip(data_test, test),
    # loss = NegativeLogLikelihoodLoss(),