Example #1
    def test_backward(self):
        layer = Sigmoid()
        x = np.random.rand(2)
        y = layer.forward(x)
        deriv_grad = layer.backward(np.ones(1))

        numerical_grad_matrix = numerical_gradient.calc(layer.forward, x)

        # for an elementwise sigmoid the numerical gradient is a Jacobian
        # that is zero everywhere except the diagonal entries dy_i/dx_i
        num_grad = np.diagonal(numerical_grad_matrix)

        numerical_gradient.assert_are_similar(deriv_grad, num_grad)
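The `numerical_gradient` helper in Example #1 is project-specific. As a rough sketch of what `calc` and `assert_are_similar` are assumed to do here (a central-difference Jacobian plus a tolerance check; the names, signatures, and tolerance below are illustrative, not the project's actual API):

import numpy as np

def calc(f, x, eps=1e-6):
    # central-difference Jacobian of f at x: jac[i, j] = d f(x)_i / d x_j
    y = np.atleast_1d(f(x))
    jac = np.zeros((y.size, x.size))
    for j in range(x.size):
        x_plus, x_minus = x.copy(), x.copy()
        x_plus[j] += eps
        x_minus[j] -= eps
        jac[:, j] = (np.atleast_1d(f(x_plus)) - np.atleast_1d(f(x_minus))) / (2 * eps)
    return jac

def assert_are_similar(a, b, tol=1e-4):
    # loose elementwise comparison between analytic and numerical gradients
    assert np.allclose(a, b, atol=tol)

For the elementwise sigmoid, the off-diagonal entries of this Jacobian are zero, which is why the test only compares its diagonal against the analytic backward pass.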
Example #2
layer1 = Dense(trainingData.shape[2], 16)
activation1 = Sigmoid()
layer2 = Dense(16, 10)
activation2 = SoftMax()
cost = CostMeanSquared()

for epoch in range(EPOCHS):
    print('Epoch: ' + str(epoch + 1) + '/' + str(EPOCHS))
    print('')
    correct = 0
    for batch in range(total // BATCH_SIZE):

        ### STOCHASTIC GRADIENT DESCENT ###

        # forward pass: Dense -> Sigmoid -> Dense -> SoftMax -> cost
        layer1.forward(trainingData[batch])
        activation1.forward(layer1.outputs)
        layer2.forward(activation1.outputs)
        activation2.forward(layer2.outputs)
        cost.forward(activation2.outputs, labels[batch], 10)

        # count correct predictions in this batch (argmax over class scores)
        for sample in range(activation2.outputs.shape[1]):
            if np.argmax(activation2.outputs[:, sample]) == np.argmax(
                    labels[batch, sample]):
                correct += 1

        # backward pass: propagate gradients from the cost back to layer1
        cost.backward(activation2.outputs, labels[batch], 10)
        activation2.backward(layer2.outputs, layer2.weights.shape[0],
                             BATCH_SIZE)
        layer2.backward(activation1.outputs)
        activation1.backward(layer1.outputs)
        layer1.backward(trainingData[batch])
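The loop above stops after the backward pass; the weight update implied by the "STOCHASTIC GRADIENT DESCENT" comment is not shown in this snippet. A minimal sketch of such an update, assuming each Dense layer stores its parameter gradients in attributes like `dweights` and `dbias` (hypothetical names, not confirmed by the snippet), might look like:

LEARNING_RATE = 0.1  # illustrative value

for layer in (layer1, layer2):
    # plain SGD step inside the batch loop; attribute names are assumptions
    layer.weights -= LEARNING_RATE * layer.dweights
    layer.bias -= LEARNING_RATE * layer.dbias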
Example #3
    def test_forward(self):
        layer = Sigmoid()
        x = np.array([2., 3., 4.])
        y = layer.forward(x)
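Example #3 only exercises the forward pass. For context, a minimal Sigmoid layer consistent with how these examples use it (cached output, elementwise derivative) could look like the sketch below; this is an illustrative implementation, not the code under test:

import numpy as np

class Sigmoid:
    def __init__(self):
        self.out = None  # cached forward output

    def forward(self, x):
        # elementwise logistic function
        self.out = 1 / (1 + np.exp(-x))
        return self.out

    def backward(self, dout):
        # local derivative of the sigmoid is y * (1 - y)
        return dout * self.out * (1 - self.out)

For x = [2., 3., 4.] the forward output is roughly [0.881, 0.953, 0.982].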
Example #4
import os
import sys
from pathlib import Path

import numpy as np

try:
    # add the sibling lib/ directory to the import path
    sys.path.append(os.path.join(Path(os.getcwd()).parent, 'lib'))
    from layers import Sigmoid
except ImportError:
    print('Library module not found')

# Test1(Vector)
layer = Sigmoid()

x = np.array([0.1, -0.2, 0.3, -0.4, 0.5])
print(x)

y = layer.forward(x)
print(y)
print(layer.out)

dout = np.array([-0.1, -0.2, -0.3, 0.4, -0.5])
dout = layer.backward(dout)
print(dout)
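
# Sanity check (sketch): assuming the standard sigmoid derivative,
# backward should return dout * y * (1 - y) elementwise.
expected = np.array([-0.1, -0.2, -0.3, 0.4, -0.5]) * y * (1 - y)
print(np.allclose(dout, expected))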

print('=========================================')

# Test2(Matrix)
x = np.array([
    [0.1, -0.5, 1.0],
    [0.2, -0.6, 2.0],
    [0.3, -0.7, 3.0],
    [0.4, -0.8, 4.0]