Пример #1
0
    sig = sigmoid(sum)
    return sig * (1 - sig)


def tanh_derivative(sum):
    """Return the derivative of tanh evaluated at *sum*.

    d/dx tanh(x) = 1 - tanh(x)^2.  The original code applied tanh twice
    (``tanh(tanh(x))``) instead of squaring, which only agrees with the
    true derivative at x == 0.
    """
    t = math.tanh(sum)
    return 1 - t * t


np.set_printoptions(precision=4)

# 4.3 A. NOR Gate
# A single threshold neuron with all-negative weights fires only when
# every input is 0, i.e. it computes NOR.
nn = NeuralNetwork(
    3, [], [NeuronInfo(partial(threshold, 0), weights=[-1, -1, -1, 0])])
print('NOR Gate:\n{}\n'.format(nn))

# Walk the full 3-bit truth table; str(list) renders exactly '[a, b, c]',
# matching the hand-written labels of the original version.
for a in (0, 1):
    for b in (0, 1):
        for c in (0, 1):
            bits = [a, b, c]
            print('{} -> {}'.format(bits, nn.activate(bits)))
print()

# 4.3 A. Adder
nn = NeuralNetwork(
    2,
    [[
        NeuronInfo(partial(threshold, 0.5), weights=[0.5, 0.5, 0]),
        NeuronInfo(partial(threshold, 1.0), weights=[0.5, 0.5, 0]),