Example #1
def numerical_gradient_net():
    # Differentiate the loss numerically with respect to each
    # first-layer parameter (weights w1, bias b1).
    gradient = {
        'w1': numerical_gradient2(loss, params['w1']),
        'b1': numerical_gradient2(loss, params['b1'])
    }

    return gradient
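
Both examples rely on numerical_gradient2 from the common library, whose source is not shown here. As a reference point, here is a minimal sketch of such a helper, assuming it computes a central-difference numerical gradient by perturbing each element of the parameter array in place (the name and signature follow the calls in these examples; the body is an assumption, not the library's actual code):

import numpy as np

def numerical_gradient2(f, x, h=1e-4):
    # Sketch (assumption): central-difference gradient of f at x.
    # f takes the whole array and returns a scalar loss.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        tmp = x[idx]
        x[idx] = tmp + h
        fxh1 = f(x)                          # f(x + h) for this element
        x[idx] = tmp - h
        fxh2 = f(x)                          # f(x - h) for this element
        grad[idx] = (fxh1 - fxh2) / (2 * h)  # central difference
        x[idx] = tmp                         # restore the original value
        it.iternext()
    return grad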
Example #2
import os
import sys
import numpy as np
from pathlib import Path

try:
    sys.path.append(os.path.join(Path(os.getcwd()).parent, 'lib'))
    from mnist import load_mnist
    from common import softmax, cross_entropy_error, numerical_gradient2
except ImportError:
    print('Library Module Not Found')

x = np.array([0.6, 0.9])    # input (x),           2-vector
t = np.array([0., 0., 1.])  # label (one-hot),     3-vector


def forward_propagation(w):
    a = np.dot(x, w)    # affine transform: x @ w
    y = softmax(a)
    return y            # y = softmax(x @ w)


def loss(w):  # cross-entropy loss of the softmax output
    y = forward_propagation(w)
    e = cross_entropy_error(y, t)

    return e


_w = np.array([[0.02, 0.224, 0.135],
               [0.01, 0.052, 0.345]])  # weights,            2x3 matrix

g = numerical_gradient2(loss, _w)  # dL/d_w, same 2x3 shape as _w

print(g)
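
Because the helper perturbs _w in place, g comes back with the same 2x3 shape as _w. As a quick sanity check (a sketch; the 0.1 learning rate is an arbitrary choice, not from the original), one gradient-descent step along -g should reduce the loss:

lr = 0.1               # arbitrary learning rate, for illustration only
_w -= lr * g           # one gradient-descent update on the weights
print(loss(_w))        # expected: slightly lower loss than before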