Example #1
    def loss(self, x, t):
        # np.float was removed in NumPy 1.24; the builtin float dtype works the same here.
        x = np.array(x).astype(float)
        t = np.array(t).astype(float)

        # Forward pass, softmax to probabilities, then cross-entropy against the targets.
        y = afm.softmax(self.predict(x))
        loss = lf1.loss_cross_entropy(y, t)

        return loss
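The loss method above leans on two helper modules, afm and lf1, whose definitions are not shown. A minimal sketch of what they presumably provide (my stand-ins, not the author's actual modules): a numerically stable softmax and a batch-averaged cross-entropy.

import numpy as np

def softmax(a):  # stand-in for afm.softmax
    a = np.atleast_2d(a)
    e = np.exp(a - a.max(axis=1, keepdims=True))  # subtract row max for stability
    return e / e.sum(axis=1, keepdims=True)

def loss_cross_entropy(y, t):  # stand-in for lf1.loss_cross_entropy
    y, t = np.atleast_2d(y), np.atleast_2d(t)
    return -np.sum(t * np.log(y + 1e-7)) / y.shape[0]  # 1e-7 guards against log(0)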
Example #2
    def dW(self, x, y, delta=1e-4):
        # Numerical gradient of the loss w.r.t. W: perturb one element at a
        # time and take the central difference (loss_f - loss_b) / (2 * delta).
        H = np.zeros_like(self.W)                # one-hot perturbation matrix
        gradient_Weight = np.zeros_like(self.W)
        for i in range(self.W.shape[0]):
            for j in range(self.W.shape[1]):
                H[i, j] = delta
                W_forward = self.W + H
                W_backward = self.W - H
                loss_f = lf1.loss_cross_entropy(
                    afm.softmax(np.dot(x, W_forward)), y)
                loss_b = lf1.loss_cross_entropy(
                    afm.softmax(np.dot(x, W_backward)), y)
                gradient = (loss_f - loss_b) / (2 * delta)
                H[i, j] = 0                      # reset before the next element
                gradient_Weight[i, j] = gradient
        return gradient_Weight
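One way to sanity-check dW: when a single weight matrix W feeds softmax and cross-entropy, the analytic gradient is x.T @ (y_hat - t) / N, so the central-difference estimate should match it to several decimal places. A self-contained sketch under that assumption (shapes and data here are made up):

import numpy as np

def softmax(a):
    e = np.exp(a - a.max(axis=1, keepdims=True))
    return e / e.sum(axis=1, keepdims=True)

def cross_entropy(y, t):
    return -np.sum(t * np.log(y + 1e-7)) / y.shape[0]

rng = np.random.default_rng(0)
x = rng.normal(size=(5, 4))            # 5 samples, 4 features
t = np.eye(3)[rng.integers(0, 3, 5)]   # one-hot targets, 3 classes
W = rng.normal(size=(4, 3))

analytic = x.T @ (softmax(x @ W) - t) / x.shape[0]

numeric = np.zeros_like(W)
delta = 1e-4
for i in range(W.shape[0]):
    for j in range(W.shape[1]):
        H = np.zeros_like(W)
        H[i, j] = delta                # perturb one element, as in dW above
        loss_f = cross_entropy(softmax(x @ (W + H)), t)
        loss_b = cross_entropy(softmax(x @ (W - H)), t)
        numeric[i, j] = (loss_f - loss_b) / (2 * delta)

print(np.max(np.abs(analytic - numeric)))  # tiny, on the order of 1e-9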
def predict(ntw, x):
    # Forward pass through a 3-layer network whose parameters live in the
    # dict `ntw`: two sigmoid hidden layers, softmax output.
    W1, W2, W3 = ntw['W1'], ntw['W2'], ntw['W3']
    b1, b2, b3 = ntw['b1'], ntw['b2'], ntw['b3']

    a1 = np.dot(x, W1) + b1
    z1 = afm.sigmoid(a1)

    a2 = np.dot(z1, W2) + b2
    z2 = afm.sigmoid(a2)

    a3 = np.dot(z2, W3) + b3
    z3 = afm.softmax(a3)

    return z3
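To run predict(ntw, x) standalone, ntw only needs the six parameter arrays. A minimal sketch with hypothetical layer sizes (2 → 3 → 3 → 2) and local stand-ins for the afm helpers:

import numpy as np

def sigmoid(a):
    return 1.0 / (1.0 + np.exp(-a))

def softmax(a):
    e = np.exp(a - np.max(a))
    return e / np.sum(e)

rng = np.random.default_rng(0)
ntw = {
    'W1': rng.normal(size=(2, 3)), 'b1': np.zeros(3),
    'W2': rng.normal(size=(3, 3)), 'b2': np.zeros(3),
    'W3': rng.normal(size=(3, 2)), 'b3': np.zeros(2),
}

x = np.array([1.0, 0.5])
z1 = sigmoid(x @ ntw['W1'] + ntw['b1'])   # same steps as predict(ntw, x)
z2 = sigmoid(z1 @ ntw['W2'] + ntw['b2'])
y = softmax(z2 @ ntw['W3'] + ntw['b3'])
print(y, y.sum())                          # class probabilities summing to 1.0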
    def predict(self, x):
        # Two-layer forward pass: sigmoid hidden layer, softmax output.
        W1, W2 = self.params['W1'], self.params['W2']
        b1, b2 = self.params['b1'], self.params['b2']
        p1 = afm.sigmoid(np.dot(x, W1) + b1)
        p2 = afm.softmax(np.dot(p1, W2) + b2)
        return p2
    """
    # print(inspect.getsource(nn_1))  # prints the full source code; not recommended.
    # print(inspect.getsource(np))
    print('version 3 test')
    sl_input = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.4, 0.3, 0.2, 0.1])
    sl = single_layer_3(
                        initial_input=sl_input,
                        layers=[3, 4, 3, 2, 6, 4, 10, 4, 3, 5, 2],
                        output_size=10,
                        activation_function=afm.softmax)
    x = sl.product()[0]
    print((x - np.mean(x)) / np.std(x))  # z-score standardization
    print((x - np.min(x)) / (np.max(x) - np.min(x)))  # min-max normalization
    # it seems we may not need z-score standardization or min-max normalization here
    print(x)
    print(np.sum(afm.softmax(sl_input))) # = 1.0
    print('\n')
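    # Aside (added note): for x = [1, 2, 3], z-score standardization gives
    # [-1.2247, 0.0, 1.2247] (mean 0, std 1), while min-max gives
    # [0.0, 0.5, 1.0] (range [0, 1]). Softmax output already lies in (0, 1)
    # and sums to 1, which may be why extra scaling is unnecessary here.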

    """How can we access files, which are not on this directory?
"""
    #print(inspect.getsource(os.path))
    print(os.path.relpath(
        'C:/Users/Azerates/PycharmProjects/ITWILL/SCRATCH_LECT_11', os.getcwd()
    )) #..\..\..\PycharmProjects\ITWILL\SCRATCH_LECT_11

    files = os.listdir(os.path.join(
        '..', '..', '..', 'PycharmProjects', 'ITWILL', 'SCRATCH_LECT_11'))
    for i in files:
        print(i)
        """
azerates20191211a.py
breast-cancer-wisconsin-data.csv