Example #1
def fit(self, X, Y):
    # Prepend a column of ones so the first weight acts as the bias term.
    X = np.insert(X, 0, 1, axis=1)
    N, D = X.shape
    self.W = np.random.random(D) / np.sqrt(D)
    self.costs = []
    for i in range(self.n_iter):
        Y_pred = feed_forward(X, self.W, 0)
        # Binary cross-entropy plus the regularization penalty.
        cost = -np.sum(Y * np.log(Y_pred) + (1 - Y) * np.log(1 - Y_pred)) + self.regularizer.cost(self.W)
        self.costs.append(cost)
        # Gradient of the cross-entropy w.r.t. W, plus the regularizer's gradient.
        grad = -(Y - Y_pred).dot(X) + self.regularizer.gradient(self.W)
        self.W = self.W - self.lr * grad
    plt.plot(self.costs)
    plt.show()
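
Example #1 relies on a feed_forward helper and a self.regularizer object that are defined elsewhere in the repository. A minimal sketch of plausible implementations, assuming a single sigmoid unit and an L2 penalty (both are assumptions, not the actual utils code):

import numpy as np

def sigmoid(z):
    # Logistic function.
    return 1 / (1 + np.exp(-z))

def feed_forward(X, W, b):
    # Single sigmoid unit: predicted probability of the positive class.
    return sigmoid(X.dot(W) + b)

class L2Regularizer:
    # Hypothetical regularizer exposing the cost/gradient interface used in fit().
    def __init__(self, lam=0.01):
        self.lam = lam

    def cost(self, W):
        return self.lam * np.sum(W * W)

    def gradient(self, W):
        return 2 * self.lam * W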
Example #2
def predict(self, X):
    # Add the same bias column used during training, then run the forward pass.
    X = np.insert(X, 0, 1, axis=1)
    Y_pred = feed_forward(X, self.W, 0)
    return Y_pred
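
These two methods clearly belong to a small logistic-regression class that is not shown on this page. A hypothetical skeleton they could plug into (the class name, constructor arguments, and defaults are assumptions):

import numpy as np
import matplotlib.pyplot as plt

class LogisticRegression:
    # Hypothetical container for the fit() and predict() methods above.
    def __init__(self, lr=0.001, n_iter=10000, regularizer=None):
        self.lr = lr
        self.n_iter = n_iter
        # Any object exposing cost(W) and gradient(W), e.g. the L2Regularizer sketched above.
        self.regularizer = regularizer

    # fit() from Example #1 and predict() from Example #2 go here.

# Usage sketch (X_train, Y_train, X_test are placeholders):
#   model = LogisticRegression(lr=0.001, n_iter=10000, regularizer=L2Regularizer(0.01))
#   model.fit(X_train, Y_train)
#   labels = np.round(model.predict(X_test))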
from utils.common_function import sigmoid, feed_forward, cross_entropy_cost, cross_entropy_gradient_descent_w, cross_entropy_gradient_descent_bias
from utils.generate_data import generate_XOR_data
import numpy as np
import matplotlib.pyplot as plt



if __name__ == '__main__':
    N = 1000
    X, Y = generate_XOR_data(N)
    # XOR is not linearly separable in the raw inputs, so add the product
    # x1 * x2 as an extra feature; with it, a linear model can separate the classes.
    XY = (X[:,0] * X[:,1]).reshape(N, 1)
    X = np.concatenate((XY, X), axis=1)
    D = 3
    learning_rate = 0.001
    cost = []
    W = np.random.randn(D) / np.sqrt(D)
    B = 0
    iterations = 50000
    for i in range(iterations):
        # Forward pass, track the cross-entropy, then take one gradient step on W and B.
        Y_pred = feed_forward(X, W, B)
        i_cost = cross_entropy_cost(Y, Y_pred)
        cost.append(i_cost)
        W = cross_entropy_gradient_descent_w(W, learning_rate, Y, Y_pred, X)
        B = cross_entropy_gradient_descent_bias(B, learning_rate, Y, Y_pred)
    # Round the final predictions to 0/1 and compare with the labels.
    accuracy = np.mean(Y == np.round(Y_pred))
    print(f"accuracy : {accuracy}")
    plt.plot(cost)
    plt.show()
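
The helpers imported from utils.common_function and utils.generate_data are not shown on this page. A rough sketch of what they could look like, assuming plain batch gradient descent on the binary cross-entropy and a simple binary XOR generator (the real utils code may differ):

import numpy as np

def cross_entropy_cost(Y, Y_pred):
    # Binary cross-entropy summed over the batch.
    return -np.sum(Y * np.log(Y_pred) + (1 - Y) * np.log(1 - Y_pred))

def cross_entropy_gradient_descent_w(W, lr, Y, Y_pred, X):
    # One gradient-descent step on the weights.
    return W - lr * X.T.dot(Y_pred - Y)

def cross_entropy_gradient_descent_bias(B, lr, Y, Y_pred):
    # One gradient-descent step on the scalar bias.
    return B - lr * np.sum(Y_pred - Y)

def generate_XOR_data(N):
    # Hypothetical XOR data generator: two binary inputs, label is their XOR.
    X = np.random.randint(0, 2, size=(N, 2)).astype(float)
    Y = np.logical_xor(X[:, 0], X[:, 1]).astype(float)
    return X, Y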