Example #1
def classify_binary(dataset_path):
    X, Y = getBinaryData(dataset_path)
    X0, X1 = X[Y == 0, :], X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)  # repeat the class-1 rows 9x to balance the classes
    X, Y = np.vstack([X0, X1]), np.array([0] * len(X0) + [1] * len(X1))

    # train an ANN with 100 hidden units on the binary classification task
    model = ANN(100)
    model.fit_2class(X, Y, show_fig=True)
    model.score(X, Y)
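All of the examples below balance the two classes the same way: class 1 occurs roughly nine times less often than class 0 in this data (see the comment in Example #12), so repeating the class-1 rows 9 times gives an approximately even split. The commented-out check in Examples #7 and #9 verifies this; in Python 3 form it is simply:

N, D = X.shape
print("N:", N)
print("p(Y=0):", np.mean(Y == 0), "p(Y=1):", np.mean(Y == 1))  # both should be close to 0.5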
Example #2
def main():
    X, Y = getBinaryData()
    X0 = X[Y == 0, :]
    X1 = X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0] * len(X0) + [1] * len(X1))

    model = ANN(100, activation_func='relu')
    model.fit(X, Y, show_fig=True)
Example #3
def main():
    X, Y = getBinaryData()
    X0, Y0 = X[Y != 1, :], Y[Y != 1]
    X1 = X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.concatenate((Y0, [1] * len(X1)))

    model = ANN(100)
    model.fit(X, Y, show_fig=True)
Example #4
def main():
    X, Y = getBinaryData()

    X0 = X[Y==0, :]
    X1 = X[Y==1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0]*len(X0) + [1]*len(X1))
    
    model = ANN(100)
    model.fit(X, Y, show_fig=True)
Example #5
def main():
    X, Y = getBinaryData()
    # print("X.shape"+str(X.shape))
    # print("Y.shape"+str(Y.shape))
    # X0 = X[Y==0]
    # X1 = X[Y==1]
    # print("X0.shape"+str(X0.shape))
    # print("X1.shape"+str(X1.shape))

    model = ANN(100)
    model.fit(X, Y, show_fig=True)
Example #6
def main():
    X, Y = getBinaryData()
    X0 = X[Y==0, :]
    X1 = X[Y==1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0]*len(X0)+[1]*len(X1))

    model = LogisticModel()
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
Example #7
def main():
    X, Y = getBinaryData()
    X0 = X[Y==0, :]
    X1 = X[Y==1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0]*len(X0) + [1]*len(X1))
    # N, D = X.shape
    # print "N:", N
    # print "p(Y=0):", np.sum(Y == 0) / float(N), "p(Y=1):", np.sum(Y == 1) / float(N)
    
    model = LogisticModel()
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
Example #8
def main():
    X, Y = getBinaryData()

    X0 = X[Y == 0, :]
    X1 = X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0] * len(X0) + [1] * len(X1))

    model = LogisticModel()
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
    scores = cross_val_score(model, X, Y, cv=5)
    print("score mean:", np.mean(scores), "stdev:", np.std(scores))
Example #9
def main():
    X, Y = getBinaryData()

    X0 = X[Y == 0, :]
    X1 = X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0] * len(X0) + [1] * len(X1))

    # N, D = X.shape
    # print "N:", N
    # print "p(Y=0):", np.sum(Y == 0) / float(N), "p(Y=1):", np.sum(Y == 1) / float(N)

    model = LogisticModel()
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
Example #10
def main():
    X, Y = getBinaryData()

    # Wherever Y == 0, select the entire corresponding row of X (class 0 of the binary problem)
    X0 = X[Y == 0, :]
    # Wherever Y == 1, select the entire corresponding row of X (class 1 of the binary problem)
    X1 = X[Y == 1, :]
    # To address the label imbalance, oversample class 1 by repeating its rows 9 times
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0] * len(X0) + [1] * len(X1))

    model = ANN(100)
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
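Because X is built with np.vstack([X0, X1]), all of the duplicated class-1 rows sit in one contiguous block at the end of the data. Example #11 below shuffles before training for exactly this reason; if the model class does not shuffle internally (not visible from these snippets), the same can be done up front. The shuffle helper in Example #11 is most likely sklearn.utils.shuffle, although its import is not shown:

from sklearn.utils import shuffle

X, Y = shuffle(X, Y)  # mix the repeated class-1 rows into the rest before training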
Example #11
def main():
    X, Y = getBinaryData()

    X0 = X[Y==0, :]
    X1 = X[Y==1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0]*len(X0) + [1]*len(X1))
    X, Y = shuffle(X, Y)
    K = len(np.unique(Y))
    N = len(Y)
    T = np.zeros((N, K))
    for i in range(N):
        T[i, Y[i]] = 1 # one hot encoding for targets

    m = nn.NeuralNetwork(numHiddenLayer=1, numHiddenUnits=100, actFunc="Sigmoid")
    trainCost, validCost, accTrain, accValid = m.train(X, T, epochs=10000, learning_rate=5*10e-7)

    print("Final Train Accuracy {}".format(accTrain))
    print("Final Valid Accuracy {}".format(accValid))
    legend1, = plt.plot(trainCost, label='training error')
    legend2, = plt.plot(validCost, label='validation error')
    plt.legend([legend1, legend2])
    plt.show()
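The one-hot encoding loop in Example #11 can also be written without an explicit Python loop; a minimal vectorized sketch, assuming Y contains integer labels 0..K-1 as built above:

T = np.zeros((N, K))
T[np.arange(N), Y] = 1   # same one-hot targets as the loop above
# equivalently: T = np.eye(K)[Y]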
Example #12
        print("best validation error:", best_validation_error)
        if show_fig:
            plt.plot(costs)
            plt.show()

    def forward(self, X):
        return util.sigmoid(X.dot(self.w) + self.b)

    def predict(self, X):
        pY = self.forward(X)
        return np.round(pY)

    def score(self, X, Y):
        predictions = self.predict(X)
        return 1 - util.error_rate(Y, predictions)


if __name__ == '__main__':
    X, Y = util.getBinaryData()
    # balance classes (class 1 occurs about 9x less often than class 0)
    X0 = X[Y == 0, :]
    X1 = X[Y == 1, :]
    X1 = np.repeat(X1, 9, axis=0)
    X = np.vstack([X0, X1])
    Y = np.array([0] * len(X0) + [1] * len(X1))

    model = LogisticModel()
    model.fit(X, Y, show_fig=True)
    model.score(X, Y)
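Example #12 relies on util.sigmoid and util.error_rate, but the util module itself never appears in these snippets. A minimal sketch of what those helpers presumably look like, inferred from how they are used (the exact implementations are assumptions):

import numpy as np

def sigmoid(a):
    # squash the activation into (0, 1)
    return 1 / (1 + np.exp(-a))

def error_rate(targets, predictions):
    # fraction of misclassified samples, so score() above returns accuracy as 1 - error_rate
    return np.mean(targets != predictions)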
Example #13
    def predict(self, X):
        pY = self.forward(X)
        return np.round(pY)

    def score(self, X, Y):
        prediction = self.predict(X)
        return 1 - error_rate(Y, prediction)


# In[3]:

import datetime

start = datetime.datetime.now()

X, Y = getBinaryData(
    facial_data_csv_kaggle='c:/Users/Denis/Desktop/fer2013/fer2013')
X0 = X[Y == 0, :]
X1 = X[Y == 1, :]
X1 = np.repeat(X1, 9, axis=0)
X = np.vstack([X0, X1])
Y = np.array([0] * len(X0) + [1] * len(X1))

model = LogisticModel()
model.fit(X, Y, show_fig=True)
model.score(X, Y)

end = datetime.datetime.now()
took = end - start

print("This training took %.1f seconds" % (took.seconds))