Example #1
0
                X[i][j] = (X[i][j] - MIN[j]) / (MAX[j] - MIN[j])

        X = np.hstack((np.ones((len(X), 1)), X))

    testing_X = np.array(X)

    return training_X, training_Y, testing_X


def sigmoid(z):
    """Logistic sigmoid 1 / (1 + e^-z); works elementwise on numpy arrays."""
    denom = 1 + np.exp(-z)
    return 1 / denom


if __name__ == "__main__":
    # Fit a logistic-regression model with the Adagrad optimizer and write
    # hard 0/1 predictions (probability threshold 0.5) to output.csv.
    training_X, training_y, testing_X = extract_features(normalization=True)

    model = LogisticRegression(training_X, training_y)
    weight = model.GradientDescent(optimizer="Adagrad")

    # Predicted probability of the positive class for each test row.
    testing_y = sigmoid(np.dot(testing_X, weight))

    with open("output.csv", "w") as f:
        print("id,label", file=f)
        n_rows = testing_y.shape[0]
        for idx in range(n_rows):
            # ids are 1-based in the submission format.
            row_fmt = "{},1" if testing_y[idx] >= 0.5 else "{},0"
            print(row_fmt.format(idx + 1), file=f)
Example #2
0
        X = np.hstack((np.ones((len(X), 1)), X))
        for r in [1, 2, 4, 5, 6]:
            X = np.hstack((X, np.array([X[:, r]**2]).T))

    testing_X = np.array(X)

    return training_X, training_Y, testing_X


def sigmoid(z):
    """Map z to the open interval (0, 1) via the logistic function, elementwise."""
    exp_neg = np.exp(-z)
    return 1 / (1 + exp_neg)


if __name__ == "__main__":
    # Train a logistic-regression classifier (default gradient-descent
    # settings) and dump binary predictions, thresholded at 0.5, to output.csv.
    training_X, training_y, testing_X = extract_features(normalization=True)

    model = LogisticRegression(training_X, training_y)
    weight = model.GradientDescent()

    # Positive-class probabilities for the test set.
    testing_y = sigmoid(np.dot(testing_X, weight))

    with open("output.csv", "w") as f:
        print("id,label", file=f)
        for row in range(testing_y.shape[0]):
            # Submission ids start at 1, not 0.
            template = "{},1" if testing_y[row] >= 0.5 else "{},0"
            print(template.format(row + 1), file=f)