# Code example #1
    # Append a bias column of ones to the feature matrices so the model's
    # weight vector can learn an intercept term.
    train_x = np.hstack((np.array(train_x), np.ones((len(train_x), 1))))
    # NOTE(review): the extra pair of parentheses around the shape tuple below is
    # redundant (it is the same tuple) — harmless, but inconsistent with the line above.
    test_x = np.hstack((np.array(test_x), np.ones(((len(test_x), 1)))))
    # Flatten the label arrays to 1-D vectors of length n_samples.
    train_y = np.array(train_y).reshape(len(train_y))
    test_y = np.array(test_y).reshape(len(test_y))

    # LogisticRegression is a project-local class (definition not shown here);
    # weights are initialized to zeros, one per feature including the bias column.
    lr = LogisticRegression(learning_rate=0.00015,
                            initial_w=np.zeros(train_x.shape[1]))
    # batch gradient descent
    # history_loss, history_test_loss, history_score,_ = lr.train_gradient_descent(
    #    epoch=150000, epoch_per_round=10000, train_x=train_x, train_y=train_y, test_x=test_x, test_y=test_y)

    # stochastic gradient descent
    # Presumably records loss/score every `iter_per_round` (=100) iterations,
    # yielding 500000/100 = 5000 history points — TODO confirm against the class.
    history_loss, history_test_loss, history_score, _ = lr.train_stochastic_gradient_descent(
        iteration_num=500000,
        iter_per_round=100,
        batch_size=1,
        train_x=train_x,
        train_y=train_y,
        test_x=test_x,
        test_y=test_y)
    print('Coefficient:', lr.w)
    # X-axis for the plots: one point per recording round (every 100 iterations
    # up to 500000), matching iter_per_round above.
    variable_x = range(100, 500001, 100)
    # Drawer is a project-local plotting helper (definition not shown here).
    Drawer().draw_score(variable_x, history_score, ifSGD=True)
    Drawer().draw_loss(variable_x, history_loss, ifSGD=True)
    Drawer().draw_loss(variable_x, history_test_loss, ifSGD=True)

    # Error of stochastic gradient descent with different batch_size
    # Accumulators for the batch-size sweep that follows (loop body continues
    # beyond this chunk).
    all_train_loss: List[float] = []
    all_test_loss: List[float] = []
    all_train_score: List[float] = []
    all_test_score: List[float] = []
    for i in range(10, 401, 10):