print('DONE FILE 19')

# Remaining Bernoulli runs: (encoding id, output file suffix, completion message).
# Feature selection method 8 (RFE) is common to both.
for encoding, out_file, done_msg in (
    (2, '\\Bernoulli\\RFE + One-Hot.txt', 'DONE FILE 20'),
    (3, '\\Bernoulli\\RFE + Bigrams.txt', 'DONE FILE 21'),
):
    Bernoulli.BernoulliClass(reading.train_A, reading.words_of_tweets,
                             reading.extra_features, 8, encoding, dir + out_file)
    print(done_msg)

##############################################################################################################################################################
# Call Logistic Regression to predict irony and evaluate the outcome
##############################################################################################################################################################

# Each run: (feature-selection id, encoding id, output file suffix, completion message).
for selection, encoding, out_file, done_msg in (
    (7, 1, '\\LogisticRegression\\Univariate Selection + TF-IDF.txt', 'DONE FILE 1'),
    (7, 2, '\\LogisticRegression\\Univariate Selection + One-Hot.txt', 'DONE FILE 2'),
    (7, 3, '\\LogisticRegression\\Univariate Selection + Bigrams.txt', 'DONE FILE 3'),
    (10, 1, '\\LogisticRegression\\SVD + TF-IDF.txt', 'DONE FILE 4'),
    (10, 2, '\\LogisticRegression\\SVD + One-Hot.txt', 'DONE FILE 5'),
):
    LogisticRegression.Logistic_Regression(reading.train_A, reading.words_of_tweets,
                                           reading.extra_features, selection, encoding,
                                           dir + out_file)
    print(done_msg)
data_path = "E:\\github\\LogisticRegression\\data\\" if __name__ == "__main__": # 线性模型,最小二乘法测试********************************************************************************* # feature = [[1, 1, 1], [1, 2, 3], [3, 7, 3]] # label = [[3], [6], [13]] # linear_regression = lnr.Linear_Regression() # print(linear_regression.least_squares_method(feature, label)) # ***************************************************************************************************** # 逻辑回归模型********************************************************************************* data_csv = pd.read_csv(data_path + "test.csv") feature_mat = data_csv.iloc[:, 0:4] label_mat = data_csv["label"].to_frame() test_lr = lr.Logistic_Regression() # 梯度下降法 grad_ascent_model = test_lr.grad_ascent(feature_mat, label_mat, 0.001, 500) print("梯度下降法:") print(grad_ascent_model) print("*******************************") # 随机梯度上升法 origin_model = test_lr.stoc_grad_ascent(feature_mat, label_mat, 0.001) print("随机梯度上升法:") stoc_grad_ascent_model = [] for item in origin_model: new_list = [item] stoc_grad_ascent_model.append(new_list) print(stoc_grad_ascent_model) print("*******************************") print("小样本随机梯度上升法")