def _learn_granular(self, feat_mat, grades):
    min_grade = min(grades)
    max_grade = max(grades)
    grade_to_model = {}
    cur_center_grade = min_grade + 1
    while cur_center_grade < max_grade:
        # Train one regressor per center grade, using only the essays whose
        # grade lies within one point of that center.
        inds_within_one_grade = [ind for ind, grade in enumerate(grades)
                                 if abs(grade - cur_center_grade) < 1.1]
        cur_feat_mat = np.vstack([feat_mat[ind, :] for ind in inds_within_one_grade])
        cur_grades = [grades[ind] for ind in inds_within_one_grade]
        learner = LinearRegression(intercept=True, debug=params.DEBUG)
        learner.train(
            cur_feat_mat,
            cur_grades,
            self.ds_train.getEssaySet(),
            self.ds_train.getDomain(),
            {"feature_selection": "inclusive"},
        )
        grade_to_model[cur_center_grade] = learner
        cur_center_grade += 1
    # The extreme grades have no neighborhood centered on them, so they reuse
    # the models of their nearest neighbors.
    grade_to_model[min_grade] = grade_to_model[min_grade + 1]
    grade_to_model[max_grade] = grade_to_model[max_grade - 1]
    return grade_to_model
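# A minimal usage sketch (not part of the original source) of how a caller
# might apply the per-grade models returned by _learn_granular at prediction
# time. The method name _predict_granular, the provisional_grade argument, and
# the round-and-clamp selection are illustrative assumptions only.
def _predict_granular(self, feat_vec, provisional_grade, grade_to_model):
    # Snap the provisional grade to the nearest integer and clamp it into the
    # range of grades that have a trained model, then delegate to that model.
    center = int(round(provisional_grade))
    center = max(min(grade_to_model), min(max(grade_to_model), center))
    return grade_to_model[center].predict(feat_vec)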
def test_correlations(self):
    X = np.array([[1, 1], [2, 3], [3, 1]])
    Y = np.array([1, 3, 1])
    lr = LinearRegression()
    correlations = lr.get_feature_grade_correlations(X, Y)
    # Feature 0 ([1, 2, 3]) is uncorrelated with Y; feature 1 ([1, 3, 1])
    # matches Y exactly, so its correlation is 1.
    expected = [0, 1]
    for feat_ind, corr in enumerate(correlations):
        self.assertAlmostEqual(expected[feat_ind], corr)
def test_predict(self):
    X = [[1, 0], [0, 2]]
    Y = [3, 2]
    lr = LinearRegression()
    lr.train(X, Y)
    self.assertAlmostEqual(3, lr.predict([1, 0]))
    self.assertAlmostEqual(4, lr.predict([1, 1]))
def test_solve(self):
    # Without an intercept, the exact solution of X * w = Y is w = [3, 1].
    X = [[1, 0], [0, 2]]
    Y = [3, 2]
    lr = LinearRegression()
    lr.train(X, Y)
    self.assertFalse(lr.has_intercept)
    self.assertAlmostEqual(3, lr.params[0])
    self.assertAlmostEqual(1, lr.params[1])
    self.assertEqual(2, len(lr.params))

    # With an intercept, the exact solution is w = [-1, -1] with intercept 4.
    X = [[1, 0], [0, 2], [0, 3]]
    Y = [3, 2, 1]
    lr = LinearRegression(intercept=True)
    lr.train(X, Y)
    self.assertTrue(lr.has_intercept)
    self.assertAlmostEqual(-1, lr.params[0])
    self.assertAlmostEqual(-1, lr.params[1])
    self.assertEqual(2, len(lr.params))
    self.assertAlmostEqual(4, lr.intercept)