Example #1
def test_gradient_descent(samples, labels, test_data, scikit_pred):
    # Fit a LinearRegressor with one gradient-descent run and print its
    # prediction on test_data. 'labels' holds the training targets for
    # 'samples'; 'scikit_pred' (presumably the scikit-learn prediction)
    # is available for manual comparison with the printed value.
    reg = LinearRegressor(samples, labels)
    gr = GradientDescentRunner(reg.get_gradient(), len(samples[0]) + 1)
    _, weights = gr.run_once()
    reg.weights = weights
    p = reg.predict(test_data)
    print("Prediction from gradient descent:", p)
Example #2
    def test_lets_just_look_at_the_outputs(self):
        # Run a single gradient-descent pass over the logistic regressor's
        # gradient and error functions with a very small learning rate.
        gr = GradientDescentRunner(self.logreg.get_gradient(), len(self.samples[0]) + 1,
                                   self.logreg.get_error_function(), alpha=1e-8, max_iter=300)

        _, weights = gr.run_once()
        self.logreg.weights = weights

        # Pair each training sample's predicted probability with its label
        # and pretty-print the pairs for manual inspection (no assertions).
        predictions = [self.logreg.get_probability(d) for d in self.train_dataset]
        import pprint
        pprint.pprint(list(zip(predictions, self.train_labels)))
Example #3
    def test_lets_just_look_at_the_outputs(self):
        # Same sanity check as Example #2: one gradient-descent pass, then
        # eyeball the predicted probabilities against the training labels.
        gr = GradientDescentRunner(self.logreg.get_gradient(),
                                   len(self.samples[0]) + 1,
                                   self.logreg.get_error_function(),
                                   alpha=1e-8,
                                   max_iter=300)

        _, weights = gr.run_once()
        self.logreg.weights = weights

        predictions = [
            self.logreg.get_probability(d) for d in self.train_dataset
        ]
        import pprint
        pprint.pprint(list(zip(predictions, self.train_labels)))
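Examples #2 and #3 drive the same runner from a logistic regressor and only inspect the resulting probabilities (with alpha=1e-8 the update steps are tiny, which is why the test prints the outputs rather than asserting on them). Assuming get_probability is the usual sigmoid of the bias-augmented weighted input and the gradient is that of the log-loss, a self-contained stand-in for that inspection loop might look like the following sketch; none of these helper names come from the project.

# Sketch only: sigmoid probabilities and the log-loss gradient that a logistic
# regressor's get_probability()/get_gradient() could plausibly be built on.
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def probability(weights, x):
    # x is augmented with a leading 1 for the bias term, matching the
    # "len(samples[0]) + 1" weight count used in the examples above.
    return sigmoid(np.dot(weights, np.concatenate(([1.0], x))))

def log_loss_gradient(weights, X, y):
    Xa = np.hstack([np.ones((len(X), 1)), X])          # add the bias column
    p = sigmoid(Xa @ weights)
    return Xa.T @ (p - y)                              # gradient of the log-loss

# Tiny dataset: label 1 when the single feature is "large".
X = np.array([[0.0], [1.0], [2.0], [3.0]])
y = np.array([0.0, 0.0, 1.0, 1.0])

weights = np.zeros(X.shape[1] + 1)
for _ in range(300):                                   # mirrors max_iter=300
    weights -= 0.1 * log_loss_gradient(weights, X, y)  # much larger alpha than 1e-8

import pprint
pprint.pprint(list(zip([probability(weights, x) for x in X], y)))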