Example #1
    def apply(self):
        # Map the widget's integer settings onto the string options
        # expected by the SGD regression learner.
        loss = [
            "squared_loss", "huber", "epsilon_insensitive",
            "squared_epsilon_insensitive"
        ][self.loss_function]
        penalty = ["l1", "l2", "elasticnet"][self.penalty_type]
        learning_rate = ["invscaling", "constant"][self.learning_rate]
        common_args = dict(
            loss=loss,
            alpha=self.alpha,
            epsilon=self.epsilon,
            eta0=self.eta0,
            l1_ratio=self.l1_ratio,
            power_t=self.power_t,
            penalty=penalty,
            learning_rate=learning_rate,
            n_iter=self.n_iter,
        )

        # Build the learner from the widget's preprocessors and settings.
        learner = linear.SGDRegressionLearner(preprocessors=self.preprocessors,
                                              **common_args)
        learner.name = self.learner_name

        # If input data is present, also fit a predictor on it.
        predictor = None
        if self.data is not None:
            predictor = learner(self.data)
            predictor.name = self.learner_name

        # Push both objects to the widget's output channels.
        self.send("Learner", learner)
        self.send("Predictor", predictor)
Example #2
    def test_SGDRegression(self):
        # Synthetic regression data: 500 rows of 5 sorted features in [0, 10),
        # with the target being the sum of their sines.
        nrows = 500
        ncols = 5
        x = np.sort(10 * np.random.rand(nrows, ncols))
        y = np.sum(np.sin(x), axis=1).reshape(nrows, 1)
        # First half for training, second half for testing.
        x1, x2 = np.split(x, 2)
        y1, y2 = np.split(y, 2)
        # Wrap the training half into a data table and fit the SGD learner.
        t = data.Table(x1, y1)
        learn = lr.SGDRegressionLearner()
        clf = learn(t)
        z = clf(x2)
        # Predictions should stay within a loose tolerance of the true values.
        self.assertTrue((abs(z.reshape(-1, 1) - y2) < 4.0).all())
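
The test above feeds raw features in [0, 10) straight into the SGD learner, which only works because the tolerance of 4.0 is very loose; SGD is sensitive to feature scale. Below is a sketch of the same sanity check done directly with scikit-learn, with standardisation added; the variable names are illustrative and not taken from the test suite.

import numpy as np
from sklearn.linear_model import SGDRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

rng = np.random.RandomState(42)
nrows, ncols = 500, 5
x = np.sort(10 * rng.rand(nrows, ncols))   # same synthetic data as the test
y = np.sum(np.sin(x), axis=1)
x1, x2 = np.split(x, 2)
y1, y2 = np.split(y, 2)

# Standardising the inputs keeps the SGD step sizes well-behaved.
model = make_pipeline(StandardScaler(),
                      SGDRegressor(max_iter=1000, tol=1e-3, random_state=0))
model.fit(x1, y1)
print("max abs error:", np.abs(model.predict(x2) - y2).max())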