def _fit_regressor(self, X, y, alpha, C, loss, learning_rate,
                   sample_weight, n_iter):
        """Fit regression coefficients with (averaged) SGD.

        Runs ``n_iter`` epochs of stochastic gradient descent over the
        training data, updating ``self.coef_`` / ``self.intercept_`` in
        place, and advances the iteration counter ``self.t_``.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.
        y : array-like of shape (n_samples,)
            Target values.
        alpha : float
            Regularization strength.
        C : float
            Maximum step size (passive-aggressive style updates).
        loss : str
            Name of the loss function to optimize.
        learning_rate : str
            Name of the learning-rate schedule.
        sample_weight : array-like of shape (n_samples,)
            Per-sample weights.
        n_iter : int
            Number of passes (epochs) over the training data.
        """
        dataset, intercept_decay = make_dataset(X, y, sample_weight)

        # One coefficient per feature. The previous hard-coded shape (3,)
        # only worked when X happened to have exactly 3 features.
        self.coef_ = np.zeros(X.shape[1], dtype=np.float64, order="C")

        loss_function = self._get_loss_function(loss)
        penalty_type = self._get_penalty_type(self.penalty)
        learning_rate_type = self._get_learning_rate_type(learning_rate)

        # First call: start the (1-based) SGD iteration counter.
        if self.t_ is None:
            self.t_ = 1.0

        random_state = check_random_state(self.random_state)
        # numpy mtrand expects a C long which is a signed 32 bit integer under
        # Windows
        seed = random_state.randint(0, np.iinfo(np.int32).max)

        if self.average > 0:
            # Averaged SGD: track both the standard and the averaged
            # parameters; average_sgd returns updated copies of each.
            self.standard_coef_, self.standard_intercept_, \
                self.average_coef_, self.average_intercept_ = \
                average_sgd(self.standard_coef_,
                            self.standard_intercept_[0],
                            self.average_coef_,
                            self.average_intercept_[0],
                            loss_function,
                            penalty_type,
                            alpha, C,
                            self.l1_ratio,
                            dataset,
                            n_iter,
                            int(self.fit_intercept),
                            int(self.verbose),
                            int(self.shuffle),
                            seed,
                            1.0, 1.0,
                            learning_rate_type,
                            self.eta0, self.power_t, self.t_,
                            intercept_decay, self.average)

            self.average_intercept_ = np.atleast_1d(self.average_intercept_)
            self.standard_intercept_ = np.atleast_1d(self.standard_intercept_)
            self.t_ += n_iter * X.shape[0]

            # Expose the averaged parameters only once averaging has
            # actually started (self.average is the start iteration).
            if self.average <= self.t_ - 1.0:
                self.coef_ = self.average_coef_
                self.intercept_ = self.average_intercept_
            else:
                self.coef_ = self.standard_coef_
                self.intercept_ = self.standard_intercept_

        else:
            # Plain SGD path; intercept starts at 0.0.
            self.coef_, self.intercept_ = \
                self.parallelizer(self.coef_,
                                  0.0,
                                  loss_function,
                                  penalty_type,
                                  alpha, C,
                                  self.l1_ratio,
                                  dataset,
                                  n_iter,
                                  int(self.fit_intercept),
                                  int(self.verbose),
                                  int(self.shuffle),
                                  seed,
                                  1.0, 1.0,
                                  learning_rate_type,
                                  self.eta0, self.power_t, self.t_,
                                  intercept_decay)

            # (Removed leftover debug prints of coef_/intercept_.)
            self.t_ += n_iter * X.shape[0]
            self.intercept_ = np.atleast_1d(self.intercept_)
# Example #2 (scraped-page marker; the following is a second copy of
# _fit_regressor)
    def _fit_regressor(self, X, y, alpha, C, loss, learning_rate,
                       sample_weight, n_iter):
        """Fit regression coefficients with (averaged) SGD.

        Runs ``n_iter`` epochs of stochastic gradient descent over the
        training data, updating ``self.coef_`` / ``self.intercept_`` in
        place, and advances the iteration counter ``self.t_``.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data.
        y : array-like of shape (n_samples,)
            Target values.
        alpha : float
            Regularization strength.
        C : float
            Maximum step size (passive-aggressive style updates).
        loss : str
            Name of the loss function to optimize.
        learning_rate : str
            Name of the learning-rate schedule.
        sample_weight : array-like of shape (n_samples,)
            Per-sample weights.
        n_iter : int
            Number of passes (epochs) over the training data.
        """
        dataset, intercept_decay = make_dataset(X, y, sample_weight)

        # One coefficient per feature. The previous hard-coded shape (3,)
        # only worked when X happened to have exactly 3 features.
        self.coef_ = np.zeros(X.shape[1], dtype=np.float64, order="C")

        loss_function = self._get_loss_function(loss)
        penalty_type = self._get_penalty_type(self.penalty)
        learning_rate_type = self._get_learning_rate_type(learning_rate)

        # First call: start the (1-based) SGD iteration counter.
        if self.t_ is None:
            self.t_ = 1.0

        random_state = check_random_state(self.random_state)
        # numpy mtrand expects a C long which is a signed 32 bit integer under
        # Windows
        seed = random_state.randint(0, np.iinfo(np.int32).max)

        if self.average > 0:
            # Averaged SGD: track both the standard and the averaged
            # parameters; average_sgd returns updated copies of each.
            self.standard_coef_, self.standard_intercept_, \
                self.average_coef_, self.average_intercept_ = \
                average_sgd(self.standard_coef_,
                            self.standard_intercept_[0],
                            self.average_coef_,
                            self.average_intercept_[0],
                            loss_function,
                            penalty_type,
                            alpha, C,
                            self.l1_ratio,
                            dataset,
                            n_iter,
                            int(self.fit_intercept),
                            int(self.verbose),
                            int(self.shuffle),
                            seed,
                            1.0, 1.0,
                            learning_rate_type,
                            self.eta0, self.power_t, self.t_,
                            intercept_decay, self.average)

            self.average_intercept_ = np.atleast_1d(self.average_intercept_)
            self.standard_intercept_ = np.atleast_1d(self.standard_intercept_)
            self.t_ += n_iter * X.shape[0]

            # Expose the averaged parameters only once averaging has
            # actually started (self.average is the start iteration).
            if self.average <= self.t_ - 1.0:
                self.coef_ = self.average_coef_
                self.intercept_ = self.average_intercept_
            else:
                self.coef_ = self.standard_coef_
                self.intercept_ = self.standard_intercept_

        else:
            # Plain SGD path; intercept starts at 0.0.
            self.coef_, self.intercept_ = \
                self.parallelizer(self.coef_,
                                  0.0,
                                  loss_function,
                                  penalty_type,
                                  alpha, C,
                                  self.l1_ratio,
                                  dataset,
                                  n_iter,
                                  int(self.fit_intercept),
                                  int(self.verbose),
                                  int(self.shuffle),
                                  seed,
                                  1.0, 1.0,
                                  learning_rate_type,
                                  self.eta0, self.power_t, self.t_,
                                  intercept_decay)

            # (Removed leftover debug prints of coef_/intercept_.)
            self.t_ += n_iter * X.shape[0]
            self.intercept_ = np.atleast_1d(self.intercept_)