# Requires at module level:
import numpy as np
from scipy.stats import multivariate_normal

def fit(self, X, y):
    # If a polynomial transformation is requested
    if self.poly_degree:
        X = polynomial_features(X, degree=self.poly_degree)

    n_samples, n_features = np.shape(X)

    X_X = X.T.dot(X)

    # Least squares approximation of beta
    beta_hat = np.linalg.pinv(X_X).dot(X.T).dot(y)

    # The posterior parameters can be determined analytically since we assume
    # conjugate priors for the likelihoods.

    # Normal prior / likelihood => Normal posterior
    mu_n = np.linalg.pinv(X_X + self.omega0).dot(
        X_X.dot(beta_hat) + self.omega0.dot(self.mu0))
    omega_n = X_X + self.omega0
    # Scaled inverse chi-squared prior / likelihood => scaled inverse
    # chi-squared posterior
    nu_n = self.nu0 + n_samples
    sigma_sq_n = (1.0 / nu_n) * (self.nu0 * self.sigma_sq0 +
                                 y.T.dot(y) +
                                 self.mu0.T.dot(self.omega0).dot(self.mu0) -
                                 mu_n.T.dot(omega_n.dot(mu_n)))

    # Simulate parameter values for n_draws
    beta_draws = np.empty((self.n_draws, n_features))
    for i in range(self.n_draws):
        sigma_sq = self._draw_scaled_inv_chi_sq(n=1, df=nu_n, scale=sigma_sq_n)
        # mu_n has shape (n_features, 1) when y is a column vector
        beta = multivariate_normal.rvs(size=1,
                                       mean=mu_n[:, 0],
                                       cov=sigma_sq * np.linalg.pinv(omega_n))
        # Save parameter draws
        beta_draws[i, :] = beta

    # Use the mean of the simulated parameters as the point estimate
    # for predictions
    self.w = np.mean(beta_draws, axis=0)

    # Lower and upper boundary of the equal-tail credible interval
    # (cred_int is given in percent, e.g. 95)
    l_eti = 50 - self.cred_int / 2
    u_eti = 50 + self.cred_int / 2
    self.eti = np.array([[np.percentile(beta_draws[:, i], q=l_eti),
                          np.percentile(beta_draws[:, i], q=u_eti)]
                         for i in range(n_features)])
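The `_draw_scaled_inv_chi_sq` helper called above is not part of this snippet. A minimal sketch of one way it might be implemented, relying on the standard identity that if Z ~ chi2(df), then df * scale / Z follows a scaled inverse chi-squared distribution (the name and signature are taken from the call above; the body is an assumption):

from scipy.stats import chi2

def _draw_scaled_inv_chi_sq(self, n, df, scale):
    # Draw n samples: if Z ~ chi2(df), then df * scale / Z is
    # scaled-inverse-chi-squared with df degrees of freedom and scale `scale`
    Z = chi2.rvs(size=n, df=df)
    return df * scale / Z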
def predict(self, X, eti=False):
    # If a polynomial transformation is requested
    if self.poly_degree:
        X = polynomial_features(X, degree=self.poly_degree)

    y_pred = X.dot(self.w)
    # If requested, also return the lower and upper boundaries of the
    # cred_int% equal-tail interval for the predictions
    if eti:
        lower_w = self.eti[:, 0]
        upper_w = self.eti[:, 1]
        y_lower_pred = X.dot(lower_w)
        y_upper_pred = X.dot(upper_w)
        return y_pred, y_lower_pred, y_upper_pred

    return y_pred
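Both methods depend on a `polynomial_features` helper that is not shown here. A minimal sketch, assuming it maps X to all monomials of its columns up to the given degree, including a bias column (the empty product):

from itertools import combinations_with_replacement
import numpy as np

def polynomial_features(X, degree):
    n_samples, n_features = np.shape(X)

    # All multisets of column indices of size 0..degree;
    # the empty combination yields the bias column
    combs = [list(c) for d in range(degree + 1)
             for c in combinations_with_replacement(range(n_features), d)]

    X_new = np.empty((n_samples, len(combs)))
    for i, comb in enumerate(combs):
        # Product over the selected columns; an empty product is 1 (bias)
        X_new[:, i] = np.prod(X[:, comb], axis=1)
    return X_new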
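For context, a hypothetical call pattern for the enclosing Bayesian regression class (the class name, constructor arguments, and prior settings below are all assumptions; only fit and predict are shown in this snippet):

import numpy as np

X = np.random.rand(200, 1)
y = 2.0 * X[:, 0] + 0.1 * np.random.randn(200)

# Class name and constructor are assumptions; poly_degree=1 expands X to
# a bias column plus x, so the priors mu0/omega0 cover two coefficients
model = BayesianRegression(n_draws=2000, mu0=np.zeros(2),
                           omega0=0.0001 * np.eye(2),
                           nu0=4, sigma_sq0=1.0,
                           poly_degree=1, cred_int=95)
model.fit(X, y.reshape(-1, 1))  # column vector so mu_n[:, 0] indexing works
y_pred, y_lower, y_upper = model.predict(X, eti=True)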
def fit(self, X, y):
    # Expand the inputs to polynomial features before fitting the base model
    X_transformed = polynomial_features(X, degree=self.degree)
    super(PolynomialRegression, self).fit(X_transformed, y)
Example #6
def fit(self, X, y):
    # Normalize the polynomial-expanded inputs before fitting the base model
    X_transformed = normalize(polynomial_features(X, degree=self.degree))
    super(LassoRegression, self).fit(X_transformed, y)
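The Lasso, ElasticNet, and ridge variants also call a `normalize` helper that is not shown. A minimal sketch, assuming it scales each sample (row) to unit norm:

import numpy as np

def normalize(X, axis=-1, order=2):
    # Scale each sample to unit length (L2 by default);
    # zero-norm samples are left unchanged to avoid division by zero
    norms = np.atleast_1d(np.linalg.norm(X, order, axis))
    norms[norms == 0] = 1
    return X / np.expand_dims(norms, axis)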
Example #7
def fit(self, X, y):
    # Normalize the polynomial-expanded inputs before fitting the base model
    X = normalize(polynomial_features(X, degree=self.degree))
    super(ElasticNet, self).fit(X, y)
Example #8
def predict(self, X):
    # Apply the same transformation used during fitting
    X = normalize(polynomial_features(X, degree=self.degree))
    return super(ElasticNet, self).predict(X)
Example #9
def fit(self, X, y):
    # Normalize the polynomial-expanded inputs before fitting the base model
    X = normalize(polynomial_features(X, degree=self.degree))
    super(PolynomialRidgeRegression, self).fit(X, y)
Example #10
def predict(self, X):
    # Apply the same transformation used during fitting
    X = normalize(polynomial_features(X, degree=self.degree))
    return super(PolynomialRidgeRegression, self).predict(X)
Example #11
def fit(self, X, y):
    # Expand the inputs to polynomial features before fitting the base model
    X = polynomial_features(X, degree=self.degree)
    super(PolynomialRegression, self).fit(X, y)
Example #12
def predict(self, X):
    # Apply the same transformation used during fitting
    X = polynomial_features(X, degree=self.degree)
    return super(PolynomialRegression, self).predict(X)
Example #13
def predict(self, X):
    # Apply the same transformation used during fitting
    X_transformed = normalize(polynomial_features(X, degree=self.degree))
    return super(LassoRegression, self).predict(X_transformed)
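A hypothetical end-to-end usage of one of these subclasses (the constructor signature is an assumption; the base class and its training loop are not shown in these snippets):

import numpy as np

X = np.random.rand(100, 1)
y = 3 * X[:, 0] ** 2 + 0.1 * np.random.randn(100)

# Constructor arguments are assumptions
model = PolynomialRegression(degree=2)
model.fit(X, y)
y_pred = model.predict(X)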