def test_add_dummy_feature(n_samples, n_features):
    """Verify the from-scratch add_dummy_feature agrees with sklearn's.

    Draws a random (n_samples, n_features) matrix and asserts that both
    implementations produce element-wise identical outputs.
    """
    X = np.random.rand(n_samples, n_features)
    ours = skratch.add_dummy_feature(X)
    theirs = sklearn.add_dummy_feature(X)
    assert (theirs == ours).all()
def fit(self, X, y):
    """Fit ordinary least squares by solving the normal equations.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Training design matrix.
    y : ndarray of shape (n_samples,)
        Target values.

    Returns
    -------
    ndarray
        The learned weight vector, also stored on ``self.coef_``
        (bias first when ``self.fit_intercept`` is true).
    """
    if self.fit_intercept:
        # Prepend a constant-1 column so the first weight acts as the bias.
        X = add_dummy_feature(X)
    # Solve (X^T X) w = X^T y directly. np.linalg.solve is both cheaper and
    # numerically more stable than forming the explicit inverse with
    # np.linalg.inv and multiplying it out.
    self.coef_ = np.linalg.solve(X.T.dot(X), X.T.dot(y))
    return self.coef_
def predict(self, X, weights=None):
    """Return linear predictions for the samples in ``X``.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Input samples.
    weights : ndarray, optional
        Weight vector to use; defaults to the fitted ``self.coef_``.

    Returns
    -------
    ndarray of shape (n_samples,)
        The dot product of the (possibly intercept-augmented) features
        with the weights.
    """
    features = add_dummy_feature(X) if self.fit_intercept else X
    w = self.coef_ if weights is None else weights
    return features.dot(w)
def _loss_gradient(self, X, y): features = X if self.fit_intercept: features = add_dummy_feature(features) prediction_loss_gradient = lambda weights: (self.predict( X, weights) - y).dot(features) / len(features) regularization_loss_gradient = lambda weights: self.regularizer.gradient( weights) return lambda weights: prediction_loss_gradient( weights) + regularization_loss_gradient(weights)
def get_X_y_weights(n_samples, n_features, fit_intercept):
    """Generate a random standardized regression problem with exact targets.

    Draws a random design matrix and a random ground-truth weight vector
    (one extra weight for the bias when ``fit_intercept`` is truthy),
    standardizes the features, and computes noise-free targets.

    Returns
    -------
    tuple
        ``(X, y, weights)`` where ``y = features . weights`` exactly.
    """
    # NOTE: the two rand() calls stay in this order so a seeded RNG stream
    # produces the same draws as before.
    X = np.random.rand(n_samples, n_features)
    weights = np.random.rand(X.shape[1] + fit_intercept)
    X = StandardScaler().fit_transform(X)
    features = add_dummy_feature(X) if fit_intercept else X
    y = features.dot(weights)
    return X, y, weights
y[-1] -= 2 fig = plt.figure(figsize=(10, 10)) ax = fig.add_subplot(111) ax.set_ylim([-2.5, 2.5]) ims = [] for weights_, new_loss in reg._fit(X, y): X_ = np.linspace(1.5 * MIN, 1.5 * MAX, n_samples) X_ = np.array([[x**i for i in range(1, degree + 1)] for x in X_.squeeze()]) lines = [] y_ = add_dummy_feature(X_).dot(weights_) not_noisy = np.array( [i for i in range(n_samples) if i not in noisy_instances]) correct, = ax.plot(X[not_noisy, 0].squeeze(), y[not_noisy], '.g') # noise, = ax.plot(X[noisy_instances,0].squeeze(), y[noisy_instances], 'Xr') prediction, = ax.plot(X_[:, 0].squeeze(), y_, 'b') ax.legend([correct, prediction], ["correct", "prediction"]) lines.append(prediction) lines.append(correct) ims.append(lines) anim = animation.ArtistAnimation(fig,