import numpy as np
from numpy.testing import assert_allclose

from astroML.linear_model import BasisFunctionRegression


def test_BasisfunctionRegression_simple():
    # 10 one-dimensional points with an exactly linear target y = x + 1
    x = np.arange(10.).reshape((10, 1))
    y = np.arange(10.) + 1
    dy = 1

    # 11 Gaussian basis-function centers covering the data range
    mu = np.arange(11.)[:, None]
    sigma = 1.0

    clf = BasisFunctionRegression(mu=mu, sigma=sigma).fit(x, y, dy)
    y_pred = clf.predict(x)

    # the fit should reproduce the noiseless linear relation essentially exactly
    assert_allclose(y, y_pred, atol=1E-10)
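# For context, a minimal sketch (not part of the test suite) of how the same
# BasisFunctionRegression API behaves on noisy, non-linear data; the center
# grid and width below are illustrative choices, not values from the original.
import numpy as np
from astroML.linear_model import BasisFunctionRegression

rng = np.random.default_rng(0)
x = np.linspace(0, 10, 50)[:, None]
y = np.sin(x[:, 0]) + 0.1 * rng.standard_normal(50)
dy = 0.1

# Gaussian basis centers spanning the data, with width tied to their spacing
centers = np.linspace(0, 10, 21)[:, None]
width = 2.0 * (centers[1, 0] - centers[0, 0])

model = BasisFunctionRegression('gaussian', mu=centers, sigma=width)
model.fit(x, y, dy)
x_grid = np.linspace(0, 10, 200)[:, None]
y_grid = model.predict(x_grid)   # smooth reconstruction of sin(x)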
import numpy as np
from matplotlib import pyplot as plt

from astroML.cosmology import Cosmology
from astroML.datasets import generate_mu_z
from astroML.linear_model import (LinearRegression, PolynomialRegression,
                                  BasisFunctionRegression, NadarayaWatson)

#------------------------------------------------------------
# Generate the noisy sample and the true distance-modulus curve
z_sample, mu_sample, dmu = generate_mu_z(100, random_state=0)

cosmo = Cosmology()
z = np.linspace(0.01, 2, 1000)
# map() returns an iterator in Python 3, so build the array explicitly
mu_true = np.asarray([cosmo.mu(zi) for zi in z])

#------------------------------------------------------------
# Define our regression models
basis_mu = np.linspace(0, 2, 15)[:, None]
basis_sigma = 3 * (basis_mu[1] - basis_mu[0])

subplots = [221, 222, 223, 224]
classifiers = [LinearRegression(),
               PolynomialRegression(4),
               BasisFunctionRegression('gaussian',
                                       mu=basis_mu, sigma=basis_sigma),
               NadarayaWatson('gaussian', h=0.1)]
text = ['Straight-line Regression',
        '4th degree Polynomial\n Regression',
        'Gaussian Basis Function\n Regression',
        'Gaussian Kernel\n Regression']

# Number of constraints of each model.  Because Nadaraya-Watson
# is just a weighted mean, it has only one constraint.
n_constraints = [2, 5, len(basis_mu) + 1, 1]

#------------------------------------------------------------
# Plot the results
fig = plt.figure(figsize=(8, 8))
fig.subplots_adjust(left=0.1)  # remaining layout arguments truncated in the original
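# The excerpt above stops at the figure setup.  A hedged sketch of the
# fitting/plotting loop that would typically follow, using the lists defined
# above; axis limits and annotation placement are illustrative choices,
# not taken from the original.
for i, clf in enumerate(classifiers):
    ax = fig.add_subplot(subplots[i])

    # fit on the noisy sample and predict on the fine redshift grid
    clf.fit(z_sample[:, None], mu_sample, dmu)
    mu_fit = clf.predict(z[:, None])

    # goodness of fit per degree of freedom, using the constraint counts above
    mu_sample_fit = clf.predict(z_sample[:, None])
    chi2_dof = (np.sum(((mu_sample_fit - mu_sample) / dmu) ** 2)
                / (len(mu_sample) - n_constraints[i]))

    ax.errorbar(z_sample, mu_sample, dmu, fmt='.k', ecolor='gray', lw=1)
    ax.plot(z, mu_fit, '-k')
    ax.plot(z, mu_true, '--', c='gray')

    ax.text(0.05, 0.95, text[i], ha='left', va='top', transform=ax.transAxes)
    ax.text(0.05, 0.85, r'$\chi^2_{\rm dof} = %.2f$' % chi2_dof,
            ha='left', va='top', transform=ax.transAxes)

    ax.set_xlim(0.01, 1.8)
    ax.set_ylim(36.0, 48.0)
    ax.set_xlabel('$z$')
    ax.set_ylabel(r'$\mu$')

plt.show()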
from astroML.linear_model import BasisFunctionRegression


def fit_BasisFunction(features_train, labels_train, features_pred,
                      kernel='gaussian', mu=mu0, sigma=0.1):
    """Fit a basis-function regression on the training set and return
    predictions at features_pred.

    The default center grid ``mu0`` (shape (n_centers, 1)) is assumed to be
    defined at module level before this function is defined.
    """
    model = BasisFunctionRegression(kernel, mu=mu, sigma=sigma)
    model.fit(features_train, labels_train)
    labels_pred = model.predict(features_pred)
    return labels_pred
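# A short usage sketch for the helper above (assuming the module-level mu0
# grid it requires already exists).  The toy data and the explicit `centers`
# grid below are illustrative assumptions, not values from the original.
import numpy as np

rng = np.random.default_rng(42)
X_train = rng.uniform(0, 1, size=(40, 1))
y_train = X_train[:, 0] ** 2 + 0.05 * rng.standard_normal(40)   # noisy quadratic
X_new = np.linspace(0, 1, 100)[:, None]

centers = np.linspace(0, 1, 10)[:, None]   # hypothetical stand-in for mu0
y_new = fit_BasisFunction(X_train, y_train, X_new, mu=centers, sigma=0.2)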