# Average several uniform-random draws to build the test inputs, then evaluate
# the Friedman #1 target on the averaged points.
for i in range(N_iter):
    # Vectorized draw replaces the original per-element double loop — same
    # distribution (i.i.d. Uniform[0, 1) in every cell), one C-level call.
    X_pred = np.random.rand(N_test, dimensions)  # Test Prediction
    X_pred_mean.append(X_pred)

# Element-wise mean over the N_iter draws.
X_pred = np.array(X_pred_mean).mean(axis=0)

# Friedman #1 regression target (only the first five inputs are informative).
y = (10 * np.sin(np.pi * X_pred[:, 0] * X_pred[:, 1]) + 20 *
     (X_pred[:, 2] - 1 / 2)**2 + 10 * X_pred[:, 3] + 5 * X_pred[:, 4])

# Fit
alpha, variance_mp, mu_mp, sigma_mp = rvm_r.fit(X_train, variance,
                                                target_train, kernel, N_train,
                                                dimensions, N_test)
relevant_vectors = alpha[1].astype(int)

# Predict
y_pred = rvm_r.predict(X_train, X_pred, relevant_vectors, variance_mp, mu_mp,
                       sigma_mp, kernel, dimensions, N_test)

# Check Performance
print('RMSE:', sqrt(mean_squared_error(y, y_pred)))
print('Number of relevant vectors:', len(relevant_vectors) - 1)
plt.scatter(range(N_test), y, label='Real')
plt.scatter(range(N_test), y_pred, c='orange', label='Predicted')
plt.legend()
plt.show()
# Initial noise variance for the 1-D sinc regression example.
variance = 0.01

# Choose kernel between linear_spline or exponential
kernel = "linear_spline"

X_train = np.linspace(-10, 10, N_train)  # Training inputs
X_test = np.linspace(-10, 10, N_test)  # Test inputs
y = np.zeros(N_test)

# Training targets: sin(x)/x plus uniform noise in [-0.2, 0.2).
# np.sinc(t) = sin(pi*t)/(pi*t), so sin(x)/x == np.sinc(x/pi); unlike the
# original element-wise division, np.sinc is well-defined at x == 0, where
# the old loop produced a 0/0 NaN whenever linspace hit an exact zero
# (any odd N_train).  The draw is also vectorized.
y_train = np.sinc(X_train / np.pi) + np.random.uniform(-0.2, 0.2, N_train)

# Fit the RVM on the 1-D sinc data; X_train is reshaped to an
# (N_train, dimensions) matrix because rvm_r.fit expects a 2-D design matrix.
alpha, variance_mp, mu_mp, sigma_mp = rvm_r.fit(
    np.reshape(X_train, (N_train, dimensions)), variance, y_train, kernel,
    N_train, dimensions, N_test)
# NOTE(review): alpha[1] presumably holds the indices of the relevant
# vectors — confirm against rvm_r.
relevant_vectors = alpha[1].astype(int)

print("Running RVM testing...")
# Noise-free ground truth sin(x)/x on the test grid.
# NOTE(review): this divides by X_test[i] and yields a 0/0 NaN if the grid
# contains an exact zero (odd N_test) — confirm upstream.
for i in range(N_test):
    y[i] = math.sin(X_test[i]) / X_test[i]
y_pred = rvm_r.predict(X_train, X_test, relevant_vectors, variance_mp, mu_mp,
                       sigma_mp, kernel, dimensions, N_test)
print('RMSE:', sqrt(mean_squared_error(y, y_pred)))
# NOTE(review): the printed value is the SQUARED maximum absolute error,
# despite the label — confirm whether the **2 is intended.
print('Maximum error between predicted samples and true: ',
      max(abs(y - y_pred))**2)
print('Number of relevant vectors:', len(relevant_vectors) - 1)
plt.plot(X_test, y_pred, c='r', label='Predicted values')
plt.scatter(X_train, y_train, label='Training samples')
plt.plot(X_test, y, c='black', label='True function')
# Example 3
# NOTE(review): the statement below is truncated in this extract — the head of
# the assignment is missing (it appears to be the Friedman #2 target,
# y = sqrt(x0^2 + (x1*x2 - 1/(x1*x3))^2)).  Kept commented out until the
# missing prefix is restored from the full source.
#     pow(X_train[:, 1] * X_train[:, 2] - 1 /
#         (X_train[:, 1] * X_train[:, 3]), 2), 1 / 2)

# Adding Gaussian noise scaled to one third of the CLEAN signal's standard
# deviation.  The original loop recomputed y_train.std() inside every
# iteration, so later samples were noised relative to an already partially
# corrupted signal; hoisting the scale fixes that and allows one vectorized
# draw instead of N_train scalar draws.
noise_scale = y_train.std() / 3
y_train = y_train + np.random.normal(0, noise_scale, N_train)

# Reshape into an (N_train, 1) column vector, as expected by the fit below.
y_train = np.reshape(y_train, (len(y_train), 1))

# Scale each input dimension to [0, 1] so kernel distances are comparable.
# (Bind the scaler instance to its own name instead of shadowing the
# preprocessing.MinMaxScaler class, as the original did.)
scaler = preprocessing.MinMaxScaler()
X_train = scaler.fit_transform(X_train)

# Fit the RVM on the scaled training data.
# NOTE(review): alpha[1] presumably holds the indices of the relevant
# vectors — confirm against rvm_r.
alpha, variance_mp, mu_mp, sigma_mp = rvm_r.fit(X_train, variance, y_train,
                                                kernel, X_train.shape[0],
                                                dimensions, N_test)
relevant_vectors = alpha[1].astype(int)
print("Number of relevant vectors:", len(relevant_vectors) - 1)

# Generation of testing inputs for the Friedman #2 benchmark.
y = np.zeros(N_test)

print("Running RVM testing...")
for i in tqdm(range(tests)):
    # Allocate a FRESH array every iteration: the original reused (and mutated
    # in place) a single X_test buffer, so test_array ended up holding `tests`
    # references to the SAME final draw instead of independent draws.
    X_test = np.zeros((N_test, dimensions))
    X_test[:, 0] = np.random.uniform(0, 100, N_test)
    X_test[:, 1] = np.random.uniform(40 * np.pi, 560 * np.pi, N_test)
    X_test[:, 2] = np.random.uniform(0, 1, N_test)
    X_test[:, 3] = np.random.uniform(1, 11, N_test)
    test_array.append(X_test)
# Example 4
# Normalizing the dimensions to make proper comparisons.  The scaler is fitted
# on the TRAINING data only and then merely applied to the test data:
# re-fitting on X_test (as the original's fit_transform did) scales train and
# test with different min/max parameters, silently distorting every kernel
# distance at prediction time.  Also avoid shadowing the class name.
scaler = preprocessing.MinMaxScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Kernel choice for this example (the RVM also supports linear_spline or
# exponential).
kernel = "gaussian"

# Initialize the noise variance for the RVM fit.
variance = 0.01
N = X_train.shape[0]  # 480
dimensions = X_train.shape[1]  # 14
N_test_size = X_test.shape[0]

# Fit the RVM on the scaled training data.  The last argument is the
# test-set size: the original passed N (the TRAINING size) and left
# N_test_size unused, which breaks whenever the test set is not exactly as
# large as the training set (cf. the earlier examples, which pass N_test in
# this position — TODO confirm against rvm_r's signature).
alpha, variance_mp, mu_mp, sigma_mp = rvm_r.fit(X_train, variance, y_train,
                                                kernel, N, dimensions,
                                                N_test_size)
relevant_vectors = alpha[1].astype(int)

# Predict on the (train-scaled) test inputs; same fix for the final
# test-size argument.
y_pred = rvm_r.predict(X_train, X_test, relevant_vectors, variance_mp, mu_mp,
                       sigma_mp, kernel, dimensions, N_test_size)

# Check Performance
print('RMSE for RVM:', sqrt(mean_squared_error(y_test, y_pred)))
print('Number of relevant vectors:', len(relevant_vectors))

# Performance with SVM from sklearn (baseline comparison against the RVM).
# gamma = 1/dimensions replicates sklearn's historical default RBF gamma.
clf = svm.SVR(kernel="rbf", gamma=(1 / dimensions))
# NOTE(review): if y_train is still an (N, 1) column vector here, sklearn
# warns and ravels it — consider y_train.ravel(); verify the upstream shape.
clf.fit(X_train, y_train)
svm_pred = clf.predict(X_test)
# NOTE(review): svm_pred is computed but its RMSE is never printed —
# presumably an RMSE comparison against the RVM was intended here.
print('Number of support vectors:', len(clf.support_))