Code Example #1
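A test helper that loads the wine dataset, appends a bias column to the inputs, and trains LinearRegression two ways: with stochastic gradient descent (sgd=True) and with the closed-form matrix-inversion solver, printing the average training and testing loss for each.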
def test_linreg():
    '''
        Helper function that tests LinearRegression.

        @param:
            None
        @return:
            None
    '''

    X_train, X_test, Y_train, Y_test = import_wine(WINE_FILE_PATH)

    num_features = X_train.shape[1]

    # Padding the inputs with a bias
    X_train_b = np.append(X_train, np.ones((len(X_train), 1)), axis=1)
    X_test_b = np.append(X_test, np.ones((len(X_test), 1)), axis=1)

    #### Stochastic Gradient Descent ######
    print('---------- LINEAR REGRESSION w/ SGD ----------')
    sgd_model = LinearRegression(num_features, sgd=True)
    sgd_model.train(X_train_b, Y_train)
    print('Average Training Loss:', sgd_model.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', sgd_model.average_loss(X_test_b, Y_test))

    #### Matrix Inversion ######
    print('---- LINEAR REGRESSION w/ Matrix Inversion ---')
    solver_model = LinearRegression(num_features)
    solver_model.train(X_train_b, Y_train)
    print('Average Training Loss:',
          solver_model.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', solver_model.average_loss(X_test_b, Y_test))
Code Example #2
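A variant of the same test helper that still contains scratch code: it builds small NumPy arrays to sanity-check row indexing, bias padding, and LinearRegression construction before loading the wine data and training only the matrix-inversion model.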
def test_linreg():
    '''
        Helper function that tests LinearRegression.

        @param:
            None
        @return:
            None
    '''
    # Scratch checks on small arrays: row indexing, bias padding, and
    # LinearRegression construction, before running on the real data
    m = np.array([[2, 3], [1, 0]])
    mm = np.array([[2, 3, 4, 5], [1, 1, 3, 0]])
    for l in range(2):
        print(mm[l, range(2)])
    n = np.append(m, np.ones((len(m), 1)), axis=1)
    s = LinearRegression(m.shape[1])

    X_train, X_test, Y_train, Y_test = import_wine(WINE_FILE_PATH)
    num_features = X_train.shape[1]

    # Padding the inputs with a bias
    X_train_b = np.append(X_train, np.ones((len(X_train), 1)), axis=1)
    X_test_b = np.append(X_test, np.ones((len(X_test), 1)), axis=1)

    #### Matrix Inversion ######
    print('---- LINEAR REGRESSION w/ Matrix Inversion ---')
    solver_model = LinearRegression(num_features)
    solver_model.train(X_train_b, Y_train)
    print('Average Training Loss:',
          solver_model.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', solver_model.average_loss(X_test_b, Y_test))
Code Example #3
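A driver that loads a whitespace-delimited dataset (first column as the target), normalizes the features, splits the data into training and test sets, and compares LinearRegression, OneLayerNN, and TwoLayerNN. A bias column is appended manually for the first two models, while TwoLayerNN learns its own bias.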
def test_models(dataset, epochs, test_size=0.2):
    '''
        Tests LinearRegression, OneLayerNN, and TwoLayerNN on a given dataset.

        :param dataset: The path to the dataset
        :param epochs: Number of epochs to train the neural network models for
        :param test_size: Fraction of the data held out as the test set
        :return: None
    '''

    # Check if the file exists
    if not os.path.exists(dataset):
        print('The file {} does not exist'.format(dataset))
        exit()

    # Load in the dataset
    data = np.loadtxt(dataset, skiprows=1)
    X, Y = data[:, 1:], data[:, 0]

    # Normalize the features
    X = (X - np.mean(X, axis=0)) / np.std(X, axis=0)

    X_train, X_test, Y_train, Y_test = train_test_split(X,
                                                        Y,
                                                        test_size=test_size)

    print('Running models on {} dataset'.format(dataset))

    #### Linear Regression ######
    print('----- LINEAR REGRESSION -----')
    # Add a bias
    X_train_b = np.append(X_train, np.ones((len(X_train), 1)), axis=1)
    X_test_b = np.append(X_test, np.ones((len(X_test), 1)), axis=1)
    regmodel = LinearRegression()
    regmodel.train(X_train_b, Y_train)
    print('Average Training Loss:', regmodel.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', regmodel.average_loss(X_test_b, Y_test))

    #### 1-Layer NN ######
    print('----- 1-Layer NN -----')
    nnmodel = OneLayerNN()
    nnmodel.train(X_train_b, Y_train, epochs=epochs, print_loss=False)
    print('Average Training Loss:', nnmodel.average_loss(X_train_b, Y_train))
    print('Average Testing Loss:', nnmodel.average_loss(X_test_b, Y_test))

    #### 2-Layer NN ######
    print('----- 2-Layer NN -----')
    model = TwoLayerNN(5)
    # Use X without a bias, since we learn a bias in the 2 layer NN.
    model.train(X_train, Y_train, epochs=epochs, print_loss=False)
    print('Average Training Loss:', model.average_loss(X_train, Y_train))
    print('Average Testing Loss:', model.average_loss(X_test, Y_test))
Code Example #4
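A fragment of a script that fits LinearRegression to synthetic data (y = 5x plus Gaussian noise), trains it with an SGD optimizer (Adam and RMSprop are left as commented-out alternatives), prints the learned weight and noise variance, and plots the fit. The fragment assumes that N, D_in, D_out, and the imports for np, plt, and lhs are defined earlier in the script; a hedged sketch of such a preamble follows the fragment.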
    # Add some noise to the observations
    noise_var = 0.5

    # Create random input and output data
    X = lhs(D_in, N)
    y = 5 * X + noise_var * np.random.randn(N, D_out)

    # Define the model
    model = LinearRegression(X, y)

    # Define an optimizer
    optimizer = SGD(model.num_params, lr=1e-3, momentum=0.9)
    #    optimizer = Adam(model.num_params, lr = 1e-3)
    #    optimizer = RMSprop(model.num_params, lr = 1e-3)

    # Train the model
    model.train(10000, optimizer)

    # Print the learned parameters
    print('w = %e, sigma_sq = %e' %
          (model.theta[:-1], np.exp(model.theta[-1])))

    # Make predictions
    y_pred = model.predict(X)

    # Plot
    plt.figure(1)
    plt.plot(X, y, 'o')
    plt.plot(X, y_pred)
    plt.show()
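The preamble of the fragment above is not shown. A minimal sketch of what it might contain, assuming lhs is the Latin hypercube sampler from pyDOE and that the problem sizes are small placeholder values (all of these are assumptions, not taken from the original script):

import numpy as np
import matplotlib.pyplot as plt
from pyDOE import lhs  # assumed source of lhs (Latin hypercube sampling)

# Hypothetical problem sizes; the original values are not shown in the fragment
N, D_in, D_out = 100, 1, 1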
Code Example #5
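A training script that reads the window size and prediction horizon from a project-specific config_loader, loads windowed time-series data through DataLoader, fits LinearRegression, and then loops over U.S. states (skipping the national "US" entry) to collect multi-day prediction metrics. The snippet is truncated in the middle of the call to DataLoader.get_predictions.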
import numpy as np
from sklearn.metrics import mean_squared_error, r2_score
import config_loader

window_size = config_loader.get_window_size()
num_days = config_loader.get_num_predicted_days()

# Load the training data with n days held out
print("Loading training data...")
X_train, y_train, y_test = DataLoader.get_date_separated_testing_data(
    window_size,
    num_days,
)
# Train the model (and save it to file)
print("Training the model...")
model = LinearRegression()
model.train(X_train, y_train)

# Get metrics for the multi-day predictions in each state
states = DataLoader.get_states()
all_errors = np.empty((0, num_days))
all_predictions = np.empty((0, num_days))
all_actual = np.empty((0, num_days))
all_control = np.empty((0, num_days))
for state_name, state_abbrev in states.items():
    if state_abbrev != "US":
        # get the multi-day predictions
        case_df = DataLoader.get_daily_cases_df(state_abbrev)[:-num_days]
        vax_df = DataLoader.get_daily_vaccinations_df(state_abbrev)[:-num_days]
        future_vaccinations = DataLoader.get_assumed_vaccinations_dict(
            vax_df, num_days, multiplier=1)
        predictions_dict = DataLoader.get_predictions(
Code Example #6
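A small test script that generates noisy linear data, scales both variables, trains LinearRegression with explicit hyperparameters, prints the learned parameters, and plots the fitted line against the training data. scaleFeature and wait are helpers defined elsewhere in the same project; a hedged sketch of scaleFeature follows the example.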
# Test script
lm = LinearRegression()
n = 50

# Generate some noisy train data
noise = np.random.randn(n)
x = np.linspace(0, 20, n)
y = 32.3 + 5.2 * (x + noise)

x = scaleFeature(x)
y = scaleFeature(y)

x = x.reshape((-1, 1))

# Train the lm, print out the parameters, plot the fit
lm.train(x, y, 0.5, 100, 0)
print(lm.parameters)

# Plot the fit of the linear model
y_hat = lm.predict(x)

# Plot the fit of line and train data
plt.plot(x, y, 'o')
plt.plot(x, y_hat)
plt.ylabel("y")
plt.xlabel("x")
plt.title("Plot of the Model's Fit")
plt.show()

wait()
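scaleFeature is not shown above. A minimal sketch of what it might do, assuming it standardizes a 1-D array to zero mean and unit variance (both the behavior and the implementation are assumptions, not taken from the original project):

import numpy as np

def scaleFeature(v):
    # Hypothetical helper: standardize a 1-D array to zero mean and unit variance
    v = np.asarray(v, dtype=float)
    return (v - v.mean()) / v.std()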