def smooth_unsmoothed(input_directory, output_directory):
    """
    Smooth every unsmoothed CSV in ``input_directory`` with a Gaussian
    process and write the result to ``output_directory``.

    The ``tc_1`` and ``tc_2`` columns are replaced by their GP-smoothed
    values (evaluated at the original ``time`` points); files already
    present in the output directory are skipped.

    Example
    ----------
    input_directory = '../data/unsmoothed/15_kw_m2/'
    output_directory = '../data/smoothed/15_kw_m2/'

    smooth_unsmoothed(input_directory, output_directory)
    """
    # Hoist the output listing out of the loop: the original called
    # os.listdir(output_directory) once per input file.
    existing = set(os.listdir(output_directory))

    for in_file in os.listdir(input_directory):
        # endswith() is robust to names with no dot (split('.')[1] raised
        # IndexError) and to extra dots ('a.csv.bak' previously matched).
        if not in_file.endswith('.csv'):
            continue
        if in_file in existing:
            print('File already exists')
            continue

        data = pd.read_csv(os.path.join(input_directory, in_file))
        hyperparams = 10, 10, 10**-6

        # Fit one GP per thermocouple column; the original duplicated this
        # block verbatim for tc_1 and tc_2.
        for column in ('tc_1', 'tc_2'):
            gp = gaussian_process(data.time.values,
                                  hyperparams,
                                  y=data[column].values,
                                  x_star=data.time.values)
            # Residual-based variance estimate, then hyperparameter tuning
            # via the log marginal likelihood — same order as the original.
            var = gp.approx_var()
            gp.optimize_lml()
            y_star, _ = gp.smoother(variance=var)
            data[column] = y_star

        data.to_csv(os.path.join(output_directory, in_file))
# Example #2
# Sort X data for plotting purposes
x_star = X.drop_duplicates().sort_values().values

# Set hyperparameters
b = 20
tau_1_squared = 10
tau_2_squared = 10**-6

# Pack hyperparameters for passing to model
hyperparams = (b, tau_1_squared, tau_2_squared)

# Build a Gaussian-process object from the data and prediction vector
GP = gaussian_process(
    X,
    hyperparams,
    y=Y,
    x_star=x_star,
    cov='squared_exponential',
)

# Grids of candidate hyperparameter values, plus a matrix sized to hold
# the log marginal likelihood at each (b, tau_1_squared) pair
b = np.linspace(50, 80, num=10)
tau_1_squared = np.linspace(30, 60, num=10)
Z = np.zeros((len(b), len(tau_1_squared)))

# A single GP fit yields a residual-sum-of-squares estimate of the variance
variance = GP.approx_var()

# Sweep the (b, tau_1_squared) grid, re-packing the hyperparameters at
# each point for evaluation of the log marginal likelihood
for i, b_val in enumerate(b):
    for j, tau_val in enumerate(tau_1_squared):
        hyperparams = (b_val, tau_val, tau_2_squared)
# Example #3
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug  9 15:36:05 2018

@author: nimishawalgaonkar
"""

import numpy as np
import gaussian_process
import sys
sys.path.append('../')
import llbnn

# Toy 1-D regression problem: draw training targets from a GP sample and
# fit a last-layer Bayesian deep network to them.
X_train = np.linspace(-1, 1, 100).reshape(-1, 1)
Y_train = gaussian_process.gaussian_process(X_train)[:, 0]
X_test = np.linspace(X_train.min(), X_train.max(), 200).reshape(-1, 1)

# Train on the full data set in a single batch
batch_size = len(X_train)

LLBNN = llbnn.LastLayerBayesianDeepNetRegressor(num_epochs=2000,
                                                batch_size=batch_size)
LLBNN.fit(X_train, Y_train)
print('Training is done')

# Predict over a grid spanning the training range; r presumably holds
# (mean, sd) — TODO confirm against llbnn's predict() contract
r = LLBNN.predict(X_test)