def getWeatherFeature(weather_csv='F:/SkyDrive/Load Forecast/residential/data/weather2013_Austin.csv',
                      n_users=144, n_intervals=35040):
    """Build weather features for aggregate residential load forecasting.

    Sums the 15-minute load of the first ``n_users`` customers, downsamples
    it to hourly resolution, prints the Pearson correlation of that load
    against temperature, humidity, pressure and precipitation intensity,
    and returns the three selected features, each normalized.

    Parameters
    ----------
    weather_csv : str
        Path of the hourly weather CSV; must contain columns 'temperature',
        'humidity', 'pressure', 'precip_intensity'. Defaults to the original
        hard-coded location so existing callers are unaffected.
    n_users : int
        Number of customers whose load is aggregated (default 144, as before).
    n_intervals : int
        Length of the 15-minute load series (35040 = 365 days * 96 intervals
        -- TODO confirm against readData's actual series length).

    Returns
    -------
    tuple of np.ndarray
        (temperature, humidity, pressure), each passed through ``normalize``.
    """
    data = readData.loadResidentialData()

    # Aggregate the 15-minute load over the selected customers.
    sumLoad = np.zeros((n_intervals,))
    for i in range(n_users):
        sumLoad += readData.getUserData(data, i)
    load_1hr = changeInterval.From15minTo1hour(sumLoad)

    # Load hourly weather data.
    weather_data = pd.read_csv(weather_csv)

    # Correlation diagnostics: printed only, used to justify feature choice.
    corr_temp = sp.pearsonr(load_1hr, np.array(weather_data['temperature']))[0]
    corr_humidity = sp.pearsonr(load_1hr, np.array(weather_data['humidity']))[0]
    corr_pressure = sp.pearsonr(load_1hr, np.array(weather_data['pressure']))[0]
    corr_precip = sp.pearsonr(load_1hr, np.array(weather_data['precip_intensity']))[0]
    print(corr_temp, corr_humidity, corr_pressure, corr_precip)

    # Selected features: temperature, humidity, pressure (precip dropped).
    temperature = normalize(np.array(weather_data['temperature']))
    humidity = normalize(np.array(weather_data['humidity']))
    pressure = normalize(np.array(weather_data['pressure']))
    return (temperature, humidity, pressure)
# NOTE(review): this chunk begins mid-function -- the indented lines below are
# the tail of a per-day forecasting routine (presumably RNN_LSTM, given the
# call in __main__ below); its `def` line is outside this chunk.
    plt.show()
    # Report the two error metrics for this forecast.
    print('MAPE: %.2f, RMSPE: %.2f' % (mape, rmspe))
    # Tear down the TF1 graph and session so repeated per-day calls
    # don't accumulate graph state across iterations.
    tf.reset_default_graph()
    sess.close()
    return (mape, rmspe)


if __name__ == "__main__":
    # import load data
    data = readData.loadResidentialData()
    # 35040 intervals -- presumably 365 days * 96 fifteen-minute slots; TODO confirm
    sumLoad = np.zeros((35040, ))
    #userLoad = readData.getUserData(data, 0)
    # Aggregate the 15-minute load of 144 customers.
    for i in range(144):
        sumLoad += readData.getUserData(data, i)
    # import weather data (normalized feature series)
    (temperature, humidity, pressure) = load_weather_corr.getWeatherFeature()
    # import cycles data
    (dailycycle, weeklycycle) = cycles.getCycles()
    # Downsample the aggregate load to hourly resolution.
    sumLoad = changeInterval.From15minTo1hour(sumLoad)
    MAPE_sum = 0.0
    RMSPR_sum = 0.0
    # Rolling evaluation: one forecast per day over days 56..363.
    for curr_day in range(56, 364):
        print(curr_day)
        (mape, rmspe) = RNN_LSTM(sumLoad, curr_day)
        # NOTE(review): chunk ends here -- accumulation into MAPE_sum /
        # RMSPR_sum presumably continues past this view.
if __name__ == "__main__":
    # parameters
    T = 96           # time steps per day (15-minute resolution -- TODO confirm)
    n_train = 365    # number of training days
    n_lag = 2        # lag order passed to clustering/forecast -- presumably days; verify

    # import load data
    data = readData.loadResidentialData()
    n_customer = data.shape[1]

    # load sum, 2 years of data
    sumLoad = np.zeros((365 * 2 * T,))
    # sum up the load data
    for i in range(n_customer):
        customer_load = readData.getUserData(data, i)
        # NaN readings are zeroed so they don't poison the aggregate sum.
        sumLoad += np.nan_to_num(customer_load)

    # Min-max scale the aggregate load to [0, 1]; keep min/max to
    # de-normalize forecasts later.
    minLoad = np.min(sumLoad)
    maxLoad = np.max(sumLoad)
    sumLoad = (sumLoad - minLoad) / (maxLoad - minLoad)

    # call clustering function: splits the series into N_cluster groups and
    # returns per-group train/test design matrices and targets.
    N_cluster = 3
    (X_train0, y_train0, X_train1, y_train1, X_train2, y_train2,
     X_test0, X_test1, X_test2,
     y_test0, y_test1, y_test2) = clustering.Clustering(T, N_cluster, n_train, n_lag, sumLoad)

    # neural network forecast
    print("start NN forecast on group 0")
    (MAPE0, RMSPE0, days0) = CNN_forecast(n_lag, T, X_train0, y_train0, X_test0, y_test0, maxLoad, minLoad)
    print('forecast result group 0 : MAPE: %.2f, RMSPE: %.2f' % (MAPE0, RMSPE0))
    # NOTE(review): chunk ends here -- forecasts for groups 1 and 2
    # presumably follow outside this view.