def testGetLoadsData(self):
    """Loads at 15-min resolution: one value per timestamp, all non-negative."""
    stepsize = timedelta(minutes=15)
    # Build the timestamp list once instead of calling constructTimeStamps twice.
    timestamps = constructTimeStamps(self.start, self.end, stepsize)
    loads = getLoadsData(self.dataFile, timestamps)
    self.assertEqual(len(timestamps), len(loads))
    # Plain loop instead of a throwaway list comprehension used for side effects.
    for load in loads:
        self.assertGreaterEqual(load, 0)
def testGetLoadsData(self):
    """Pecan Street loads at 15-min resolution match the timestamp count and are non-negative."""
    stepsize = timedelta(minutes=15)
    # Build the timestamp list once instead of calling constructTimeStamps twice.
    timestamps = constructTimeStamps(self.start, self.end, stepsize)
    loads = getPecanstreetData(
        self.dataFile,
        self.timeHeader,
        self.dataid,
        self.column,
        timestamps,
        timedelta(days=self.offset),
    )
    self.assertEqual(len(timestamps), len(loads))
    # Plain loop instead of a throwaway list comprehension used for side effects.
    for load in loads:
        self.assertGreaterEqual(load, 0)
def testGetNinjaPvFile(self):
    """Reading the PV file at hourly resolution yields 23 non-negative values."""
    hourly_stamps = constructTimeStamps(self.start, self.end, timedelta(hours=1))
    data = getNinja(self.pvFile, hourly_stamps)
    self.assertEqual(len(data), 23)
    for value in data:
        self.assertGreaterEqual(value, 0)
def getPredictedPVValue(pvValue, timestamps, delta):
    """Predict PV production for `timestamps` using the trained PV LSTM model.

    `pvValue` is the raw PV series used to build the model input features.
    `delta` is currently unused but kept so the signature stays parallel to
    getPredictedLoadValue (backward-compatible interface).
    Returns (predictions, look_back, output_size).
    """
    config_main = ForecastConfig()
    config_pv = ForecastPvConfig(config_main)
    config_main.TIMESTAMPS = constructTimeStamps(
        datetime.strptime(config_pv.BEGIN, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config_pv.END, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config_pv.STEP_SIZE, "%H:%M:%S")
        - datetime.strptime("00:00:00", "%H:%M:%S"),
    )
    _, endValidation = get_split_indexes(config_main)
    # We drop the year so only month/day order is compared: the requested
    # window must not start before the end of the model's validation period.
    a = datetime.strptime(timestamps[0].strftime("%m-%d"), "%m-%d")
    b = datetime.strptime(
        config_main.TIMESTAMPS[endValidation].strftime("%m-%d"), "%m-%d"
    )
    assert (a - b).days >= 0
    df = addMinutes(pvValue)
    df = addMonthOfYear(df)
    # Data are normalized with the scaler fitted at training time.
    # (Removed leftover debug print of scaler.data_max_.)
    scaler = joblib.load(config_pv.MODEL_FILE_SC)
    df = scaler.transform(df)
    # Sliding windows of LOOK_BACK rows form the LSTM input tensor.
    x = np.empty((len(df) - config_pv.LOOK_BACK, config_pv.LOOK_BACK, df.shape[1]))
    for i in range(len(df) - config_pv.LOOK_BACK):
        x[i] = df[i : i + config_pv.LOOK_BACK, :]
    model = loadModel(config_pv)
    res = model.predict(x)
    res = invertScaler(res, scaler)
    return res, config_pv.LOOK_BACK, config_pv.OUTPUT_SIZE
def testGetPriceDataDownsample(self):
    """Prices downsampled to 2 h steps: count matches and values aggregate in pairs."""
    stepsize = timedelta(hours=2)
    # Build the timestamp list once instead of calling constructTimeStamps twice.
    timestamps = constructTimeStamps(self.start, self.end, stepsize)
    prices = getPriceData(
        "./sample/pecan-iso_neiso-day_ahead_lmp_avg-201901010000-201902010000.csv",
        timestamps,
        timedelta(days=self.offset),
        self.constantPrice,
    )
    self.assertEqual(len(timestamps), len(prices))
    # Plain loop instead of a throwaway list comprehension used for side effects.
    for price in prices:
        self.assertGreaterEqual(price, 0)
    # Each 2 h step combines two hourly values (divided by 1000, presumably a
    # $/MWh -> $/kWh conversion) plus the constant price offset.
    self.assertAlmostEqual(prices[0], (25.308 + 20.291) / 1000 + self.constantPrice)
    self.assertAlmostEqual(prices[-1], (24.2 + 23.417) / 1000 + self.constantPrice)
def testGetLoadsDataOversample(self):
    """Minute-level oversampling repeats each underlying load value."""
    one_minute = timedelta(minutes=1)
    loads = getLoadsData(
        self.dataFile, constructTimeStamps(self.start, self.end, one_minute)
    )
    # 22 hours of minutes plus the inclusive end point.
    self.assertEqual(len(loads), 22 * 60 + 1)
    self.assertEqual(loads[0], 2.444)
    # The first quarter hour must hold a single repeated value.
    for i in range(14):
        self.assertEqual(loads[i], loads[i + 1])
def testGetNinjaPvApi(self):
    """Fetch one day of hourly PV data from the renewables.ninja API.

    When the network is unavailable the test is skipped instead of the old
    vacuous `assertTrue(True)`, so an offline run is reported honestly.
    """
    try:
        metadata, data = getNinjaPvApi(
            52.5170,
            13.3889,
            constructTimeStamps(self.start, self.end, timedelta(hours=1)),
        )
    except NetworkException:
        self.skipTest("renewables.ninja API not reachable")
    else:
        self.assertEqual(len(data), 24)
def testGetLoadsDataDownsample(self):
    """Pecan Street loads downsampled to 2 h steps yield two averaged values."""
    two_hours = timedelta(hours=2)
    loads = getPecanstreetData(
        self.dataFile,
        self.timeHeader,
        self.dataid,
        self.column,
        constructTimeStamps(self.start, self.end, two_hours),
        timedelta(days=self.offset),
    )
    self.assertEqual(len(loads), 2)
    self.assertAlmostEqual(loads[0], 1.071375)
    self.assertAlmostEqual(loads[1], 1.22133334)
def testGetLoadsDataOversample(self):
    """Pecan Street loads oversampled to 1-min steps repeat within each slot."""
    one_minute = timedelta(minutes=1)
    loads = getPecanstreetData(
        self.dataFile,
        self.timeHeader,
        self.dataid,
        self.column,
        constructTimeStamps(self.start, self.end, one_minute),
        timedelta(days=self.offset),
    )
    # 3 hours of minutes plus the inclusive end point.
    self.assertEqual(len(loads), 3 * 60 + 1)
    self.assertAlmostEqual(loads[0], 0.909)
    # The first quarter hour must hold a single repeated value.
    for idx in range(14):
        self.assertEqual(loads[idx], loads[idx + 1])
def __init__(self):
    """Base forecast configuration: seed, time window, timestamps and data split."""
    self.SEED = 15
    self.BEGIN = "2019-05-01 00:00:00"
    self.END = "2019-10-31 23:45:00"
    self.STEPSIZE = "00:30:00"
    # Parse the step size string into a timedelta by subtracting midnight.
    begin = datetime.strptime(self.BEGIN, "20%y-%m-%d %H:%M:%S")
    end = datetime.strptime(self.END, "20%y-%m-%d %H:%M:%S")
    step = datetime.strptime(self.STEPSIZE, "%H:%M:%S") - datetime.strptime(
        "00:00:00", "%H:%M:%S"
    )
    self.TIMESTAMPS = constructTimeStamps(begin, end, step)
    self.OUTPUT_FOLDER = ""
    self.TRAIN_FRACTION = 0.6
    # The remaining data is split evenly between validation and test.
    self.VALIDATION_FRACTION = (1 - self.TRAIN_FRACTION) / 2
def main(argv):
    """Plot the configured load data: a daily profile plus three sample days."""
    config = ForecastLoadConfig()
    step = datetime.strptime(config.STEPSIZE, "%H:%M:%S") - datetime.strptime(
        "00:00:00", "%H:%M:%S"
    )
    timestamps = constructTimeStamps(
        datetime.strptime(config.BEGIN, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config.END, "20%y-%m-%d %H:%M:%S"),
        step,
    )
    loadsData = getData(config, timestamps).values
    plotDay(timestamps, loadsData)
    # 96 steps per day here — assumes a 15-min stepsize (TODO confirm against
    # ForecastLoadConfig.STEPSIZE). Plot days 0, 10 and 100.
    for day in (0, 10, 100):
        lo, hi = 96 * day, 96 * (day + 1)
        plotPart(timestamps[lo:hi], loadsData[lo:hi])
def getPredictedLoadValue(loadsData, timestamps, timedelta):
    """Predict load values for `timestamps` using the trained load LSTM model.

    NOTE(review): the third parameter shadows the stdlib name `timedelta`
    inside this function; it is kept unchanged for interface compatibility.
    Returns (predictions, look_back, output_size).
    """
    config = ForecastConfig()
    loadConfig = ForecastLoadConfig()
    # Time-of-day and day-of-week features on top of the raw load series.
    input_data = add_day_of_week(addMinutes(loadsData))
    step = datetime.strptime(loadConfig.STEPSIZE, "%H:%M:%S") - datetime.strptime(
        "00:00:00", "%H:%M:%S"
    )
    config.TIMESTAMPS = constructTimeStamps(
        datetime.strptime(loadConfig.BEGIN, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(loadConfig.END, "20%y-%m-%d %H:%M:%S"),
        step,
    )
    _, endValidation = get_split_indexes(config)
    # Compare month/day only (year dropped): the requested window must not
    # start before the end of the validation period used during training.
    requested = datetime.strptime(timestamps[0].strftime("%m-%d"), "%m-%d")
    trained = datetime.strptime(
        config.TIMESTAMPS[endValidation].strftime("%m-%d"), "%m-%d"
    )
    assert (requested - trained).days >= 0
    # One extra feature column per configured appliance.
    for appliance in loadConfig.APPLIANCES:
        appliance_data = getPecanstreetData(
            loadConfig.DATA_FILE,
            loadConfig.TIME_HEADER,
            loadConfig.DATAID,
            appliance,
            timestamps,
            timedelta,
        )
        input_data = pd.concat([input_data, appliance_data], axis=1)
    # Normalize with the scaler fitted at training time.
    scaler = joblib.load(loadConfig.MODEL_FILE_SC)
    input_data = scaler.transform(input_data)
    look_back = loadConfig.LOOK_BACK
    n_windows = len(input_data) - look_back
    x = np.empty((n_windows, look_back, input_data.shape[1]))
    for i in range(n_windows):
        x[i] = input_data[i : i + look_back, :]
    predictions = loadModel(loadConfig).predict(x)
    predictions = invertScaler(predictions, scaler)
    return predictions, look_back, loadConfig.OUTPUT_SIZE
def dataImport(config_main, config_pv):
    """Load the Pecan Street solar series and add minute/month time features.

    `config_main` is accepted for signature symmetry but not read here.
    Returns the feature dataframe and the timestamps as a numpy array.
    """
    step = datetime.strptime(config_pv.STEP_SIZE, "%H:%M:%S") - datetime.strptime(
        "00:00:00", "%H:%M:%S"
    )
    timestamps = constructTimeStamps(
        datetime.strptime(config_pv.BEGIN, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config_pv.END, "20%y-%m-%d %H:%M:%S"),
        step,
    )
    # Input data: uncontrollable resource — solar production.
    df = getPecanstreetData(
        config_pv.DATA_FILE,
        config_pv.TIME_HEADER,
        config_pv.DATAID,
        "solar",
        timestamps,
    )
    df = addMonthOfYear(addMinutes(df))
    return df, np.array(timestamps)
def main(argv):
    # Evaluate the trained PV LSTM model against simple baselines
    # (persistence, daily mean, predict-zero) on validation and test splits.
    config = ForecastConfig()
    pvConfig = ForecastPvConfig(config)
    config.OUTPUT_FOLDER = pvConfig.OUTPUT_FOLDER
    timestamps = constructTimeStamps(
        datetime.strptime(pvConfig.BEGIN, "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(pvConfig.END, "20%y-%m-%d %H:%M:%S"),
        # Step size parsed into a timedelta by subtracting midnight.
        datetime.strptime(pvConfig.STEP_SIZE, "%H:%M:%S")
        - datetime.strptime("00:00:00", "%H:%M:%S"),
    )
    config.TIMESTAMPS = timestamps
    # input datas : uncontrollable resource : solar production
    df = getPecanstreetData(
        pvConfig.DATA_FILE, pvConfig.TIME_HEADER, pvConfig.DATAID, "solar", timestamps
    )
    df = addMinutes(df)
    df = addMonthOfYear(df)
    df_train, df_validation, df_test = splitData(config, df)
    # Print the first timestamp of the test split (debug/info output).
    print(timestamps[len(df_validation) + len(df_train)])
    # datas are normalized
    scaler = MinMaxScaler()
    scaler.fit(df_train)
    df_train = scaler.transform(df_train)
    df_validation = scaler.transform(df_validation)
    df_test = scaler.transform(df_test)
    X, y = buildSet(df_test, pvConfig.LOOK_BACK, pvConfig.OUTPUT_SIZE)
    # Keep only the first column (the PV value itself) for the baselines.
    df_train = np.array([df_train[i, 0] for i in range(len(df_train))])
    df_validation = np.array([df_validation[i, 0] for i in range(len(df_validation))])
    df_test = np.array([df_test[i, 0] for i in range(len(df_test))])
    model = loadModel(pvConfig)
    testPredictY = model.predict(X)

    # Local import so matplotlib is only required when this script runs.
    import matplotlib.pyplot as plt

    # Quick-look plots of the test series and the first target window
    # (plt.show() blocks until each window is closed).
    plt.plot(df_test[:100])
    plt.show()
    plt.plot(y[0])
    plt.show()
    # plot_baselines(config, df_train, df_test[:96], timestamps[len(df_train):len(df_train) + 96])
    plotLSTM_Base_Real(config, df_train, testPredictY[72], "mean", y[72])
    # plotLSTM_Base_Real(config, df_train, testPredictY[0], "1step", y[0])
    # Baseline scores are printed, not returned, by the helpers below.
    print("Validation:")
    one_step_persistence_model(df_validation)
    print("Test:")
    one_step_persistence_model(df_test)
    print("Validation:")
    mean_baseline_one_day(config, df_train, df_validation)
    print("Test:")
    mean_baseline_one_day(config, df_train, df_test)
    print("Train on test and predict for Test:")
    mean_baseline_one_day(config, df_test, df_test)
    print("Validation:")
    predict_zero_one_day(config, df_validation)
    print("Test:")
    predict_zero_one_day(config, df_test)
    print("Validation:")
    predict_zero_one_step(df_validation)
    print("Test:")
    predict_zero_one_step(df_test)
def testGetNinjaWindFileOversample(self):
    """Minute-level oversampling of the wind file yields one value per timestamp.

    Bug fix: this wind test previously read self.pvFile (copy-paste from the
    PV file test); it now reads self.windFile, matching
    testGetNinjaWindFileDownsample. The expected length only depends on the
    timestamp list, so the assertion is unchanged.
    """
    stepsize = timedelta(minutes=1)
    data = getNinja(
        self.windFile, constructTimeStamps(self.start, self.end, stepsize)
    )
    self.assertEqual(len(data), 22 * 60 + 1)
def testGetNinjaWindFileDownsample(self):
    """Downsampling the wind file to 2 h steps yields 12 values."""
    two_hours = timedelta(hours=2)
    timestamps = constructTimeStamps(self.start, self.end, two_hours)
    data = getNinja(self.windFile, timestamps)
    self.assertEqual(len(data), 12)
def __init__(self, config):
    """Build the optimization configuration from a parsed ini-style `config`.

    Reads sections GLOBAL, BAT, EV, TIME, DIESEL, PV, WIND, LOADS, DATA_PS,
    COST and CO2. Boolean flags are encoded as the literal string "yes";
    durations are "%H:%M:%S" strings and dates "20%y-%m-%d %H:%M:%S".
    """
    # Global
    self.goal = Goal(config["GLOBAL"]["goal"])
    self.loc_flag = "yes" == config["GLOBAL"]["loc"]
    self.loc_lat = float(config["GLOBAL"]["lat"])
    self.loc_lon = float(config["GLOBAL"]["lon"])
    self.loadResFlag = "yes" == config["GLOBAL"]["loadResFlag"]
    self.overwrite = "yes" == config["GLOBAL"]["overwrite"]
    self.calcAllFlag = "yes" == config["GLOBAL"]["calcAllFlag"]
    # Battery init (to be moved to a initialization file)
    self.SOC_bat_min = float(config["BAT"]["SOC_bat_min"])
    self.SOC_bat_init = float(config["BAT"]["SOC_bat_init"])
    self.SOC_bat_max = float(config["BAT"]["SOC_bat_max"])
    self.E_bat_max = float(config["BAT"]["E_bat_max"])
    self.eta_bat = float(config["BAT"]["eta_bat"])
    self.P_bat_max = float(config["BAT"]["P_bat_max"])
    self.ChargeConvertLoss = float(config["BAT"]["ConvertLoss"])
    # EV init: state of charge limits, power/energy limits and the
    # arrival (t_a), departure (t_b) and charge-goal (t_goal) times.
    self.SOC_ev_min = float(config["EV"]["SOC_ev_min"])
    self.SOC_ev_init = float(config["EV"]["SOC_ev_init"])
    self.SOC_ev_max = float(config["EV"]["SOC_ev_max"])
    self.P_ev_max = float(config["EV"]["P_ev_max"])
    self.E_ev_max = float(config["EV"]["E_ev_max"])
    self.eta_ev = float(config["EV"]["eta_ev"])
    self.t_a_ev = datetime.strptime(config["EV"]["t_a_ev"], "%H:%M:%S")
    self.t_b_ev = datetime.strptime(config["EV"]["t_b_ev"], "%H:%M:%S")
    self.t_goal_ev = datetime.strptime(config["EV"]["t_goal_ev"], "%H:%M:%S")
    # verify we have enough day to build the set for the prediction
    assert (
        datetime.strptime(config["TIME"]["start"], "20%y-%m-%d %H:%M:%S")
        - datetime.strptime(config["TIME"]["startPred"], "20%y-%m-%d %H:%M:%S")
    ).days >= 1, "a delay of at least 1 day is needed to predict"
    # Time frame of optimization
    self.timestamps = constructTimeStamps(
        datetime.strptime(config["TIME"]["start"], "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config["TIME"]["end"], "20%y-%m-%d %H:%M:%S"),
        # Step size parsed into a timedelta by subtracting midnight.
        datetime.strptime(config["TIME"]["stepsize"], "%H:%M:%S")
        - datetime.strptime("00:00:00", "%H:%M:%S"),
    )
    # Separate (earlier-starting) timestamp grids for the PV and load
    # prediction inputs; they may use their own step sizes.
    self.timestampsPredPV = constructTimeStamps(
        datetime.strptime(config["TIME"]["startPred"], "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config["TIME"]["end"], "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config["TIME"]["stepsizePredPV"], "%H:%M:%S")
        - datetime.strptime("00:00:00", "%H:%M:%S"),
    )
    self.timestampsPredLoad = constructTimeStamps(
        datetime.strptime(config["TIME"]["startPred"], "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config["TIME"]["end"], "20%y-%m-%d %H:%M:%S"),
        datetime.strptime(config["TIME"]["stepsizePredLoad"], "%H:%M:%S")
        - datetime.strptime("00:00:00", "%H:%M:%S"),
    )
    # Step size of the optimization grid, also expressed in hours/minutes.
    self.stepsize = getStepsize(self.timestamps)
    self.stepsizeHour = self.stepsize.total_seconds() / 3600
    self.stepsizeMinute = self.stepsize.total_seconds() / 60
    # we add +1 because we are between 00:00 and 23:45 so < 1 day
    self.nbDay = (
        datetime.strptime(config["TIME"]["end"], "20%y-%m-%d %H:%M:%S")
        - datetime.strptime(config["TIME"]["start"], "20%y-%m-%d %H:%M:%S")
    ).days + 1
    # Generators: quadratic fuel-cost coefficients a/b/c, fuel price,
    # start-up cost and minimum run/pause durations (converted to timesteps).
    self.P_dg_max = float(config["DIESEL"]["P_dg_max"])
    self.P_dg_min = float(config["DIESEL"]["P_dg_min"])
    self.dieselQuadraticCof = float(config["DIESEL"]["a_dg"])
    self.dieselLinearCof = float(config["DIESEL"]["b_dg"])
    self.dieselConstantCof = float(config["DIESEL"]["c_dg"])
    self.dieselFuelPrice = float(config["DIESEL"]["c_gen"])
    self.startUpCost = float(config["DIESEL"]["StartUpCost"])
    # NOTE(review): only the hour component of these durations is used,
    # so minutes/seconds in the config are silently dropped — confirm intended.
    self.dieselLeastRunHour = datetime.strptime(
        config["DIESEL"]["LeastRunningTime"], "%H:%M:%S"
    ).hour
    self.dieselLeastPauseHour = datetime.strptime(
        config["DIESEL"]["LeastPauseTime"], "%H:%M:%S"
    ).hour
    self.dieselLeastRunTimestepNumber = int(
        math.ceil(self.dieselLeastRunHour / self.stepsizeHour)
    )
    self.dieselLeastPauseTimestepNumber = int(
        math.ceil(self.dieselLeastPauseHour / self.stepsizeHour)
    )
    self.startUpHour = datetime.strptime(
        config["DIESEL"]["StartUpTime"], "%H:%M:%S"
    ).hour
    self.shutDownHour = datetime.strptime(
        config["DIESEL"]["ShutDownTime"], "%H:%M:%S"
    ).hour
    self.shutDownTimestepNumber = int(
        math.ceil(self.shutDownHour / self.stepsizeHour)
    )
    self.startUpTimestepNumber = int(
        math.ceil(self.startUpHour / self.stepsizeHour)
    )
    # Power ramp per timestep while the generator shuts down / starts up.
    self.deltaShutDown = self.P_dg_min / self.shutDownHour * self.stepsizeHour
    self.deltaStartUp = self.P_dg_min / self.startUpHour * self.stepsizeHour
    # PV source: data file, whether to use predicted values, and scaling.
    self.pvFile = config["PV"]["file"]
    self.pvPdct = "yes" == config["PV"]["usePredicted"]
    self.showErr = "yes" == config["GLOBAL"]["showErr"]
    self.pvScale = float(config["PV"]["scale"])
    # Wind source; *Delta values align each dataset's start with the
    # optimization start time.
    self.windFile = config["WIND"]["file"]
    self.windScale = float(config["WIND"]["scale"])
    self.windStart = datetime.strptime(
        config["WIND"]["windStart"], "20%y-%m-%d %H:%M:%S"
    )
    self.windDelta = self.windStart - datetime.strptime(
        config["TIME"]["start"], "20%y-%m-%d %H:%M:%S"
    )
    self.pvStart = datetime.strptime(config["PV"]["pvStart"], "20%y-%m-%d %H:%M:%S")
    self.pvDelta = self.pvStart - datetime.strptime(
        config["TIME"]["start"], "20%y-%m-%d %H:%M:%S"
    )
    # Loads source.
    self.loadsFile = config["LOADS"]["file"]
    self.loadsPdct = "yes" == config["LOADS"]["usePredicted"]
    self.loadsScale = float(config["LOADS"]["scale"])
    # Pecan Street dataset: file, which resources to take from it, and offset.
    self.dataFile = config["DATA_PS"]["file"]
    self.dataPSLoads = "yes" == config["DATA_PS"]["loads"]
    self.dataPSPv = "yes" == config["DATA_PS"]["pv"]
    self.timeHeader = config["DATA_PS"]["timeHeader"]
    self.dataid = config["DATA_PS"]["dataid"]
    self.dataStart = datetime.strptime(
        config["DATA_PS"]["dataStart"], "20%y-%m-%d %H:%M:%S"
    )
    self.dataDelta = self.dataStart - datetime.strptime(
        config["TIME"]["start"], "20%y-%m-%d %H:%M:%S"
    )
    # Grid cost data plus a constant price offset.
    self.costFileGrid = config["COST"]["file_grid"]
    self.constantPrice = float(config["COST"]["constant_price"])
    self.priceDataStart = datetime.strptime(
        config["COST"]["priceDataStart"], "20%y-%m-%d %H:%M:%S"
    )
    self.priceDataDelta = self.priceDataStart - datetime.strptime(
        config["TIME"]["start"], "20%y-%m-%d %H:%M:%S"
    )
    # CO2 intensities of grid and diesel energy.
    self.co2Grid = float(config["CO2"]["grid_CO2"])
    self.co2Diesel = float(config["CO2"]["diesel_CO2"])
def testGetLoadsDataDownsample(self):
    """Loads downsampled to 2 h steps: 12 values, first one averaged correctly."""
    two_hours = timedelta(hours=2)
    stamps = constructTimeStamps(self.start, self.end, two_hours)
    loads = getLoadsData(self.dataFile, stamps)
    self.assertEqual(len(loads), 12)
    self.assertAlmostEqual(loads[0], 2.09075)