def test_queryvariables():
    """Fetch one explicitly-requested GFS variable and confirm it is returned.

    Removed the unused ``old_variables = amodel.variables`` local: it was
    never read afterwards (dead code).
    """
    amodel = GFS()
    new_variables = ['u-component_of_wind_height_above_ground']
    data = amodel.get_data(_latitude, _longitude, _start, _end,
                           query_variables=new_variables)
    # Raises KeyError if the requested column is missing from the response.
    data['u-component_of_wind_height_above_ground']
def get_irradiance(lat=latitude, lon=longitude, tz=tzo, intervals_of_3=1,
                   time=None):
    """Forecast GHI and cloud cover for a site over a short window.

    Parameters
    ----------
    lat, lon : float — site coordinates (module-level defaults).
    tz : str — timezone applied to the window timestamps.
    intervals_of_3 : int — number of 3-hour GFS intervals to cover.
    time : datetime-like or None — window start; when None, defaults to
        "5 hours ago" computed at call time.

    Returns
    -------
    tuple — (list of GHI values, array of cloud-cover fractions in [0, 1]).
    """
    # BUG FIX: the original default `datetime.datetime.now() + timedelta(hours=-5)`
    # was evaluated once at import time, freezing the start time for every call.
    # Compute it at call time instead.
    if time is None:
        time = datetime.datetime.now() + datetime.timedelta(hours=-5)
    start = pd.Timestamp(time, tz=tz)
    end = start + pd.Timedelta(hours=3 * intervals_of_3)
    print(start, end)
    model = GFS()
    raw_data = model.get_data(lat, lon, start, end)
    # Dead code removed: the original built a full processed pipeline
    # (kelvin_to_celsius, uv_to_speed, irradiance join, output filter) and then
    # discarded it all by re-assigning `data = model.rename(raw_data)`.
    data = model.rename(raw_data)
    irrads = model.cloud_cover_to_irradiance(data['total_clouds'],
                                             how='clearsky_scaling')
    # change this to list when taking more than one values
    return (irrads.ghi.values.tolist()), (data['total_clouds'].values / 100)
def load_GFS_data(latitude=33.8688, longitude=151.2093, tz='Australia/Sydney', days=7):
    """Download and process a GFS forecast for the given site.

    Demonstrates three equivalent routes through the pvlib forecast API —
    the manual step-by-step pipeline, ``process_data``, and the one-shot
    ``get_processed_data`` — printing the head of each, and returns the
    DataFrame from the final convenience call.

    Parameters
    ----------
    latitude, longitude : float — site coordinates (default: Sydney).
    tz : str — IANA timezone for the forecast timestamps.
    days : int — length of the forecast window in days.
    """
    start = pd.Timestamp(datetime.date.today(), tz=tz)
    # BUG FIX: the window length was hard-coded to 7 days; honor the `days`
    # parameter. (Also dropped the unused `irrad_vars` local.)
    end = start + pd.Timedelta(days=days)
    model = GFS()
    raw_data = model.get_data(latitude, longitude, start, end)
    print(raw_data.head())
    # --- route 1: manual pipeline, step by step ---
    data = model.rename(raw_data)
    data['temp_air'] = model.kelvin_to_celsius(data['temp_air'])
    data['wind_speed'] = model.uv_to_speed(data)
    irrad_data = model.cloud_cover_to_irradiance(data['total_clouds'])
    data = data.join(irrad_data, how='outer')
    data = data[model.output_variables]
    print(data.head())
    # --- route 2: same result via process_data ---
    data = model.process_data(raw_data)
    print(data.head())
    # --- route 3: same result via the single convenience call ---
    data = model.get_processed_data(latitude, longitude, start, end)
    print(data.head())
    return (data)
def test_queryvariables():
    """Request a non-default GFS variable and verify it appears in the result."""
    wanted = 'u-component_of_wind_height_above_ground'
    model = GFS()
    response = model.get_data(_latitude, _longitude, _start, _end,
                              query_variables=[wanted])
    # A KeyError here would signal the variable was not returned.
    response[wanted]
def test_queryvariables():
    """Constructing GFS should warn about deprecation; then fetch one variable."""
    wanted = 'u-component_of_wind_height_above_ground'
    with pytest.warns(pvlibDeprecationWarning):
        model = GFS()
    response = model.get_data(_latitude, _longitude, _start, _end,
                              query_variables=[wanted])
    # A KeyError here would signal the variable was not returned.
    response[wanted]
def forecast(latitude, longitude, tz='UTC'):
    """Plot a 7-day GFS irradiance forecast (ghi/dni/dhi) for a location.

    Parameters
    ----------
    latitude, longitude : float — site coordinates.
    tz : str — timezone for the forecast window (default UTC).

    Returns
    -------
    matplotlib Figure containing the irradiance plot.
    """
    # specify time range: today through one week out
    start = Timestamp(datetime.date.today(), tz=tz)
    end = start + Timedelta(days=7)
    # defaults to 0.5 deg resolution; 0.25 deg also available
    model = GFS()
    # retrieve data. returns pandas.DataFrame object
    raw_data = model.get_data(latitude, longitude, start, end)
    # Removed the unused `irrad_vars` local and the redundant
    # `data = raw_data` alias (dead code).
    # upsample to 15-minute resolution, filling gaps by linear interpolation
    data = raw_data.resample('15min').asfreq()
    data = data.interpolate(method='linear', limit_direction='forward', axis=0)
    # rename the columns according the key/value pairs in model.variables.
    data = model.rename(data)
    # convert temperature from Kelvin to Celsius
    data['temp_air'] = model.kelvin_to_celsius(data['temp_air'])
    # convert wind components to wind speed
    data['wind_speed'] = model.uv_to_speed(data)
    # calculate irradiance estimates from cloud cover.
    # uses a cloud_cover to ghi to dni model or a
    # cloud cover to transmittance to irradiance model.
    irrad_data = model.cloud_cover_to_irradiance(data['total_clouds'])
    data = data.join(irrad_data, how='outer')
    # keep only the final data
    data = data[model.output_variables]
    fig = data[['ghi', 'dni', 'dhi']].plot().get_figure()
    return fig

# example usage:
# latitude, longitude, tz = -34, -58, 'America/Argentina/Buenos_Aires'
# fig = forecast(latitude, longitude, tz)
# fig.savefig('irradiance.png')
def _get_forecast_gfs_day(lat: float, lon: float, date: datetime.date) -> pd.DataFrame:
    """Fetch and process one day of quarter-degree GFS forecast data.

    Suspected that multithreading this does not work, because of some
    internal logic within pvlib.forecast.ForecastModel.get_data.

    Parameters
    ----------
    lat, lon : float — must be multiples of 0.25 (GFS quarter-degree grid).
    date : datetime.date — forecast day; the window is [date, date + 1 day).

    Returns
    -------
    pd.DataFrame — ``model.process_data`` output for the requested day.

    Raises
    ------
    ConnectionAbortedError — if the underlying HTTP request fails.
    """
    # Lazy %-style args avoid string formatting when DEBUG is disabled.
    logger.debug("GFS day %s %s %s", lat, lon, date)
    assert lat % 0.25 == 0, "Latitude must be multiple of 0.25"
    assert lon % 0.25 == 0, "Longitude must be multiple of 0.25"
    # Removed unused `today` / `one_week_from_today` locals that only served
    # this commented-out range check:
    # assert one_week_from_today.date() - date >= datetime.timedelta(), \
    #     "Cannot be more than 1 week in the future"
    model = GFS(resolution="quarter")
    start = datetime.datetime.combine(date, datetime.time())
    end = datetime.datetime.combine(date + datetime.timedelta(days=1), datetime.time())
    try:
        data: pd.DataFrame = model.get_data(lat, lon, start, end)
    except requests.exceptions.ConnectionError as err:
        # Chain the cause so the original network traceback is preserved.
        raise ConnectionAbortedError("Connection Error while fetching forecast data. Check network connection.") from err
    return model.process_data(data)
# longitude = 31.468293 tz = 'Africa/Johannesburg' surface_tilt = 30 surface_azimuth = 180 albedo = 0.2 #Set beginning and end date end = pd.Timestamp(datetime.date.today(), tz=tz) start = end - timedelta(12) # Define forecast model fm = GFS() # Retrieve data from forecast API and perform data preparation previous_forecast = fm.get_data(latitude, longitude, start, end) previous_forecast.index = previous_forecast.index.strftime('%Y-%m-%d %H:%M:%S') previous_forecast.index = pd.to_datetime(previous_forecast.index) #resample to three hours to match weather data sampling rate data_res = data.resample('3H', offset='2H').mean() #set datetime limits of solar farm data to match weather data forecast_dates = previous_forecast.index start_datetime = forecast_dates[0] list_r = data_res.index stop_datetime = list_r[-5] date_ranges = [start_datetime, stop_datetime] data_res = data_res[start_datetime:stop_datetime]
def get_pvlib_data(latitude, longitude, tz, altitude, city, start_time, end_time):
    """Assemble clearsky and GFS cloud-cover-derived irradiance for one site.

    Plots two Linke-turbidity world maps (January and July) as a side
    effect, then returns a DataFrame joining the GFS forecast (renamed to
    *_pvlib columns) with the location's clearsky estimates (*_clearsky
    columns), with all indexes converted to the location's timezone.

    Parameters
    ----------
    latitude, longitude : float — site coordinates for Location and GFS.
    tz : str — timezone name for the Location; indexes are converted to it.
    altitude : numeric — site altitude passed to Location.
    city : str — location name passed to Location.
    start_time, end_time : datetime-like — bounds of the forecast window.
    """
    # Locate the Linke turbidity lookup table shipped inside pvlib.
    pvlib_path = os.path.dirname(os.path.abspath(pvlib.clearsky.__file__))
    filepath = os.path.join(pvlib_path, 'data', 'LinkeTurbidities.h5')

    def plot_turbidity_map(month, vmin=1, vmax=100):
        # Render one month's global turbidity slice from the HDF5 table.
        plt.figure()
        with tables.open_file(filepath) as lt_h5_file:
            ltdata = lt_h5_file.root.LinkeTurbidity[:, :, month - 1]
        plt.imshow(ltdata, vmin=vmin, vmax=vmax)
        # data is in units of 20 x turbidity
        plt.title('Linke turbidity x 20, ' + calendar.month_name[month])
        plt.colorbar(shrink=0.5)
        plt.tight_layout()

    # Side effect: show January and July turbidity maps.
    plot_turbidity_map(1)
    plot_turbidity_map(7)

    # Clearsky irradiance estimates at hourly resolution for the site.
    loc = Location(latitude, longitude, tz, altitude, city)
    times = pd.date_range(start=start_time, end=end_time, freq='H', tz=loc.tz)
    cs = loc.get_clearsky(times)

    # pvlib forecasted irradiance derived from GFS cloud cover.
    model = GFS()
    raw_data = model.get_data(latitude, longitude, start_time, end_time)
    data = raw_data
    # rename the columns according the key/value pairs in model.variables.
    data = model.rename(data)
    # convert temperature
    data['temp_air'] = model.kelvin_to_celsius(data['temp_air'])
    # convert wind components to wind speed
    data['wind_speed'] = model.uv_to_speed(data)
    # calculate irradiance estimates from cloud cover.
    # uses a cloud_cover to ghi to dni model or a
    # cloud cover to transmittance to irradiance model.
    irrad_data = model.cloud_cover_to_irradiance(data['total_clouds'])
    # correcting timezone: align both frames to the location's tz before joining
    data.index = data.index.tz_convert(loc.tz)
    irrad_data.index = irrad_data.index.tz_convert(loc.tz)
    # joining cloud_cover and irradiance data frames
    data = data.join(irrad_data, how='outer')
    # renaming irradiance estimates so the two sources stay distinguishable
    cs.rename(columns={
        'ghi': 'GHI_clearsky',
        'dhi': 'DHI_clearsky',
        'dni': 'DNI_clearsky'
    }, inplace=True)
    data.rename(columns={
        'ghi': 'GHI_pvlib',
        'dhi': 'DHI_pvlib',
        'dni': 'DNI_pvlib'
    }, inplace=True)
    # joining clearsky with cloud_cover irradiances
    data = data.join(cs, how='outer')
    return (data)
Created on Mon Mar 10 05:00:26 2019

@author: tstone
"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pvlib
from pvlib.forecast import GFS

# CSV of streetlight locations; assumed to contain latitude, longitude and ID
# columns — TODO confirm against the actual file.
location_path = "streetlight_locations_datasd_ids.csv"
sll = pd.read_csv(location_path)

# GFS model, defaults to 0.5 degree resolution
model_gfs = GFS()

# Create times to retrieve archive data: the last 7 days, Pacific time.
tz = 'US/Pacific'
end = pd.Timestamp.today(tz=tz)
start = end - pd.Timedelta(days=7)

# Download and process cloud-cover data for every streetlight, writing one
# CSV per ID. NOTE(review): one network request per row — slow for big files.
for index, row in sll.iterrows():
    # print(index, row.longitude, row.latitude, row.ID)
    latitude = row.latitude
    longitude = row.longitude
    raw_data = model_gfs.get_data(latitude, longitude, start, end)
    data = model_gfs.process_data(raw_data)
    data.to_csv("../Hackathon Datasets/Cloud/cloud_ID_" + str(row.ID) + ".csv")