def load_GFS_data(latitude=33.8688, longitude=151.2093, tz='Australia/Sydney', days=7):
    """Fetch and process a GFS irradiance/weather forecast for one site.

    Parameters
    ----------
    latitude, longitude : float
        Site coordinates (defaults: Sydney, Australia).
    tz : str
        IANA timezone name used to localize the forecast window.
    days : int
        Length of the forecast window in days. (Bug fix: this argument was
        previously ignored and the window was hard-coded to 7 days.)

    Returns
    -------
    pandas.DataFrame
        Processed forecast data containing the model's ``output_variables``
        (temperature, wind speed, cloud cover and derived ghi/dni/dhi).
    """
    # Forecast window: today at local midnight, forward `days` days.
    start = pd.Timestamp(datetime.date.today(), tz=tz)
    end = start + pd.Timedelta(days=days)  # fix: honor the `days` parameter

    model = GFS()

    # get_processed_data() runs the full fetch -> rename -> unit conversion
    # -> cloud-cover-to-irradiance pipeline in one call.  The previous
    # version downloaded and processed the same data three times (manual
    # pipeline, process_data, get_processed_data), discarding all but the
    # last result.
    data = model.get_processed_data(latitude, longitude, start, end)
    print(data.head())
    return data
def _get_forecast_gfs_day(lat: float, lon: float, date: datetime.date) -> pd.DataFrame:
    """Fetch and process one day of quarter-degree GFS forecast data.

    Suspected that multithreading this does not work, because of some
    internal logic within pvlib.forecast.ForecastModel.get_data

    Parameters
    ----------
    lat, lon : float
        Site coordinates; each must lie on the 0.25-degree GFS grid.
    date : datetime.date
        The (single) forecast day to retrieve.

    Returns
    -------
    pandas.DataFrame
        The model's processed output for [date 00:00, date+1 00:00].

    Raises
    ------
    ValueError
        If ``lat`` or ``lon`` is not a multiple of 0.25.
    ConnectionAbortedError
        If the upstream data fetch fails with a network error.
    """
    logger.debug(f"GFS day {lat} {lon} {date}")

    # Validate with explicit exceptions rather than `assert`, which is
    # silently stripped when Python runs with -O.
    if lat % 0.25 != 0:
        raise ValueError("Latitude must be multiple of 0.25")
    if lon % 0.25 != 0:
        raise ValueError("Longitude must be multiple of 0.25")
    # NOTE(review): the original had a commented-out "no more than 1 week in
    # the future" check (and unused `today`/`one_week_from_today` locals
    # supporting it) — confirm whether that limit should be reinstated.

    model = GFS(resolution="quarter")

    # Query window: the requested day's midnight to the next day's midnight.
    start = datetime.datetime.combine(date, datetime.time())
    end = datetime.datetime.combine(date + datetime.timedelta(days=1), datetime.time())
    try:
        data: pd.DataFrame = model.get_data(lat, lon, start, end)
    except requests.exceptions.ConnectionError as err:
        # Chain the cause so the underlying network error is preserved.
        raise ConnectionAbortedError("Connection Error while fetching forecast data. Check network connection.") from err
    return model.process_data(data)
"""
Created on Mon Mar 10 05:00:26 2019

@author: tstone
"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pvlib
from pvlib.forecast import GFS

# Streetlight locations to pull cloud data for.
location_path = "streetlight_locations_datasd_ids.csv"
sll = pd.read_csv(location_path)

# GFS model, defaults to 0.5 degree resolution
model_gfs = GFS()

# Create times to retrieve archive data: the seven days ending now,
# in US Pacific local time.
tz = 'US/Pacific'
end = pd.Timestamp.today(tz=tz)
start = end - pd.Timedelta(days=7)

# One output CSV per streetlight, keyed by its ID.
# NOTE(review): assumes a network connection and that the GFS server can
# serve this date range — confirm before running unattended.
for _, row in sll.iterrows():
    raw_data = model_gfs.get_data(row.latitude, row.longitude, start, end)
    data = model_gfs.process_data(raw_data)
    data.to_csv(f"../Hackathon Datasets/Cloud/cloud_ID_{row.ID}.csv")