def test_PVSystem_get_irradiance():
    """PVSystem.get_irradiance returns the expected POA components for a
    fixed-tilt system at two timestamps (noon and after sunset)."""
    system = pvsystem.PVSystem(surface_tilt=32, surface_azimuth=135)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas >= 0.25; pd.date_range is the supported constructor.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)

    irradiance = system.get_irradiance(solar_position['apparent_zenith'],
                                       solar_position['azimuth'],
                                       irrads['dni'],
                                       irrads['ghi'],
                                       irrads['dhi'])

    expected = pd.DataFrame(data=np.array(
        [[883.65494055, 745.86141676, 137.79352379, 126.397131, 11.39639279],
         [0., -0., 0., 0., 0.]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
def test_get_irradiance():
    """SingleAxisTracker.get_irradiance (keyword-argument form) returns the
    expected POA components; the after-sunset row is all-NaN because the
    tracker angles are undefined there."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0/7.0,
                                        backtrack=True)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas >= 0.25; pd.date_range is the supported constructor.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)
    solar_zenith = solar_position['apparent_zenith']
    solar_azimuth = solar_position['azimuth']
    tracker_data = system.singleaxis(solar_zenith, solar_azimuth)

    irradiance = system.get_irradiance(
        irrads['dni'], irrads['ghi'], irrads['dhi'],
        solar_zenith=solar_zenith,
        solar_azimuth=solar_azimuth,
        surface_tilt=tracker_data['surface_tilt'],
        surface_azimuth=tracker_data['surface_azimuth'])

    expected = pd.DataFrame(data=np.array(
        [[961.80070, 815.94490, 145.85580, 135.32820, 10.52757492],
         [nan, nan, nan, nan, nan]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
def test_PVSystem_get_irradiance():
    """PVSystem.get_irradiance matches reference POA values after rounding
    both frames to 4 decimal places."""
    system = pvsystem.PVSystem(surface_tilt=32, surface_azimuth=135)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas >= 0.25; pd.date_range is the supported constructor.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)

    irradiance = system.get_irradiance(solar_position['apparent_zenith'],
                                       solar_position['azimuth'],
                                       irrads['dni'],
                                       irrads['ghi'],
                                       irrads['dhi'])

    expected = pd.DataFrame(data=np.array(
        [[883.65494055, 745.86141676, 137.79352379, 126.397131, 11.39639279],
         [0., -0., 0., 0., 0.]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    # round both sides rather than relying on deprecated tolerance kwargs
    irradiance = np.round(irradiance, 4)
    expected = np.round(expected, 4)
    assert_frame_equal(irradiance, expected)
def get_DNI(self):
    """
    Extract DNI from METPV-11 data.

    Daily METPV-11 data records. The raw data in METPV-11 is the
    incidence on the horizontal plane, that is, DNI*cos(d), where d is
    the incidence angle.

    :return: the DNI values, one per record in ``hour_df``
    """
    site = Location(latitude=self.latitude, longitude=self.longitude,
                    altitude=0, tz='Japan')
    positions = site.get_solarposition(
        pd.DatetimeIndex(self.hour_df['avg_time']))
    # cos(d) for a horizontal surface (tilt 0) is simply cos(zenith).
    incidence_cos = aoi_projection(surface_tilt=0, surface_azimuth=0,
                                   solar_zenith=positions['apparent_zenith'],
                                   solar_azimuth=positions['azimuth'])
    # Clip negative values (sun below the horizon) to zero.
    return np.maximum(self.hour_df['DHI'] / incidence_cos, 0)
def test_get_irradiance():
    """SingleAxisTracker.get_irradiance (positional-argument form) returns
    the expected POA components; the after-sunset row is all-NaN."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0/7.0,
                                        backtrack=True)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas >= 0.25; pd.date_range is the supported constructor.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)
    solar_zenith = solar_position['apparent_zenith']
    solar_azimuth = solar_position['azimuth']
    tracker_data = system.singleaxis(solar_zenith, solar_azimuth)

    irradiance = system.get_irradiance(tracker_data['surface_tilt'],
                                       tracker_data['surface_azimuth'],
                                       solar_zenith, solar_azimuth,
                                       irrads['dni'], irrads['ghi'],
                                       irrads['dhi'])

    expected = pd.DataFrame(data=np.array(
        [[961.80070, 815.94490, 145.85580, 135.32820, 10.52757492],
         [nan, nan, nan, nan, nan]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
def compute_irradiance(latitude: float, longitude: float, dt: datetime,
                       cloud_coverage: float) -> float:
    """Compute the irradiance received on a location at a specific time.

    This uses pvlib to
    1) compute clear-sky irradiance as Global Horizontal Irradiance (GHI),
       which includes both Direct Normal Irradiance (DNI) and Diffuse
       Horizontal Irradiance (DHI);
    2) adjust the GHI for cloud coverage.
    """
    when = pd.DatetimeIndex([dt])
    location = Location(latitude, longitude, tz=dt.tzinfo)
    solar_position = location.get_solarposition(when)
    clearsky = location.get_clearsky(when, solar_position=solar_position)
    return ghi_clear_to_ghi(clearsky.loc[dt]["ghi"], cloud_coverage)
def tilt_irr(self, surface_tilt=None, surface_azimuth=180,
             include_solar_pos=False) -> pd.DataFrame:
    """
    Calculate the irradiances (DNI) and angle of incidence (aoi) on a
    tilted surface.

    :param surface_tilt: The surface tilt angle (in degrees). Defaults
        to the site latitude when omitted.
    :param surface_azimuth: The azimuth angle of the surface.
        Default is 180 degrees.
    :param include_solar_pos: whether to include solar position in the
        output dataframe.
    :return: a dataframe with calculated solar incidence.
    """
    # Latitude tilt is a common default for fixed arrays.
    tilt = self.latitude if surface_tilt is None else surface_tilt
    site = Location(latitude=self.latitude, longitude=self.longitude,
                    altitude=0, tz='Japan')
    positions = site.get_solarposition(
        pd.DatetimeIndex(self.hour_df['avg_time']))
    dni = self.get_DNI()

    result = total_irrad(surface_tilt=tilt,
                         surface_azimuth=surface_azimuth,
                         apparent_zenith=positions['apparent_zenith'],
                         azimuth=positions['azimuth'],
                         dni=dni,
                         ghi=self.hour_df['GHI'],
                         dhi=self.hour_df['dHI'])
    result['aoi'] = aoi(tilt, surface_azimuth,
                        positions['zenith'], positions['azimuth'])
    result['DNI'] = dni

    combined = pd.concat([self.hour_df, result], axis=1)
    if include_solar_pos:
        combined = pd.concat([combined, positions], axis=1)
    return combined
def single_axis_irr(self, include_track_angles=False):
    """
    Calculate the irradiances incident on a surface that is mounted on
    an ideal single-axis tracker.

    :param include_track_angles: whether to include the tracker angles
        in the returned dataframe.
    :return: a dataframe of sun irradiances on the single-axis tracked
        surface.
    """
    # max_angle=180 and backtrack=False model an unconstrained tracker.
    tracker = SingleAxisTracker(axis_tilt=0, axis_azimuth=0,
                                max_angle=180, backtrack=False)
    ngo = Location(latitude=self.latitude, longitude=self.longitude,
                   altitude=0, tz='Japan')
    solar_pos = ngo.get_solarposition(
        pd.DatetimeIndex(self.hour_df['avg_time']))
    tracker_angle = tracker.singleaxis(
        apparent_azimuth=solar_pos['azimuth'],
        apparent_zenith=solar_pos['apparent_zenith'])
    dni_arr = self.get_DNI()
    irr = tracker.get_irradiance(
        dni=dni_arr, ghi=self.hour_df['GHI'], dhi=self.hour_df['dHI'],
        solar_zenith=solar_pos['apparent_zenith'],
        solar_azimuth=solar_pos['azimuth'],
        surface_tilt=tracker_angle['surface_tilt'],
        surface_azimuth=tracker_angle['surface_azimuth'])
    irr['DNI'] = dni_arr
    n_df = pd.concat([self.hour_df, irr], axis=1)
    # idiomatic truth test instead of "== True"
    if include_track_angles:
        n_df = pd.concat([n_df, tracker_angle], axis=1)
    return n_df
def test_get_irradiance():
    """Irradiance on a backtracking single-axis tracker matches the
    reference POA values; the after-sunset row is all-NaN."""
    tracker = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                         axis_azimuth=180, gcr=2.0 / 7.0,
                                         backtrack=True)
    index = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    site = Location(latitude=32, longitude=-111)
    solpos = site.get_solarposition(index)
    weather = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0],
                            'dhi': [100, 0]}, index=index)
    zenith = solpos['apparent_zenith']
    azimuth = solpos['azimuth']

    # invalid warnings already generated in horizon test above,
    # no need to clutter test output here; some invalid values also
    # come from irradiance.py. not our problem here
    with np.errstate(invalid='ignore'):
        angles = tracker.singleaxis(zenith, azimuth)
        poa = tracker.get_irradiance(angles['surface_tilt'],
                                     angles['surface_azimuth'],
                                     zenith, azimuth,
                                     weather['dni'], weather['ghi'],
                                     weather['dhi'])

    reference = pd.DataFrame(
        data=np.array([[961.80070, 815.94490, 145.85580, 135.32820,
                        10.52757492],
                       [nan, nan, nan, nan, nan]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=index)

    assert_frame_equal(poa, reference, check_less_precise=2)
def test_get_irradiance():
    """SingleAxisTracker.get_irradiance (keyword form) matches reference
    POA values after rounding both frames to 4 decimal places."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0 / 7.0,
                                        backtrack=True)
    # pd.DatetimeIndex(start=..., end=..., freq=...) was removed in
    # pandas >= 0.25; pd.date_range is the supported constructor.
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0], 'ghi': [600, 0], 'dhi': [100, 0]},
                          index=times)
    solar_zenith = solar_position['apparent_zenith']
    solar_azimuth = solar_position['azimuth']
    tracker_data = system.singleaxis(solar_zenith, solar_azimuth)

    irradiance = system.get_irradiance(
        irrads['dni'], irrads['ghi'], irrads['dhi'],
        solar_zenith=solar_zenith,
        solar_azimuth=solar_azimuth,
        surface_tilt=tracker_data['surface_tilt'],
        surface_azimuth=tracker_data['surface_azimuth'])

    expected = pd.DataFrame(data=np.array(
        [[142.71652464, 87.50125991, 55.21526473, 44.68768982, 10.52757492],
         [nan, nan, nan, nan, nan]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse',
                 'poa_sky_diffuse', 'poa_ground_diffuse'],
        index=times)

    # round both sides rather than relying on deprecated tolerance kwargs
    irradiance = np.round(irradiance, 4)
    expected = np.round(expected, 4)
    assert_frame_equal(irradiance, expected)
def test_get_irradiance():
    """POA irradiance from a backtracking single-axis tracker matches the
    reference values; tracker angles are NaN after sunset, so the second
    row is all-NaN."""
    system = tracking.SingleAxisTracker(max_angle=90, axis_tilt=30,
                                        axis_azimuth=180, gcr=2.0/7.0,
                                        backtrack=True)
    times = pd.date_range(start='20160101 1200-0700',
                          end='20160101 1800-0700', freq='6H')
    location = Location(latitude=32, longitude=-111)
    solar_position = location.get_solarposition(times)
    irrads = pd.DataFrame({'dni': [900, 0],
                           'ghi': [600, 0],
                           'dhi': [100, 0]}, index=times)
    solar_zenith = solar_position['apparent_zenith']
    solar_azimuth = solar_position['azimuth']

    # invalid warnings already generated in horizon test above,
    # no need to clutter test output here
    with np.errstate(invalid='ignore'):
        tracker_data = system.singleaxis(solar_zenith, solar_azimuth)

    # some invalid values in irradiance.py. not our problem here
    with np.errstate(invalid='ignore'):
        irradiance = system.get_irradiance(tracker_data['surface_tilt'],
                                           tracker_data['surface_azimuth'],
                                           solar_zenith, solar_azimuth,
                                           irrads['dni'], irrads['ghi'],
                                           irrads['dhi'])

    columns = ['poa_global', 'poa_direct', 'poa_diffuse',
               'poa_sky_diffuse', 'poa_ground_diffuse']
    expected = pd.DataFrame(
        [[961.80070, 815.94490, 145.85580, 135.32820, 10.52757492],
         [nan, nan, nan, nan, nan]],
        columns=columns, index=times)

    assert_frame_equal(irradiance, expected, check_less_precise=2)
def matlab2datetime(matlab_datenum):
    """Convert a MATLAB datenum to a Python datetime.

    MATLAB day ordinals count from year 0 while Python's count from
    year 1, hence the 366-day correction. The final +8 h shift moves the
    timestamps to the dataset's reference time zone.
    """
    day = dt.datetime.fromordinal(int(matlab_datenum))
    dayfrac = dt.timedelta(days=matlab_datenum % 1) - dt.timedelta(days=366)
    return day + dayfrac + dt.timedelta(hours=8)


time = [matlab2datetime(tval) for tval in mat['time_day']]

# plot ghi to check
ghi = pd.DataFrame(GHI, time, columns=['ghi'])
ax = ghi.plot()
ax.set_xlabel('Time [UTC]')  # fixed: label had a stray closing bracket ']]'
ax.set_ylabel('GHI [W/m²]')

'''separate dhi and dni from ghi'''
sd = Location(32.883729, -117.239341)
sza = sd.get_solarposition(time).zenith  # zenith angle
out = pvlib.irradiance.erbs(GHI, sza, pd.DatetimeIndex(time))  # using erbs method

# plot ghi dni dhi
irr = pd.concat([ghi, out['dni'], out['dhi']], axis=1)  # concatenate irradiances
ax3 = irr.plot(title='Erbs method')
ax3.set_xlabel('Time [UTC]')
ax3.set_ylabel('Irradiance [W/m²]')

'''convert irradiance to power'''
# load modules and inverters
sandia_modules = pvlib.pvsystem.retrieve_sam('SandiaMod')
cec_inverters = pvlib.pvsystem.retrieve_sam('cecinverter')
# sma_cols = [col for col in cec_inverters.columns if 'SMA' in col]  # use to look for strings
# Equipment at EBU2:
class ForecastModel(object):
    """
    An object for querying and holding forecast model information for
    use within the pvlib library.

    Simplifies use of siphon library on a THREDDS server.

    Parameters
    ----------
    model_type: string
        UNIDATA category in which the model is located.
    model_name: string
        Name of the UNIDATA forecast model.
    set_type: string
        Model dataset type.

    Attributes
    ----------
    access_url: string
        URL specifying the dataset from data will be retrieved.
    base_tds_url : string
        The top level server address
    catalog_url : string
        The url path of the catalog to parse.
    data: pd.DataFrame
        Data returned from the query.
    data_format: string
        Format of the forecast data being requested from UNIDATA.
    dataset: Dataset
        Object containing information used to access forecast data.
    dataframe_variables: list
        Model variables that are present in the data.
    datasets_list: list
        List of all available datasets.
    fm_models: Dataset
        TDSCatalog object containing all available forecast models from
        UNIDATA.
    fm_models_list: list
        List of all available forecast models from UNIDATA.
    latitude: list
        A list of floats containing latitude values.
    location: Location
        A pvlib Location object containing geographic quantities.
    longitude: list
        A list of floats containing longitude values.
    lbox: boolean
        Indicates the use of a location bounding box.
    ncss: NCSS object
        NCSS
    model_name: string
        Name of the UNIDATA forecast model.
    model: Dataset
        A dictionary of Dataset object, whose keys are the name of the
        dataset's name.
    model_url: string
        The url path of the dataset to parse.
    modelvariables: list
        Common variable names that correspond to queryvariables.
    query: NCSS query object
        NCSS object used to complete the forecast data retrival.
    queryvariables: list
        Variables that are used to query the THREDDS Data Server.
    time: DatetimeIndex
        Time range.
    variables: dict
        Defines the variables to obtain from the weather model and how
        they should be renamed to common variable names.
    units: dict
        Dictionary containing the units of the standard variables and
        the model specific variables.
    vert_level: float or integer
        Vertical altitude for query data.
    """

    access_url_key = 'NetcdfSubset'
    catalog_url = 'https://thredds.ucar.edu/thredds/catalog.xml'
    base_tds_url = catalog_url.split('/thredds/')[0]
    data_format = 'netcdf'

    units = {
        'temp_air': 'C',
        'wind_speed': 'm/s',
        'ghi': 'W/m^2',
        'ghi_raw': 'W/m^2',
        'dni': 'W/m^2',
        'dhi': 'W/m^2',
        'total_clouds': '%',
        'low_clouds': '%',
        'mid_clouds': '%',
        'high_clouds': '%'}

    def __init__(self, model_type, model_name, set_type, vert_level=None):
        """Store the query parameters; no network access happens here —
        the catalog connection is deferred to connect_to_catalog()."""
        self.model_type = model_type
        self.model_name = model_name
        self.set_type = set_type
        self.connected = False
        self.vert_level = vert_level

    def connect_to_catalog(self):
        """Connect to the THREDDS catalog, resolve the model and dataset,
        and create the NCSS query object. Sets self.connected."""
        self.catalog = TDSCatalog(self.catalog_url)
        self.fm_models = TDSCatalog(
            self.catalog.catalog_refs[self.model_type].href)
        self.fm_models_list = sorted(list(self.fm_models.catalog_refs.keys()))

        try:
            model_url = self.fm_models.catalog_refs[self.model_name].href
        except ParseError:
            raise ParseError(self.model_name + ' model may be unavailable.')

        # HTTPError is retried exactly once — presumably to ride out a
        # transient server error; NOTE(review): confirm this is intended.
        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            try:
                self.model = TDSCatalog(model_url)
            except HTTPError:
                raise HTTPError(self.model_name + ' model may be unavailable.')

        self.datasets_list = list(self.model.datasets.keys())
        self.set_dataset()
        self.connected = True

    def __repr__(self):
        # e.g. "GFS Half Degree Forecast, best"
        return '{}, {}'.format(self.model_name, self.set_type)

    def set_dataset(self):
        '''
        Retrieves the designated dataset, creates NCSS object, and
        creates a NCSS query object.
        '''
        keys = list(self.model.datasets.keys())
        # dataset labels start with e.g. "Best"/"Latest"/"Full" — match on
        # the lowercased first word
        labels = [item.split()[0].lower() for item in keys]
        if self.set_type == 'best':
            self.dataset = self.model.datasets[keys[labels.index('best')]]
        elif self.set_type == 'latest':
            self.dataset = self.model.datasets[keys[labels.index('latest')]]
        elif self.set_type == 'full':
            self.dataset = self.model.datasets[keys[labels.index('full')]]

        self.access_url = self.dataset.access_urls[self.access_url_key]
        self.ncss = NCSS(self.access_url)
        self.query = self.ncss.query()

    def set_query_time_range(self, start, end):
        """
        Parameters
        ----------
        start : datetime.datetime, pandas.Timestamp
            Must be tz-localized.
        end : datetime.datetime, pandas.Timestamp
            Must be tz-localized.

        Notes
        -----
        Assigns ``self.start``, ``self.end``. Modifies ``self.query``
        """
        self.start = pd.Timestamp(start)
        self.end = pd.Timestamp(end)
        if self.start.tz is None or self.end.tz is None:
            raise TypeError('start and end must be tz-localized')
        self.query.time_range(self.start, self.end)

    def set_query_latlon(self):
        '''
        Sets the NCSS query location latitude and longitude.
        List coordinates request a bounding box; scalars request a point.
        '''
        if (isinstance(self.longitude, list) and
                isinstance(self.latitude, list)):
            self.lbox = True
            # west, east, south, north
            self.query.lonlat_box(self.longitude[0], self.longitude[1],
                                  self.latitude[0], self.latitude[1])
        else:
            self.lbox = False
            self.query.lonlat_point(self.longitude, self.latitude)

    def set_location(self, tz, latitude, longitude):
        '''
        Sets the location for the query.

        Parameters
        ----------
        tz: tzinfo
            Timezone of the query
        latitude: float
            Latitude of the query
        longitude: float
            Longitude of the query

        Notes
        -----
        Assigns ``self.location``.
        '''
        self.location = Location(latitude, longitude, tz=tz)

    def get_data(self, latitude, longitude, start, end,
                 vert_level=None, query_variables=None,
                 close_netcdf_data=True, **kwargs):
        """
        Submits a query to the UNIDATA servers using Siphon NCSS and
        converts the netcdf data to a pandas DataFrame.

        Parameters
        ----------
        latitude: float
            The latitude value.
        longitude: float
            The longitude value.
        start: datetime or timestamp
            The start time.
        end: datetime or timestamp
            The end time.
        vert_level: None, float or integer, default None
            Vertical altitude of interest.
        query_variables: None or list, default None
            If None, uses self.variables.
        close_netcdf_data: bool, default True
            Controls if the temporary netcdf data file should be closed.
            Set to False to access the raw data.
        **kwargs:
            Additional keyword arguments are silently ignored.

        Returns
        -------
        forecast_data : DataFrame
            column names are the weather model's variable names.
        """
        # lazy connection: first call pays the catalog-resolution cost
        if not self.connected:
            self.connect_to_catalog()

        if vert_level is not None:
            self.vert_level = vert_level

        # self.variables is defined by subclasses — not visible here
        if query_variables is None:
            self.query_variables = list(self.variables.values())
        else:
            self.query_variables = query_variables

        self.set_query_time_range(start, end)

        self.latitude = latitude
        self.longitude = longitude
        self.set_query_latlon()  # modifies self.query
        self.set_location(self.start.tz, latitude, longitude)

        if self.vert_level is not None:
            self.query.vertical_level(self.vert_level)

        self.query.variables(*self.query_variables)
        self.query.accept(self.data_format)

        self.netcdf_data = self.ncss.get_data(self.query)

        # might be better to go to xarray here so that we can handle
        # higher dimensional data for more advanced applications
        self.data = self._netcdf2pandas(self.netcdf_data,
                                        self.query_variables,
                                        self.start, self.end)

        if close_netcdf_data:
            self.netcdf_data.close()

        return self.data

    def process_data(self, data, **kwargs):
        """
        Defines the steps needed to convert raw forecast data into
        processed forecast data. Most forecast models implement their
        own version of this method which also call this one.

        Parameters
        ----------
        data: DataFrame
            Raw forecast data

        Returns
        -------
        data: DataFrame
            Processed forecast data.
        """
        data = self.rename(data)
        return data

    def get_processed_data(self, *args, **kwargs):
        """
        Get and process forecast data.

        Parameters
        ----------
        *args: positional arguments
            Passed to get_data
        **kwargs: keyword arguments
            Passed to get_data and process_data

        Returns
        -------
        data: DataFrame
            Processed forecast data
        """
        return self.process_data(self.get_data(*args, **kwargs), **kwargs)

    def rename(self, data, variables=None):
        """
        Renames the columns according the variable mapping.

        Parameters
        ----------
        data: DataFrame
        variables: None or dict, default None
            If None, uses self.variables

        Returns
        -------
        data: DataFrame
            Renamed data.
        """
        if variables is None:
            variables = self.variables
        # mapping is common_name -> model_name, so invert for renaming
        return data.rename(columns={y: x for x, y in variables.items()})

    def _netcdf2pandas(self, netcdf_data, query_variables, start, end):
        """
        Transforms data from netcdf to pandas DataFrame.

        Parameters
        ----------
        data: netcdf
            Data returned from UNIDATA NCSS query.
        query_variables: list
            The variables requested.
        start: Timestamp
            The start time
        end: Timestamp
            The end time

        Returns
        -------
        pd.DataFrame
        """
        # set self.time
        try:
            time_var = 'time'
            self.set_time(netcdf_data.variables[time_var])
        except KeyError:
            # which model does this dumb thing?
            time_var = 'time1'
            self.set_time(netcdf_data.variables[time_var])

        data_dict = {}
        for key, data in netcdf_data.variables.items():
            # if accounts for possibility of extra variable returned
            if key not in query_variables:
                continue
            squeezed = data[:].squeeze()
            if squeezed.ndim == 1:
                data_dict[key] = squeezed
            elif squeezed.ndim == 2:
                # 2-D variables are split into one column per level
                for num, data_level in enumerate(squeezed.T):
                    data_dict[key + '_' + str(num)] = data_level
            else:
                raise ValueError('cannot parse ndim > 2')

        data = pd.DataFrame(data_dict, index=self.time)
        # sometimes data is returned as hours since T0
        # where T0 is before start. Then the hours between
        # T0 and start are added *after* end. So sort and slice
        # to remove the garbage
        data = data.sort_index().loc[start:end]
        return data

    def set_time(self, time):
        '''
        Converts time data into a pandas date object.

        Parameters
        ----------
        time: netcdf
            Contains time information.

        Returns
        -------
        pandas.DatetimeIndex
        '''
        times = num2date(time[:].squeeze(), time.units,
                         only_use_cftime_datetimes=False,
                         only_use_python_datetimes=True)
        self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz)

    def cloud_cover_to_ghi_linear(self, cloud_cover, ghi_clear, offset=35,
                                  **kwargs):
        """
        Convert cloud cover to GHI using a linear relationship.

        0% cloud cover returns ghi_clear.

        100% cloud cover returns offset*ghi_clear.

        Parameters
        ----------
        cloud_cover: numeric
            Cloud cover in %.
        ghi_clear: numeric
            GHI under clear sky conditions.
        offset: numeric, default 35
            Determines the minimum GHI.
        kwargs
            Not used.

        Returns
        -------
        ghi: numeric
            Estimated GHI.

        References
        ----------
        Larson et. al. "Day-ahead forecasting of solar power output from
        photovoltaic plants in the American Southwest" Renewable Energy
        91, 11-20 (2016).
        """
        offset = offset / 100.
        cloud_cover = cloud_cover / 100.
        ghi = (offset + (1 - offset) * (1 - cloud_cover)) * ghi_clear
        return ghi

    def cloud_cover_to_irradiance_clearsky_scaling(self, cloud_cover,
                                                   method='linear',
                                                   **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine clear sky GHI using Ineichen model and
           climatological turbidity.
        2. Estimate cloudy sky GHI using a function of cloud_cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_ghi_linear`
        3. Estimate cloudy sky DNI using the DISC model.
        4. Calculate DHI from DNI and GHI.

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        method : str, default 'linear'
            Method for converting cloud cover to GHI.
            'linear' is currently the only option.
        **kwargs
            Passed to the method that does the conversion

        Returns
        -------
        irrads : DataFrame
            Estimated GHI, DNI, and DHI.
        """
        solpos = self.location.get_solarposition(cloud_cover.index)
        cs = self.location.get_clearsky(cloud_cover.index, model='ineichen',
                                        solar_position=solpos)

        method = method.lower()
        if method == 'linear':
            ghi = self.cloud_cover_to_ghi_linear(cloud_cover, cs['ghi'],
                                                 **kwargs)
        else:
            raise ValueError('invalid method argument')

        # closure equation: GHI = DHI + DNI * cos(zenith)
        dni = disc(ghi, solpos['zenith'], cloud_cover.index)['dni']
        dhi = ghi - dni * np.cos(np.radians(solpos['zenith']))

        irrads = pd.DataFrame({'ghi': ghi, 'dni': dni, 'dhi': dhi}).fillna(0)
        return irrads

    def cloud_cover_to_transmittance_linear(self, cloud_cover, offset=0.75,
                                            **kwargs):
        """
        Convert cloud cover to atmospheric transmittance using a linear
        model.

        0% cloud cover returns offset.

        100% cloud cover returns 0.

        Parameters
        ----------
        cloud_cover : numeric
            Cloud cover in %.
        offset : numeric, default 0.75
            Determines the maximum transmittance.
        kwargs
            Not used.

        Returns
        -------
        ghi : numeric
            Estimated GHI.
        """
        transmittance = ((100.0 - cloud_cover) / 100.0) * offset

        return transmittance

    def cloud_cover_to_irradiance_liujordan(self, cloud_cover, **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine transmittance using a function of cloud cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_transmittance_linear`
        2. Calculate GHI, DNI, DHI using the
           :py:func:`pvlib.irradiance.liujordan` model

        Parameters
        ----------
        cloud_cover : Series

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        # in principle, get_solarposition could use the forecast
        # pressure, temp, etc., but the cloud cover forecast is not
        # accurate enough to justify using these minor corrections
        solar_position = self.location.get_solarposition(cloud_cover.index)
        dni_extra = get_extra_radiation(cloud_cover.index)
        airmass = self.location.get_airmass(cloud_cover.index)

        transmittance = self.cloud_cover_to_transmittance_linear(cloud_cover,
                                                                 **kwargs)

        irrads = liujordan(solar_position['apparent_zenith'],
                           transmittance, airmass['airmass_absolute'],
                           dni_extra=dni_extra)
        irrads = irrads.fillna(0)

        return irrads

    def cloud_cover_to_irradiance(self, cloud_cover, how='clearsky_scaling',
                                  **kwargs):
        """
        Convert cloud cover to irradiance. A wrapper method.

        Parameters
        ----------
        cloud_cover : Series
        how : str, default 'clearsky_scaling'
            Selects the method for conversion. Can be one of
            clearsky_scaling or liujordan.
        **kwargs
            Passed to the selected method.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        how = how.lower()
        if how == 'clearsky_scaling':
            irrads = self.cloud_cover_to_irradiance_clearsky_scaling(
                cloud_cover, **kwargs)
        elif how == 'liujordan':
            irrads = self.cloud_cover_to_irradiance_liujordan(
                cloud_cover, **kwargs)
        else:
            raise ValueError('invalid how argument')

        return irrads

    def kelvin_to_celsius(self, temperature):
        """
        Converts Kelvin to celsius.

        Parameters
        ----------
        temperature: numeric

        Returns
        -------
        temperature: numeric
        """
        return temperature - 273.15

    def isobaric_to_ambient_temperature(self, data):
        """
        Calculates temperature from isobaric temperature.

        Parameters
        ----------
        data: DataFrame
            Must contain columns pressure, temperature_iso,
            temperature_dew_iso. Input temperature in K.

        Returns
        -------
        temperature : Series
            Temperature in K
        """
        P = data['pressure'] / 100.0  # noqa: N806
        Tiso = data['temperature_iso']  # noqa: N806
        Td = data['temperature_dew_iso'] - 273.15  # noqa: N806

        # saturation water vapor pressure
        e = 6.11 * 10**((7.5 * Td) / (Td + 273.3))

        # saturation water vapor mixing ratio
        w = 0.622 * (e / (P - e))

        temperature = Tiso - ((2.501 * 10.**6) / 1005.7) * w

        return temperature

    def uv_to_speed(self, data):
        """
        Computes wind speed from wind components.

        Parameters
        ----------
        data : DataFrame
            Must contain the columns 'wind_speed_u' and 'wind_speed_v'.

        Returns
        -------
        wind_speed : Series
        """
        wind_speed = np.sqrt(data['wind_speed_u']**2 + data['wind_speed_v']**2)

        return wind_speed

    def gust_to_speed(self, data, scaling=1 / 1.4):
        """
        Computes standard wind speed from gust.
        Very approximate and location dependent.

        Parameters
        ----------
        data : DataFrame
            Must contain the column 'wind_speed_gust'.

        Returns
        -------
        wind_speed : Series
        """
        wind_speed = data['wind_speed_gust'] * scaling

        return wind_speed
class PVModel(Forecast):
    """Model PV output based on irradiance or cloud coverage data."""

    def __init__(self, config, section='PVSystem'):
        """Initialize PVModel.

        config      configparser object with section [<section>]
        section     config section name, defaults to 'PVSystem'
        """
        try:
            self._pvversion = pvlib.__version__
            # NOTE(review): lexicographic string comparison of version numbers
            # breaks for pvlib >= 0.10 (e.g. '0.10.0' < '0.8.1' as strings);
            # also the message below says '>= 0.8.1' while the check is
            # against '0.8.0' -- confirm the intended minimum version.
            if self._pvversion > '0.8.1':
                print("Warning --- pvmodel not tested with pvlib > 0.8.1")
            elif self._pvversion < '0.8.0':
                raise Exception("ERROR --- require pvlib >= 0.8.1")
            super().__init__()
            self.config = config
            self._cfg = section
            # defaults injected into the configparser [DEFAULT] section
            self.config['DEFAULT'][
                'NominalEfficiency'] = '0.96'  # nominal inverter efficiency, default of pvwatts model
            self.config['DEFAULT'][
                'TemperatureCoeff'] = '-0.005'  # temperature coefficient of module, default of pvwatts model
            self.config['DEFAULT'][
                'TemperatureModel'] = 'open_rack_glass_glass'  # https://pvlib-python.readthedocs.io/en/stable/generated/pvlib.temperature.sapm_cell.html
            self.config['DEFAULT'][
                'Altitude'] = '0'  # default altitude sea level
            self.config['DEFAULT'][
                'Model'] = 'CEC'  # default PV modeling strategy
            # translation tables: weather-source parameter names --> pvlib names
            self._weatherFields = {
                'dwd': {
                    'temp_air': 'TTT',  # translation: DWD parameter names --> pvlib parameter names
                    'wind_speed': 'FF',  # note that temp_air and temp_dew are in Celsius, TTT in Kelvin
                    'pressure': 'PPPP',
                    'temp_dew': 'Td',
                    'clouds': 'N'
                },
                'owm': {
                    'temp_air': 'temp',  # translation: OWM parameter names --> pvlib parameter names
                    'wind_speed': 'wind_speed',
                    'pressure': 'pressure',
                    'temp_dew': 'dew_point',
                    'clouds': 'clouds'
                }
            }
            # MOSMIX_S shares the MOSMIX_L (dwd) field mapping
            self._weatherFields['dwd_s'] = self._weatherFields['dwd']
            self._allow_experimental = self.config[self._cfg].getboolean(
                'experimental',
                False)  # needs modification of pvlib.irradiance.erbs()
            self._location = Location(
                latitude=self.config[self._cfg].getfloat('Latitude'),
                longitude=self.config[self._cfg].getfloat('Longitude'),
                altitude=self.config[self._cfg].getfloat('Altitude'),
                tz='UTC')  # let's stay in UTC for the entire time ...
            self._pvsystem = None  # PV system, once defined with init_CEC() or init_PVWatts()
            self._mc = None  # model chain, once defined in init_CEC() or init_PVWatts()
            self._weather = None  # weather data used for getIrradiance() and runModel()
            self._cloud_cover_param = None  # weather data parameter used for cloud coverage (see _weatherFields)
            self.irradiance_model = None  # model name if irradiance data calculated in getIrradiance()
            self.irradiance = None  # calculated irradiance data
            self.pv_model = None  # 'CEC' or 'PVWatts' once solar system is defined
            self.SQLTable = self._cfg.lower(
            )  # which SQL table name is this data stored to (see DBRepository.loadData())
            if (self.config[self._cfg].get('Model') == 'CEC'):
                self._init_CEC()
            else:
                self._init_PVWatts()
        except Exception as e:
            print("pvmodel __init__: " + str(e))
            sys.exit(1)

    def _init_CEC(self):
        """Configure PV system based on actual components available in the
        pvlib CEC database."""
        try:
            moduleName = self.config[self._cfg].get('ModuleName')
            inverterName = self.config[self._cfg].get('InverterName')
            tempModel = self.config[self._cfg].get('TemperatureModel')
            self._pvsystem = PVSystem(
                surface_tilt=self.config[self._cfg].getfloat('Tilt'),
                surface_azimuth=self.config[self._cfg].getfloat('Azimuth'),
                module_parameters=pvlib.pvsystem.retrieve_sam(
                    'cecmod')[moduleName],
                inverter_parameters=pvlib.pvsystem.retrieve_sam(
                    'cecinverter')[inverterName],
                strings_per_inverter=self.config[self._cfg].getint(
                    'NumStrings'),
                modules_per_string=self.config[self._cfg].getint('NumPanels'),
                temperature_model_parameters=TEMPERATURE_MODEL_PARAMETERS[
                    'sapm'][tempModel])
            self._mc = ModelChain(self._pvsystem,
                                  self._location,
                                  aoi_model='physical',
                                  spectral_model='no_loss')
            self.pv_model = 'CEC'
        except Exception as e:
            print("init_CEC: " + str(e))
            sys.exit(1)

    def _init_PVWatts(self):
        """Configure PV system using the simplified PVWatts model."""
        try:
            pvwatts_module = {
                'pdc0': self.config[self._cfg].getfloat('SystemPower'),
                'gamma_pdc': self.config[self._cfg].getfloat('TemperatureCoeff')
            }
            pvwatts_inverter = {
                'pdc0': self.config[self._cfg].getfloat('InverterPower'),
                'eta_inv_nom':
                self.config[self._cfg].getfloat('NominalEfficiency')
            }
            # loss assumptions in percent (see pvlib.pvsystem.pvwatts_losses)
            pvwatts_losses = {
                'soiling': 0,
                'shading': 0,
                'snow': 0,
                'mismatch': 0,
                'wiring': 2,
                'connections': 0.5,
                'lid': 0,
                'nameplate_rating': 0,
                'age': 0,
                'availability': 0
            }
            tempModel = self.config.get(self._cfg, 'TemperatureModel')
            self._pvsystem = PVSystem(
                surface_tilt=self.config[self._cfg].getfloat('Tilt'),
                surface_azimuth=self.config[self._cfg].getfloat('Azimuth'),
                module_parameters=pvwatts_module,
                inverter_parameters=pvwatts_inverter,
                losses_parameters=pvwatts_losses,
                temperature_model_parameters=TEMPERATURE_MODEL_PARAMETERS[
                    'sapm'][tempModel])
            self._mc = ModelChain.with_pvwatts(self._pvsystem,
                                               self._location,
                                               dc_model='pvwatts',
                                               ac_model='pvwatts',
                                               aoi_model='physical',
                                               spectral_model='no_loss')
            self.pv_model = 'PVWatts'
        except Exception as e:
            print("init_PVWatts: " + str(e))
            sys.exit(1)

    def getIrradiance(self, weather: Forecast, model='disc'):
        """Get irradiance data from weather files (see DWDForecast()) using
        various models.

        weather   object e.g. created by DWDForecast.weatherData;
                  must contain: weatherData.DataTable, weatherData.IssueTime
        model     one of:
                  'disc', 'dirint', 'dirindex', 'erbs' (GHI decomposition models)
                  'erbs_kt' (as 'erbs', but with kt as input parameter; this
                  needs a minor modification to pvlib.irradiance.erbs)
                  'campbell_norman', 'clearsky_scaling' (cloud coverage to irradiance)
                  'clearsky' (clear sky model)

        Result is stored in self.irradiance (DataFrame with at least
        ghi/dni/dhi, plus temp_air [C] and wind_speed when available)."""
        try:
            try:  # if weather is a Forecast object, this will work
                weatherData = weather.DataTable  # extract weather data table from weather
                self.IssueTime = weather.IssueTime
                f = self._weatherFields[
                    weather.
                    SQLTable]  # field mapping dictionary weather --> pvlib
                if (weather.SQLTable == 'dwd_s'
                    ):  # we want be able to distinguish MOSMIX_L and _S data
                    self.SQLTable = self._cfg.lower() + '_s'
            except AttributeError:  # we only have weather data ... let's try anyway
                weatherData = weather
                f = {}
                # identity mapping: assume columns already carry pvlib names
                for col in weatherData.columns:
                    f[col] = col
            if ('Rad1h' not in weatherData and model not in [
                    'clearsky', 'clearsky_scaling', 'campbell_norman'
            ]):
                raise Exception(
                    'ERROR --- weather does not include irradiation data, use cloud based models instead of = '
                    + model)
            elif (model
                  not in ['clearsky', 'clearsky_scaling', 'campbell_norman']):
                ghi = np.array(
                    weatherData['Rad1h'] *
                    0.2777778)  # convert to Rad1h [kJ/m^2] to Wh/m^2
            if (model not in ['clearsky', 'clearsky_scaling', 'campbell_norman'
                              ]):  # we don't need this for cloud models ...
                solar_position = self._location.get_solarposition(
                    times=weatherData.index,
                    pressure=weatherData[f['pressure']],
                    temperature=weatherData[f['temp_air']] - 273.15)
                cosSZA = np.cos(solar_position['zenith'] * np.pi / 180)
            if (model == 'disc' or model == 'dirint' or model == 'dirindex'):
                if (model == 'disc'):
                    disc = pvlib.irradiance.disc(
                        ghi=
                        ghi,  # returns dataframe with columns = ['dni', 'kt', 'airmass']
                        solar_zenith=solar_position['zenith'],
                        datetime_or_doy=weatherData.index,
                        pressure=weatherData[f['pressure']])
                    dni = np.array(disc['dni'])
                    kt = np.array(disc['kt'])
                elif (model == 'dirint'):
                    dni = pvlib.irradiance.dirint(
                        ghi=ghi,  # returns array
                        solar_zenith=solar_position['zenith'],
                        times=weatherData.index,
                        temp_dew=weatherData[f['temp_dew']] - 273.15)
                else:
                    clearsky = self._location.get_clearsky(
                        weatherData.
                        index,  # calculate clearsky ghi, dni, dhi for times
                        model='ineichen')
                    dni = pvlib.irradiance.dirindex(
                        ghi=ghi,  # returns array
                        ghi_clearsky=clearsky['ghi'],
                        dni_clearsky=clearsky['dni'],
                        zenith=solar_position['zenith'],
                        times=weatherData.index,
                        pressure=weatherData[f['pressure']],
                        temp_dew=weatherData[f['temp_dew']] - 273.15)
                # closure: DHI from GHI and the direct horizontal component
                dhi = ghi - dni * cosSZA
            elif (model == 'erbs' or model == 'erbs_kt'):
                if (model == 'erbs'):
                    erbs = pvlib.irradiance.erbs(
                        ghi=
                        ghi,  # returns dataframe with columns ['dni', 'dhi', 'kt']
                        zenith=solar_position['zenith'],
                        datetime_or_doy=weatherData.index)
                else:  # 'erbs_kt', needs modification in pvlib.irradiance.erbs
                    try:
                        erbs = pvlib.irradiance.erbs(
                            ghi=
                            ghi,  # returns dataframe with columns ['dni', 'dhi', 'kt']
                            zenith=solar_position['zenith'],
                            datetime_or_doy=weatherData.index,
                            kt=weatherData['RRad1'] /
                            100)  # = kt, in range 0 .. 100
                    except Exception as e:
                        print(
                            "getIrradiance: ERROR --- erbs_kt needs modification to pvlib.irradiance.erbs()"
                        )
                        sys.exit(1)
                dni = np.array(erbs['dni'])
                dhi = np.array(erbs['dhi'])
                kt = np.array(erbs['kt'])
            elif (model == 'clearsky'):
                clearsky_model = self.config[self._cfg].get(
                    'clearsky_model', 'simplified_solis')
                self.irradiance = self._location.get_clearsky(
                    weatherData.
                    index,  # calculate clearsky ghi, dni, dhi for clearsky
                    model=clearsky_model)
            elif (model == 'clearsky_scaling' or model == 'campbell_norman'):
                if model == 'campbell_norman' and self._pvversion == '0.8.0':
                    raise Exception(
                        "ERROR --- cloud based irradiance model 'campbell_norman' only supported in pvlib 0.8.1 and higher"
                    )
                fcModel = ForecastModel(
                    'dummy', 'dummy', 'dummy')  # only needed to call methods below
                # NOTE(review): this call assumes a set_location(tz=..., latitude=...,
                # longitude=...) keyword signature; the ForecastModel class in this
                # file declares set_location(time, latitude, longitude) -- confirm
                # which ForecastModel is actually in scope here.
                fcModel.set_location(latitude=self._location.latitude,
                                     longitude=self._location.longitude,
                                     tz=self._location.tz)
                self.irradiance = fcModel.cloud_cover_to_irradiance(
                    weatherData[f['clouds']], how=model)
            else:
                raise Exception(
                    "ERROR --- incorrect irradiance model called: " + model)
        except Exception as e:
            print("getIrradiance: " + str(e))
            sys.exit(1)
        if (model != 'clearsky' and model != 'clearsky_scaling'
                and model != 'campbell_norman'):
            # assemble decomposition results into one DataFrame
            self.irradiance = pd.DataFrame(data=[ghi, dni, dhi]).T
            self.irradiance.index = weatherData.index
            self.irradiance.columns = ['ghi', 'dni', 'dhi']
            if (model == 'disc' or model == 'erbs'):
                self.irradiance['kt'] = kt
        self.irradiance_model = model
        try:
            # append ambient temperature [C] and wind speed for the PV model chain
            self.irradiance = pd.concat([
                weatherData[f['temp_air']] - 273.15,
                weatherData[f['wind_speed']], self.irradiance
            ],
                                        axis=1)
            self.irradiance.rename(columns={
                f['temp_air']: 'temp_air',
                f['wind_speed']: 'wind_speed'
            },
                                   inplace=True)
            self._cloud_cover_param = f['clouds']
        except:  # NOTE(review): bare except silently drops temp/wind columns on any failure
            pass

    def runModel(self, weather: Forecast, model, modelLst='all'):
        """Run one PV simulation model (see getIrradiance() for valid
        'model' values; the PV strategy itself is in self.pv_model).

        Weather data is inherited from the prior getIrradiance() call.
        Populates self.DataTable pandas dataframe with simulation results;
        returns it, or None if 'model' is not in 'modelLst'."""
        if modelLst is not None:
            if modelLst != 'all' and model != modelLst:
                # we have an explicit list of models to calculate
                modelLst = modelLst.replace(" ", "")
                models = modelLst.split(",")
                if model not in models:  # request was for something else ...
                    return None
        try:
            self.getIrradiance(weather, model)
            self._mc.run_model(self.irradiance)
            cols = ['ghi', 'dni', 'dhi']
            if 'kt' in self.irradiance:
                cols.append('kt')
            if (self.pv_model == 'PVWatts'):
                self.DataTable = pd.concat(
                    [self._mc.dc, self._mc.ac, self.irradiance[cols]], axis=1)
            else:  # CEC
                self.DataTable = pd.concat(
                    [self._mc.dc.p_mp, self._mc.ac, self.irradiance[cols]],
                    axis=1)
            # suffix all result columns with the irradiance model name
            m = self.irradiance_model
            if (m == 'clearsky_scaling' or m == 'campbell_norman'):
                m = m + '_' + self._cloud_cover_param
            if (m == 'disc' or m == 'erbs'):
                self.DataTable.columns = [
                    'dc_' + m, 'ac_' + m, 'ghi_' + m, 'dni_' + m, 'dhi_' + m,
                    'kt_' + m
                ]
            else:
                self.DataTable.columns = [
                    'dc_' + m, 'ac_' + m, 'ghi_' + m, 'dni_' + m, 'dhi_' + m
                ]
            self.InfluxFields.append('dc_' + m)
            return self.DataTable
        except Exception as e:
            print("runModel: " + str(e))
            sys.exit(1)

    def run_allModels(self, weather: Forecast, modelLst='all'):
        """Run all implemented models (default).

        Alternatively, 'modelLst' can contain a comma separated list of
        valid models (see self.runModel()) to be calculated.

        Populates self.DataTable pandas dataframe with all simulation
        results."""
        dfList = []  # list of calculated models
        if ('Rad1h' in weather.DataTable):  # ---- irradiance based models
            dfList.append(self.runModel(weather, 'disc', modelLst))
            dfList.append(self.runModel(weather, 'dirint', modelLst))
            dfList.append(self.runModel(weather, 'dirindex', modelLst))
            dfList.append(self.runModel(weather, 'erbs', modelLst))
            if 'RRad1' in weather.DataTable and self._allow_experimental:
                dfList.append(self.runModel(weather, 'erbs_kt', modelLst))
        dfList.append(self.runModel(weather, 'clearsky_scaling',
                                    modelLst))  # ---- cloud based models
        if self._pvversion >= '0.8.1':  # deprecated model 'liujordan' not implemented
            dfList.append(self.runModel(weather, 'campbell_norman', modelLst))
        dfList.append(self.runModel(weather, 'clearsky', modelLst))
        dfList.append(
            self._mc.solar_position.zenith)  # ---- add solar position
        self.DataTable = pd.concat(dfList, axis=1)
        # de-duplicate the GHI input column (identical across all
        # decomposition models) and drop redundant kt for erbs_kt
        drop = []
        haveGHI = False
        for col in self.DataTable:
            if 'ghi' in col and not (col.startswith('ghi_clearsky')
                                     or col.startswith('ghi_campbell')):
                if not haveGHI:
                    self.DataTable.rename(
                        columns={col: 'ghi'}, inplace=True
                    )  # rename first GHI field as GHI, since this is input and hence same for all models
                    haveGHI = True
                else:
                    drop.append(col)
            elif col == 'kt_erbs_kt':
                drop.append(
                    col)  # redundant as kt is input to (experimental) erbs_kt
        if (len(drop) > 0):
            self.DataTable = self.DataTable.drop(drop, axis=1)

    def writeCSV(self, csvName):  # write self.DataTable to .csv file
        """Store simulated PV power in a gzip-compressed .csv file
        (csvName with a '.kml' suffix replaced by '_sim.csv.gz')."""
        path = self.config[self._cfg].get('storePath')
        fName = re.sub(r'\.kml$', '_sim.csv.gz', csvName)
        self.DataTable.to_csv(path + "/" + fName, compression='gzip')
        return ()
class ForecastModel(object):
    """
    An object for querying and holding forecast model information for use
    within the pvlib library.

    Simplifies use of siphon library on a THREDDS server.

    Parameters
    ----------
    model_type: string
        UNIDATA category in which the model is located.
    model_name: string
        Name of the UNIDATA forecast model.
    set_type: string
        Model dataset type.

    Attributes
    ----------
    access_url: string
        URL specifying the dataset from which data will be retrieved.
    base_tds_url : string
        The top level server address
    catalog_url : string
        The url path of the catalog to parse.
    data: pd.DataFrame
        Data returned from the query.
    data_format: string
        Format of the forecast data being requested from UNIDATA.
    dataset: Dataset
        Object containing information used to access forecast data.
    dataframe_variables: list
        Model variables that are present in the data.
    datasets_list: list
        List of all available datasets.
    fm_models: Dataset
        TDSCatalog object containing all available
        forecast models from UNIDATA.
    fm_models_list: list
        List of all available forecast models from UNIDATA.
    latitude: list
        A list of floats containing latitude values.
    location: Location
        A pvlib Location object containing geographic quantities.
    longitude: list
        A list of floats containing longitude values.
    lbox: boolean
        Indicates the use of a location bounding box.
    ncss: NCSS object
        NCSS
    model_name: string
        Name of the UNIDATA forecast model.
    model: Dataset
        A dictionary of Dataset objects, whose keys are the names of the
        datasets.
    model_url: string
        The url path of the dataset to parse.
    modelvariables: list
        Common variable names that correspond to queryvariables.
    query: NCSS query object
        NCSS object used to complete the forecast data retrieval.
    queryvariables: list
        Variables that are used to query the THREDDS Data Server.
    time: DatetimeIndex
        Time range.
    variables: dict
        Defines the variables to obtain from the weather
        model and how they should be renamed to common variable names.
    units: dict
        Dictionary containing the units of the standard variables
        and the model specific variables.
    vert_level: float or integer
        Vertical altitude for query data.
    """

    access_url_key = 'NetcdfSubset'
    catalog_url = 'http://thredds.ucar.edu/thredds/catalog.xml'
    base_tds_url = catalog_url.split('/thredds/')[0]
    data_format = 'netcdf'

    vert_level = 100000  # default vertical level for the NCSS query

    # units of the standard (renamed) output variables
    units = {
        'temp_air': 'C',
        'wind_speed': 'm/s',
        'ghi': 'W/m^2',
        'ghi_raw': 'W/m^2',
        'dni': 'W/m^2',
        'dhi': 'W/m^2',
        'total_clouds': '%',
        'low_clouds': '%',
        'mid_clouds': '%',
        'high_clouds': '%'}

    def __init__(self, model_type, model_name, set_type):
        self.model_type = model_type
        self.model_name = model_name
        self.set_type = set_type
        # walk the THREDDS catalog down to the requested model
        self.catalog = TDSCatalog(self.catalog_url)
        self.fm_models = TDSCatalog(self.catalog.catalog_refs[model_type].href)
        self.fm_models_list = sorted(list(self.fm_models.catalog_refs.keys()))

        try:
            model_url = self.fm_models.catalog_refs[model_name].href
        except ParseError:
            raise ParseError(self.model_name + ' model may be unavailable.')

        try:
            self.model = TDSCatalog(model_url)
        except HTTPError:
            # retry once before giving up (transient server error)
            try:
                self.model = TDSCatalog(model_url)
            except HTTPError:
                raise HTTPError(self.model_name + ' model may be unavailable.')

        self.datasets_list = list(self.model.datasets.keys())
        self.set_dataset()

    def __repr__(self):
        return '{}, {}'.format(self.model_name, self.set_type)

    def set_dataset(self):
        '''
        Retrieves the designated dataset, creates NCSS object, and
        creates a NCSS query object.
        '''
        keys = list(self.model.datasets.keys())
        # datasets are labelled e.g. 'Best ...', 'Latest ...', 'Full ...'
        labels = [item.split()[0].lower() for item in keys]
        if self.set_type == 'best':
            self.dataset = self.model.datasets[keys[labels.index('best')]]
        elif self.set_type == 'latest':
            self.dataset = self.model.datasets[keys[labels.index('latest')]]
        elif self.set_type == 'full':
            self.dataset = self.model.datasets[keys[labels.index('full')]]

        self.access_url = self.dataset.access_urls[self.access_url_key]
        self.ncss = NCSS(self.access_url)
        self.query = self.ncss.query()

    def set_query_latlon(self):
        '''
        Sets the NCSS query location latitude and longitude.

        Uses a bounding box when self.latitude/self.longitude are lists,
        otherwise a single point.
        '''
        if (isinstance(self.longitude, list) and
                isinstance(self.latitude, list)):
            self.lbox = True
            # west, east, south, north
            # NOTE(review): siphon's lonlat_box expects longitudes first
            # (west, east, south, north); latitudes are passed first here --
            # confirm argument order against the siphon version in use.
            self.query.lonlat_box(self.latitude[0], self.latitude[1],
                                  self.longitude[0], self.longitude[1])
        else:
            self.lbox = False
            self.query.lonlat_point(self.longitude, self.latitude)

    def set_location(self, time, latitude, longitude):
        '''
        Sets the location for the query.

        Parameters
        ----------
        time: datetime or DatetimeIndex
            Time range of the query. Its timezone (if any) is propagated
            to the Location.
        latitude: float
            Latitude of the query location.
        longitude: float
            Longitude of the query location.
        '''
        if isinstance(time, datetime.datetime):
            tzinfo = time.tzinfo
        else:
            tzinfo = time.tz

        if tzinfo is None:
            self.location = Location(latitude, longitude)
        else:
            self.location = Location(latitude, longitude, tz=tzinfo)

    def get_data(self, latitude, longitude, start, end,
                 vert_level=None, query_variables=None,
                 close_netcdf_data=True):
        """
        Submits a query to the UNIDATA servers using Siphon NCSS and
        converts the netcdf data to a pandas DataFrame.

        Parameters
        ----------
        latitude: float
            The latitude value.
        longitude: float
            The longitude value.
        start: datetime or timestamp
            The start time.
        end: datetime or timestamp
            The end time.
        vert_level: None, float or integer
            Vertical altitude of interest.
        query_variables: None or list
            If None, uses self.variables.
        close_netcdf_data: bool
            Controls if the temporary netcdf data file should be closed.
            Set to False to access the raw data.

        Returns
        -------
        forecast_data : DataFrame
            column names are the weather model's variable names.
        """
        if vert_level is not None:
            self.vert_level = vert_level

        if query_variables is None:
            self.query_variables = list(self.variables.values())
        else:
            self.query_variables = query_variables

        self.latitude = latitude
        self.longitude = longitude
        self.set_query_latlon()  # modifies self.query
        self.set_location(start, latitude, longitude)

        self.start = start
        self.end = end
        self.query.time_range(self.start, self.end)

        self.query.vertical_level(self.vert_level)
        self.query.variables(*self.query_variables)
        self.query.accept(self.data_format)
        self.netcdf_data = self.ncss.get_data(self.query)

        # might be better to go to xarray here so that we can handle
        # higher dimensional data for more advanced applications
        self.data = self._netcdf2pandas(self.netcdf_data, self.query_variables)

        if close_netcdf_data:
            self.netcdf_data.close()

        return self.data

    def process_data(self, data, **kwargs):
        """
        Defines the steps needed to convert raw forecast data
        into processed forecast data. Most forecast models implement
        their own version of this method which also call this one.

        Parameters
        ----------
        data: DataFrame
            Raw forecast data

        Returns
        -------
        data: DataFrame
            Processed forecast data.
        """
        data = self.rename(data)
        return data

    def get_processed_data(self, *args, **kwargs):
        """
        Get and process forecast data.

        Parameters
        ----------
        *args: positional arguments
            Passed to get_data
        **kwargs: keyword arguments
            Passed to get_data and process_data

        Returns
        -------
        data: DataFrame
            Processed forecast data
        """
        return self.process_data(self.get_data(*args, **kwargs), **kwargs)

    def rename(self, data, variables=None):
        """
        Renames the columns according to the variable mapping.

        Parameters
        ----------
        data: DataFrame
        variables: None or dict
            If None, uses self.variables

        Returns
        -------
        data: DataFrame
            Renamed data.
        """
        if variables is None:
            variables = self.variables
        # mapping is {common_name: model_name}, so invert for rename
        return data.rename(columns={y: x for x, y in variables.items()})

    def _netcdf2pandas(self, netcdf_data, query_variables):
        """
        Transforms data from netcdf to pandas DataFrame.

        Parameters
        ----------
        netcdf_data: netcdf
            Data returned from UNIDATA NCSS query.
        query_variables: list
            The variables requested.

        Returns
        -------
        pd.DataFrame
        """
        # set self.time
        try:
            time_var = 'time'
            self.set_time(netcdf_data.variables[time_var])
        except KeyError:
            # which model does this dumb thing?
            time_var = 'time1'
            self.set_time(netcdf_data.variables[time_var])

        data_dict = {key: data[:].squeeze() for key, data in
                     netcdf_data.variables.items() if key in query_variables}

        return pd.DataFrame(data_dict, index=self.time)

    def set_time(self, time):
        '''
        Converts time data into a pandas date object and stores it in
        self.time.

        Parameters
        ----------
        time: netcdf
            Contains time information.

        Returns
        -------
        pandas.DatetimeIndex
        '''
        # NOTE: relies on self.location (and its tz) having been set by a
        # prior set_location() call
        times = num2date(time[:].squeeze(), time.units)
        self.time = pd.DatetimeIndex(pd.Series(times), tz=self.location.tz)

    def cloud_cover_to_ghi_linear(self, cloud_cover, ghi_clear, offset=35,
                                  **kwargs):
        """
        Convert cloud cover to GHI using a linear relationship.

        0% cloud cover returns ghi_clear.
        100% cloud cover returns offset*ghi_clear.

        Parameters
        ----------
        cloud_cover: numeric
            Cloud cover in %.
        ghi_clear: numeric
            GHI under clear sky conditions.
        offset: numeric
            Determines the minimum GHI.
        kwargs
            Not used.

        Returns
        -------
        ghi: numeric
            Estimated GHI.

        References
        ----------
        Larson et. al. "Day-ahead forecasting of solar power output from
        photovoltaic plants in the American Southwest" Renewable Energy
        91, 11-20 (2016).
        """
        offset = offset / 100.
        cloud_cover = cloud_cover / 100.
        ghi = (offset + (1 - offset) * (1 - cloud_cover)) * ghi_clear
        return ghi

    def cloud_cover_to_irradiance_clearsky_scaling(self, cloud_cover,
                                                   method='linear',
                                                   **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine clear sky GHI using Ineichen model and
           climatological turbidity.
        2. Estimate cloudy sky GHI using a function of
           cloud_cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_ghi_linear`
        3. Estimate cloudy sky DNI using the DISC model.
        4. Calculate DHI from GHI and DNI.

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        method : str
            Method for converting cloud cover to GHI.
            'linear' is currently the only option.
        **kwargs
            Passed to the method that does the conversion

        Returns
        -------
        irrads : DataFrame
            Estimated GHI, DNI, and DHI.
        """
        solpos = self.location.get_solarposition(cloud_cover.index)
        cs = self.location.get_clearsky(cloud_cover.index, model='ineichen',
                                        solar_position=solpos)

        method = method.lower()
        if method == 'linear':
            ghi = self.cloud_cover_to_ghi_linear(cloud_cover, cs['ghi'],
                                                 **kwargs)
        else:
            raise ValueError('invalid method argument')

        dni = disc(ghi, solpos['zenith'], cloud_cover.index)['dni']
        dhi = ghi - dni * np.cos(np.radians(solpos['zenith']))

        # fillna(0) covers nighttime NaNs from the decomposition
        irrads = pd.DataFrame({'ghi': ghi, 'dni': dni, 'dhi': dhi}).fillna(0)
        return irrads

    def cloud_cover_to_transmittance_linear(self, cloud_cover, offset=0.75,
                                            **kwargs):
        """
        Convert cloud cover to atmospheric transmittance using a linear
        model.

        0% cloud cover returns offset.
        100% cloud cover returns 0.

        Parameters
        ----------
        cloud_cover : numeric
            Cloud cover in %.
        offset : numeric
            Determines the maximum transmittance.
        kwargs
            Not used.

        Returns
        -------
        transmittance : numeric
            Estimated atmospheric transmittance.
        """
        # NOTE(review): the 'offset' parameter is unused -- the maximum
        # transmittance is hard-coded to 0.75 below; confirm whether the
        # factor should be `offset` instead.
        transmittance = ((100.0 - cloud_cover) / 100.0) * 0.75

        return transmittance

    def cloud_cover_to_irradiance_liujordan(self, cloud_cover, **kwargs):
        """
        Estimates irradiance from cloud cover in the following steps:

        1. Determine transmittance using a function of cloud cover e.g.
           :py:meth:`~ForecastModel.cloud_cover_to_transmittance_linear`
        2. Calculate GHI, DNI, DHI using the
           :py:func:`pvlib.irradiance.liujordan` model

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        # in principle, get_solarposition could use the forecast
        # pressure, temp, etc., but the cloud cover forecast is not
        # accurate enough to justify using these minor corrections
        solar_position = self.location.get_solarposition(cloud_cover.index)
        # NOTE(review): 'extraradiation' was renamed 'get_extra_radiation'
        # in pvlib 0.6 -- confirm this name resolves with the pvlib
        # version/import in use.
        dni_extra = extraradiation(cloud_cover.index)
        airmass = self.location.get_airmass(cloud_cover.index)

        transmittance = self.cloud_cover_to_transmittance_linear(cloud_cover,
                                                                 **kwargs)

        irrads = liujordan(solar_position['apparent_zenith'],
                           transmittance, airmass['airmass_absolute'],
                           dni_extra=dni_extra)
        irrads = irrads.fillna(0)

        return irrads

    def cloud_cover_to_irradiance(self, cloud_cover, how='clearsky_scaling',
                                  **kwargs):
        """
        Convert cloud cover to irradiance. A wrapper method.

        Parameters
        ----------
        cloud_cover : Series
            Cloud cover in %.
        how : str
            Selects the method for conversion. Can be one of
            clearsky_scaling or liujordan.
        **kwargs
            Passed to the selected method.

        Returns
        -------
        irradiance : DataFrame
            Columns include ghi, dni, dhi
        """
        how = how.lower()
        if how == 'clearsky_scaling':
            irrads = self.cloud_cover_to_irradiance_clearsky_scaling(
                cloud_cover, **kwargs)
        elif how == 'liujordan':
            irrads = self.cloud_cover_to_irradiance_liujordan(
                cloud_cover, **kwargs)
        else:
            raise ValueError('invalid how argument')

        return irrads

    def kelvin_to_celsius(self, temperature):
        """
        Converts Kelvin to celsius.

        Parameters
        ----------
        temperature: numeric
            Temperature in K.

        Returns
        -------
        temperature: numeric
            Temperature in C.
        """
        return temperature - 273.15

    def isobaric_to_ambient_temperature(self, data):
        """
        Calculates temperature from isobaric temperature.

        Parameters
        ----------
        data: DataFrame
            Must contain columns pressure, temperature_iso,
            temperature_dew_iso. Input temperature in K.

        Returns
        -------
        temperature : Series
            Temperature in K
        """
        P = data['pressure'] / 100.0  # presumably Pa -> hPa; confirm input units
        Tiso = data['temperature_iso']
        Td = data['temperature_dew_iso'] - 273.15  # dew point K -> C

        # saturation water vapor pressure
        e = 6.11 * 10**((7.5 * Td) / (Td + 273.3))

        # saturation water vapor mixing ratio
        w = 0.622 * (e / (P - e))

        T = Tiso - ((2.501 * 10.**6) / 1005.7) * w

        return T

    def uv_to_speed(self, data):
        """
        Computes wind speed from wind components.

        Parameters
        ----------
        data : DataFrame
            Must contain the columns 'wind_speed_u' and 'wind_speed_v'.

        Returns
        -------
        wind_speed : Series
        """
        wind_speed = np.sqrt(data['wind_speed_u']**2 + data['wind_speed_v']**2)

        return wind_speed

    def gust_to_speed(self, data, scaling=1/1.4):
        """
        Computes standard wind speed from gust.
        Very approximate and location dependent.

        Parameters
        ----------
        data : DataFrame
            Must contain the column 'wind_speed_gust'.
        scaling : numeric
            Multiplicative factor applied to the gust speed.

        Returns
        -------
        wind_speed : Series
        """
        wind_speed = data['wind_speed_gust'] * scaling

        return wind_speed
class LocalRE(object):
    """Local renewable-energy (wind + PV) generation model.

    Wind generation is modelled with windpowerlib's
    TurbineClusterModelChain; PV generation with one pvlib ModelChain per
    configured array. Feed a Dark Sky forecast to
    make_generation_forecasts(), then call predict_generation().
    """

    # Height (metres) at which the DarkSky API reports weather data.
    forecast_height = 10

    def __init__(
            self,
            wind_turbines: list = None,
            pv_arrays: list = None,
            latitude: float = 57.6568,
            longitude: float = -3.5818,
            altitude: float = 10,
            roughness_length: float = 0.15,  # roughness length (bit of a guess)
            hellman_exp: float = 0.2):
        """ Set up the renewable energy generation

        Arguments:
            wind_turbines {list} -- dicts with keys name, hub_height,
                nominal_power, rotor_diameter, power_curve and qty
            pv_arrays {list} -- dicts with keys name, module_name and
                inverter_name plus any pvlib PVSystem keyword arguments
            latitude, longitude, altitude {float} -- site location
            roughness_length {float} -- surface roughness length (m)
            hellman_exp {float} -- Hellman exponent for wind shear

        Raises:
            RenewablesException -- if a PV module or inverter name cannot
                be found in the pvlib SAM libraries
        """
        # Fix: mutable default arguments ([]) are shared between calls;
        # use None sentinels instead.
        if wind_turbines is None:
            wind_turbines = []
        if pv_arrays is None:
            pv_arrays = []

        # This needs to be repeated in every forecast
        self.roughness_length = roughness_length

        # Initialise empty forecast dataframes, just so nothing complains
        # before make_generation_forecasts() has been run.
        self.wind_forecast = pd.DataFrame()
        self.pv_forecast = pd.DataFrame()

        # Wind turbine(s) -> windpowerlib wind farm
        turbines = []
        for turbine in wind_turbines:
            turbines.append({
                'wind_turbine': WindTurbine(
                    turbine['name'],
                    turbine['hub_height'],
                    nominal_power=turbine['nominal_power'],
                    rotor_diameter=turbine['rotor_diameter'],
                    power_curve=turbine['power_curve']),
                'number_of_turbines': turbine['qty']
            })

        local_wind_farm = WindFarm('Local windfarm', turbines,
                                   [latitude, longitude])

        # TODO - check for learned local data & overwrite power_curve
        self.wind_modelchain = TurbineClusterModelChain(
            local_wind_farm,
            smoothing=False,
            hellman_exp=hellman_exp,
        )

        # Initialise PV models
        self.pv_location = Location(latitude=latitude,
                                    longitude=longitude,
                                    altitude=altitude)

        # Now set up the PV array & system.
        cec_pv_model_params = pvlib.pvsystem.retrieve_sam('CECMod')
        sandia_pv_model_params = pvlib.pvsystem.retrieve_sam('SandiaMod')
        cec_inverter_model_params = pvlib.pvsystem.retrieve_sam('CECInverter')
        adr_inverter_model_params = pvlib.pvsystem.retrieve_sam('ADRInverter')

        self.pv_modelchains = {}
        for pv_array in pv_arrays:
            # Try to find the module names in the libraries
            if pv_array['module_name'] in cec_pv_model_params:
                pv_array['module_parameters'] = cec_pv_model_params[
                    pv_array['module_name']]
            elif pv_array['module_name'] in sandia_pv_model_params:
                pv_array['module_parameters'] = sandia_pv_model_params[
                    pv_array['module_name']]
            else:
                raise RenewablesException('Could not retrieve PV module data')

            # Do the same with the inverter(s)
            if pv_array['inverter_name'] in cec_inverter_model_params:
                pv_array['inverter_parameters'] = cec_inverter_model_params[
                    pv_array['inverter_name']]
            elif pv_array['inverter_name'] in adr_inverter_model_params:
                pv_array['inverter_parameters'] = adr_inverter_model_params[
                    pv_array['inverter_name']]
            else:
                # Fix: this branch previously reported "module" data,
                # making inverter lookup failures confusing to debug.
                raise RenewablesException(
                    'Could not retrieve PV inverter data')

            self.pv_modelchains[pv_array['name']] = ModelChain(
                PVSystem(**pv_array),
                self.pv_location,
                aoi_model='physical',
                spectral_model='no_loss')

    def make_generation_forecasts(self, forecast):
        """ Makes generation forecast data from the supplied Dark Sky forecast

        Arguments:
            forecast {pandas.DataFrame} -- DarkSky originated forecast
        """
        self.pv_forecast = self._make_pv_forecast(forecast)
        self.wind_forecast = self._make_wind_forecast(forecast)

    def _make_pv_forecast(self, forecast) -> pd.DataFrame:
        """Compile the forecast required for PV generation prediction

        Uses pvlib to generate solar irradiance predictions.

        Arguments:
            forecast {pandas.DataFrame} -- DarkSky originated forecast

        Returns:
            pandas.DataFrame -- forecast with ghi, dni and dhi columns
                added and temperature/windSpeed renamed for pvlib
        """
        # Annoyingly, the PV & wind libraries want temperature named
        # differently
        pv_forecast = forecast.rename(columns={
            'temperature': 'air_temp',
            'windSpeed': 'wind_speed',
        })

        # Use PV lib to get insolation based on the cloud cover reported here
        model = GFS()

        # Next up, we get hourly solar irradiance using interpolated
        # cloud cover. We can get this from the clearsky GHI...
        # Fix: sys.modules is keyed by module-name *strings*; the old
        # check `tables in sys.modules` compared the module object and
        # was always False, so the Ineichen branch was unreachable.
        if 'tables' in sys.modules:
            # We can use Ineichen clear sky model (uses pytables for
            # turbidity)
            clearsky = self.pv_location.get_clearsky(pv_forecast.index)
        else:
            # We can't, so use 'Simplified Solis'
            clearsky = self.pv_location.get_clearsky(
                pv_forecast.index, model='simplified_solis')

        # ... and by knowledge of where the sun is
        solpos = self.pv_location.get_solarposition(pv_forecast.index)

        # DarkSky cloud cover is 0-1; pvlib wants percent.
        ghi = model.cloud_cover_to_ghi_linear(pv_forecast['cloudCover'] * 100,
                                              clearsky['ghi'])
        dni = disc(ghi, solpos['zenith'], pv_forecast.index)['dni']
        # GHI = DHI + DNI * cos(zenith), rearranged for DHI.
        dhi = ghi - dni * np.cos(np.radians(solpos['zenith']))

        # Whump it all together and we have our forecast!
        pv_forecast['dni'] = dni
        pv_forecast['dhi'] = dhi
        pv_forecast['ghi'] = ghi

        return pv_forecast

    def _make_wind_forecast(self, forecast) -> pd.DataFrame:
        """Creates forecast needed for wind generation prediction

        Creates renamed multidimensional columns needed for the
        windpowerlib system.

        Arguments:
            forecast {pandas.DataFrame} -- DarkSky originated forecast

        Returns:
            pandas.DataFrame -- weather data with (variable, height)
                MultiIndex columns as expected by windpowerlib
        """
        # Easiest to build multiindexes up one by one. The second level
        # is the measurement height in metres (0 for roughness length).
        columns_index = pd.MultiIndex.from_tuples([('wind_speed', 10),
                                                   ('temperature', 10),
                                                   ('pressure', 10),
                                                   ('roughness_length', 0),
                                                   ('wind_bearing', 10)])

        wind_forecast = pd.DataFrame(index=forecast.index.copy(),
                                     columns=columns_index)
        wind_forecast.loc[:, ('wind_speed', 10)] = forecast['windSpeed']
        wind_forecast.loc[:, ('temperature', 10)] = forecast['temperature']
        wind_forecast.loc[:, ('pressure', 10)] = forecast['pressure']
        wind_forecast.loc[:, ('wind_bearing', 10)] = forecast['windBearing']
        wind_forecast.loc[:, ('roughness_length', 0)] = self.roughness_length

        return wind_forecast

    def predict_generation(self, reserved_wind_consumption=0) -> pd.DataFrame:
        """ Predict electricity generated from forecast

        Will use the timestamp index of the forecast property to estimate
        instantaneous electricity generation. Returns table giving
        amounts in kWh.

        Arguments:
            reserved_wind_consumption {float} - constant amount that is
                assumed to be required from wind generation to meet
                other local need

        Returns:
            pandas.DataFrame -- per-timestamp kWh with columns
                PV_AC_<name>, PV_AC_TOTAL, WIND_AC, available_wind,
                total and surplus
        """
        prediction = pd.DataFrame(index=self.pv_forecast.index.copy())

        # First up - PV
        # Create a total gen column of zeros
        prediction['PV_AC_TOTAL'] = 0
        for pv_array, pv_model in self.pv_modelchains.items():
            pv_model.run_model(prediction.index, self.pv_forecast)
            output_column_name = 'PV_AC_' + pv_array
            prediction[output_column_name] = pv_model.ac
            # Add to the total column
            prediction['PV_AC_TOTAL'] = prediction['PV_AC_TOTAL'] + pv_model.ac

        # Next - wind power.
        self.wind_modelchain.run_model(self.wind_forecast)
        prediction['WIND_AC'] = self.wind_modelchain.power_output

        # Convert everything into kWh
        prediction = prediction * 0.001

        prediction['available_wind'] = (prediction['WIND_AC'] -
                                        reserved_wind_consumption)
        # Fix: clamp via .loc; the old chained assignment
        # (df[col][mask] = 0) can silently write to a copy
        # (SettingWithCopyWarning) under pandas' copy rules.
        prediction.loc[prediction['available_wind'] < 0,
                       'available_wind'] = 0

        prediction['total'] = prediction['WIND_AC'] + prediction['PV_AC_TOTAL']
        prediction['surplus'] = (prediction['available_wind'] +
                                 prediction['PV_AC_TOTAL'])
        prediction.loc[prediction['surplus'] < 0, 'surplus'] = 0

        return prediction