def load_and_interpolate_forecast(model, element_name, station_name, issue):
    """Load a given dataset of forecast data with interpolated values.

    Parameters
    ----------
    model : str
        Name of model.
    element_name : str
        String representation of the provided parameter.
    station_name : str
        Canonical name of the station.
    issue : str
        String representation of the model issue hour.

    Returns
    -------
    pandas.DataFrame
        Forecast data with an 'interpolated_forecast' column added, the
        grid-point value columns dropped, and EPS data transformed from
        row format to column format.
    """
    # Forecast columns: 1 to 4 (values at the surrounding grid points).
    forecast_cols = ['value' + str(x) for x in range(1, 5)]

    # Station location
    station_lat, station_lon = data_io.get_station_location(station_name)

    # Read in forecast
    element_id = data_io.get_element_id(element_name, model)
    forecast_data = data_io.read_forecast_data(
        model, element_id, station_name, issue)

    # Check whether to do any interpolation.
    # NOTE: .loc replaces the deprecated .ix indexer (removed in pandas 1.0);
    # forecast_cols are column labels, so label-based .loc is the correct form.
    empty_columns = \
        forecast_data.loc[:, forecast_cols].isnull().values.any(axis=0)
    if empty_columns.sum() == 3:
        # Just a single column provided. Don't do interpolation.
        logging.debug(
            "Not interpolating for model '%s', element '%s', station '%s'" %
            (model, str(element_id), station_name))
        # Select non-empty column
        non_empty_col = forecast_cols[(~empty_columns).nonzero()[0][0]]
        interpolated_forecast = forecast_data[non_empty_col]
    else:
        # Do interpolation using meta-data
        meta_data = \
            data_io.read_meta_data(model, element_id, station_name, issue)
        interpolated_forecast = interpolate(
            station_lat, station_lon,
            forecast_data.loc[:, forecast_cols],
            meta_data['latitude'],
            meta_data['longitude'],
            meta_data['distance']
        )
    forecast_data['interpolated_forecast'] = interpolated_forecast

    # Drop grid point data.
    forecast_data.drop(forecast_cols, axis=1, inplace=True)

    # Transform EPS data from row format to column format
    forecast_data = \
        transform_forecast_group_data(forecast_data, model, element_name)
    return forecast_data
def test_read_forecast_data(file_parameters): """Test whether the relevant foreacst files can be loaded.""" # Test function relevant imports from helpers import data_io data_io.read_forecast_data(**file_parameters)