def read_catalog(model=None, data='cmip5', freq='day', var='tasmax'):
    """Read in a catalogue of climate model runs via baspy.

    Parameters
    ----------
    model : str, optional
        Model name, e.g. 'HadGEM2-CC', 'BNU-ESM', 'CanESM2', 'bcc-csm1'.
        If None (default), runs from all models are returned.
    data : str
        Dataset name (default 'cmip5').
    freq : str
        Temporal frequency: 'day' (default); other options - 'month', 'year'.
    var : str
        Variable name (default 'tasmax').

    Returns
    -------
    pandas.DataFrame
        Catalogue of all matching runs, re-indexed from 0.
    """
    # Build the query once and only constrain by Model when one was given,
    # instead of duplicating the whole bp.catalogue(...) call in two branches.
    query = {'dataset': data, 'Frequency': freq, 'Var': var}
    if model is not None:
        query['Model'] = model
    return bp.catalogue(**query).reset_index(drop=True)
def get_gcm_catalogue(self):
    """Retrieve and store the baspy CMIP5 catalogue for the configured GCM.

    Reads model, future RCP experiment list, period bounds and GCM variable
    name from ``self.settings``, queries baspy for daily 'r1i1p1' runs of
    the historical experiment plus the requested future RCP(s), and caches
    the result on ``self.catalogue``.

    Returns
    -------
    pandas.DataFrame
        Catalogue of matching runs, re-indexed from 0.
    """
    # NOTE: the original message used backslash continuations inside the
    # string literal, which embedded long runs of indentation whitespace
    # in the printed output; build the message cleanly instead.
    print("GCM settings: model = {}, rcp = {}, start = {}, end = {}".format(
        self.settings['model'],
        self.settings['future_rcp'],
        self.settings['model_start'],
        self.settings['model_end'],
    ))

    # Retrieve catalogue: historical run plus the configured future scenario(s).
    self.catalogue = bp.catalogue(
        dataset='cmip5',
        Model=self.settings['model'],
        Frequency='day',
        Experiment=['historical'] + self.settings['future_rcp'],
        RunID='r1i1p1',
        Var=self.settings['variable_name_gcm'],
    ).reset_index(drop=True)
    print(self.catalogue)
    return self.catalogue
import baspy as bp

# Rebuild the locally cached catalogue files for both archives:
# refresh=True forces baspy to re-scan the data holdings instead of
# reusing a previously cached catalogue.
cmip5_cat = bp.catalogue(dataset='cmip5', refresh=True)
happi_cat = bp.catalogue(dataset='happi', refresh=True)

print('done')
import baspy as bp
import xarray as xr

'''
Define scope of CMIP6 that we want (our catalogue)
    * amip = atmosphere-only run (with transient/observed sea surface temperatures)
    * tasmin,tasmax = minimum/maximum temperature over period
    * CMOR (Climate Model Output Rewriter), defines, amongst other things,
        the temporal frequency of the data (monthly, daily etc)
        see: https://github.com/PCMDI/cmip6-cmor-tables/tree/master/Tables
    * Model = our chosen CMIP6 climate model
    * RunID = the run ID :-)
'''
# Query the baspy catalogue for daily tasmin/tasmax from a single
# CNRM-CM6-1 amip run.
catlg = bp.catalogue(dataset='cmip6', Experiment='amip',
                     Var=['tasmax', 'tasmin'], CMOR='day',
                     Model='CNRM-CM6-1', RunID='r1i1p2f2'.replace('p2', 'p1'))

'''
Read Datasets using BASpy wrapper for Xarray
'''
# One Dataset per variable, selected by filtering the catalogue rows.
tasmin_ds = bp.open_dataset(catlg[catlg.Var == 'tasmin'])
tasmax_ds = bp.open_dataset(catlg[catlg.Var == 'tasmax'])

'''
extract DataArray from Dataset
'''
tasmin = tasmin_ds.tasmin
tasmax = tasmax_ds.tasmax

'''
Now analyse CMIP6 data using the Xarray framework
    [1] http://xarray.pydata.org/en/stable/
    [2] https://github.com/scotthosking/notebooks/blob/master/getting_started_with_Xarray%2BDask.ipynb
'''
import baspy as bp import numpy as np import xarray as xr from pathlib import Path import pandas as pd import logging from typing import List # load the baspy catalogue df = bp.catalogue(dataset="cmip6", CMOR="Amon") def get_global_mean( Model: str, Experiment: str, RunID: str = None, year_ranges: List[slice] = [slice(1850, 1950), slice(2000, 2010)], var: str = "tas", ) -> List[float]: """Get area weighted global mean values for a CMIP6 run for year slices. The actual calculation is all done by xarray. Most of this code is a baspy wrapper. A match is looked for in the baspy catalogue, then the global surface mean temperature is calculated from atmospheric monthly data. Only tested with CMIP6. Input: Model, Experiment RunID --- same meaning as in baspy module year_ranges --- a list of slices
region_bounds = bp.region.Sub_regions.central_england ### Historical period hist_con = iris.Constraint(year=lambda y: 1979 <= y <= 2004) erai = iris.load_cube( '/group_workspaces/jasmin4/bas_climate/data/ecmwf/era-interim/mon/surface/t2m_mon.nc', callback=edit_erai_attrs, constraint=hist_con) erai = bp.region.extract(erai, region_bounds) hist_catlg = bp.catalogue(Experiment='historical', Frequency='mon', Model='HadGEM2-CC', Var='tas', RunID='r1i1p1') hist = bp.get_cube(hist_catlg, constraints=hist_con) hist = bp.region.extract(hist, region_bounds) ### Future Period fut_con = iris.Constraint(year=lambda y: 2070 <= y <= 2100) fut_catlg = bp.catalogue(Experiment='rcp45', Frequency='mon', Model='HadGEM2-CC', Var='tas', RunID='r1i1p1') fut = bp.get_cube(fut_catlg, constraints=fut_con) fut = bp.region.extract(fut, region_bounds)
# -*- coding: UTF-8 -*- from jasmin.downloader import dataprocessing as dp from baspy._xarray.util import extract_region import pandas as pd import baspy as bp ## _cm ending means climate model ## tas means surface temp cat_model = bp.catalogue(dataset='cmip5', Model='HadGEM2-CC', Frequency='day', Experiment='rcp45', RunID='r1i1p1', Var='tas').reset_index(drop=True) for index, row in cat_model.iterrows(): cm = bp.open_dataset(row) tas_cm = cm.tas lon_cor_cm = dp.roll_lon(tas_cm) ## Extract a specific region extr_reg_cm = extract_region(lon_cor_cm, kabul) reg_time_sliced_cm = dp.slice_time(extr_reg_cm, 1979, 2050)
""" # Required directories loaddir_CMIP = 'Priestley-Centre/Near_term_warming/analysis_figure_code/'+\ 'SuppFig2/saved_arrays' savedir = 'Priestley-Centre/Near_term_warming/analysis_figure_code/'+\ 'SuppFig8/saved_data' ### ------ Load in CMIP6 data ------ # Load models models = np.load(loaddir_CMIP + '/models_gtas_CMIP6_piControl.npy') # Load catalogue so can extract runids var = 'tas' cat_PI = bp.catalogue(dataset='cmip6',Var=var,Experiment='piControl',\ CMOR='Amon') years = np.linspace(1, 20, 20) ### Process data, one model and RunID at a time i = 0 fig,axs = plt.subplots(7,7,sharex=True,sharey=True,\ figsize=(15,12)) fig.suptitle('PDFs of rolling GSAT trends for 20-year segments of CMIP6 '+\ 'piControl runs',fontsize=20) axs = axs.ravel() for model in models: ## Get data for model filtmod_PI = cat_PI[cat_PI['Model'] == model]