def check_input_data(cfg):
    """Validate the recipe input data for this diagnostic.

    Checks that 'tas' and a TOA flux variable ('rtnt' or 'rtmt') are
    available, that all datasets share exactly one supported 'project'
    attribute, and that precisely the 'piControl' and the project's
    abrupt 4xCO2 experiments are given.

    Raises
    ------
    ValueError
        If any of the above conditions is violated.
    """
    if not variables_available(cfg, ['tas']):
        raise ValueError(
            "This diagnostic needs variable 'tas' if 'read_external_file' is "
            "not given")

    # Either net TOA flux variable is acceptable
    has_toa_flux = any(
        variables_available(cfg, [short_name])
        for short_name in ('rtnt', 'rtmt'))
    if not has_toa_flux:
        raise ValueError(
            "This diagnostic needs the variable 'rtnt' or 'rtmt' if "
            "'read_external_file' is not given")

    datasets = cfg['input_data'].values()
    projects = list(group_metadata(datasets, 'project'))
    if len(projects) > 1:
        raise ValueError(
            f"This diagnostic supports only unique 'project' attributes, got "
            f"{projects}")
    project = projects[0]
    if project not in EXP_4XCO2:
        raise ValueError(f"Project '{project}' not supported yet")

    exps = set(group_metadata(datasets, 'exp'))
    if exps != {'piControl', EXP_4XCO2[project]}:
        raise ValueError(
            f"This diagnostic needs 'piControl' and '{EXP_4XCO2[project]}' "
            f"experiments, got {exps}")
def test_variables_available():
    """Check that ``variables_available`` detects present/absent variables."""
    metadata = {'short_name': 'ta'}
    cfg = {'input_data': {'file1.nc': metadata}}

    # 'ta' is declared in the input data, 'pr' is not
    assert shared.variables_available(cfg, ['ta']) is True
    assert shared.variables_available(cfg, ['pr']) is False
def main(cfg):
    """Run the diagnostic.

    Parameters
    ----------
    cfg : dict
        Configuration dictionary of the recipe.

    Raises
    ------
    ValueError
        If a required variable, the 4xCO2 experiment, the piControl
        experiment is missing, or if more than one observational
        dataset for 'rsnstcsnorm' is given.
    """
    ###########################################################################
    # Read recipe data
    ###########################################################################

    # Get input data
    input_data = cfg['input_data'].values()

    required_vars = ('tas', 'rsnstcs', 'rsnstcsnorm', 'prw')
    if not variables_available(cfg, required_vars):
        # BUG FIX: the original message lacked the f-prefix, so the
        # required variable names were never interpolated.
        raise ValueError(f"This diagnostic needs {required_vars} variables")

    available_exp = list(group_metadata(input_data, 'exp'))

    # Accept either the CMIP6 or the CMIP5 spelling of the 4xCO2 experiment
    if 'abrupt-4xCO2' not in available_exp:
        if 'abrupt4xCO2' not in available_exp:
            raise ValueError("The diagnostic needs an experiment with " +
                             "4 times CO2.")

    if 'piControl' not in available_exp:
        raise ValueError("The diagnostic needs a pre industrial control " +
                         "experiment.")

    ###########################################################################
    # Read data
    ###########################################################################

    # Create iris cube for each dataset and save annual means
    cubes = reform_data_iris_deangelis3b4(input_data)

    # Collect observational ('nomodel') datasets per variable
    meas_tub_rsnstcsnorm = []
    meas_tub_prw = []
    for ctub in cubes:
        if ctub[2] == 'nomodel':
            if ctub[1] == 'prw':
                meas_tub_prw.append(ctub)
            if ctub[1] == 'rsnstcsnorm':
                meas_tub_rsnstcsnorm.append(ctub)

    if len(meas_tub_rsnstcsnorm) > 1:
        raise ValueError("This diagnostic expects one (or no) observational "
                         "dataset for rsnstcsnorm")

    ###########################################################################
    # Process data
    ###########################################################################
    [grid_pw, reg_prw_obs] = set_grid_pw_reg_obs(cubes, meas_tub_rsnstcsnorm,
                                                 meas_tub_prw)

    data_model = substract_and_reg_deangelis(cfg, cubes, grid_pw, reg_prw_obs)

    plot_deangelis_fig3b4(cfg, data_model, reg_prw_obs)
def main(cfg):
    """Run the diagnostic."""
    sns.set(**cfg.get('seaborn_settings', {}))
    input_data = cfg['input_data'].values()

    # Determine the (non-observational) project; keep the list if it is
    # not unique so the provenance record reflects the ambiguity
    project = list(group_metadata(input_data, 'project').keys())
    project = [p for p in project if 'obs' not in p.lower()]
    if len(project) == 1:
        project = project[0]

    # 'tas' is mandatory for this diagnostic
    if not variables_available(cfg, ['tas']):
        raise ValueError("This diagnostic needs 'tas' variable")

    # Load ECS data produced by an ancestor diagnostic
    ecs_filepath = io.get_ancestor_file(cfg, 'ecs.nc')
    ecs_cube = iris.load_cube(ecs_filepath)

    # Build a time-mean cube per dataset, split by experiment
    hist_cubes = {}
    pi_cubes = {}
    for dataset_info in input_data:
        name = dataset_info['dataset']
        logger.info("Processing %s", name)
        cube = iris.load_cube(dataset_info['filename'])

        # Convert units and collapse over time
        cube.convert_units(cfg.get('tas_units', 'celsius'))
        cube = cube.collapsed(['time'], iris.analysis.MEAN)

        exp = dataset_info.get('exp')
        if exp == 'historical':
            hist_cubes[name] = cube
        elif exp == 'piControl':
            pi_cubes[name] = cube

    # Plot and write netcdf output
    plot_path = plot_data(cfg, hist_cubes, pi_cubes, ecs_cube)
    netcdf_path = write_data(cfg, hist_cubes, pi_cubes, ecs_cube)

    # Record provenance (ECS file counts as an ancestor, too)
    ancestor_files = [d['filename'] for d in input_data]
    ancestor_files.append(ecs_filepath)
    provenance_record = get_provenance_record(project, ancestor_files)
    if plot_path is not None:
        provenance_record.update({
            'plot_file': plot_path,
            'plot_types': ['scatter'],
        })
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(netcdf_path, provenance_record)
def check_input_data(cfg):
    """Validate the recipe input data for this diagnostic.

    Checks that 'tas' is available, that all datasets share exactly one
    'project' attribute, and that precisely the 'piControl' and
    '1pctCO2' experiments are given.

    Raises
    ------
    ValueError
        If any of the above conditions is violated.
    """
    if not variables_available(cfg, ['tas']):
        raise ValueError(
            "This diagnostic needs variable 'tas' if 'read_external_file' is "
            "not given")

    datasets = cfg['input_data'].values()
    projects = list(group_metadata(datasets, 'project'))
    if len(projects) > 1:
        raise ValueError(
            f"This diagnostic supports only unique 'project' attributes, got "
            f"{projects}")

    exps = set(group_metadata(datasets, 'exp'))
    if exps != {'piControl', '1pctCO2'}:
        raise ValueError(
            f"This diagnostic needs '1pctCO2' and 'piControl' experiment, got "
            f"{exps}")
def main(cfg):
    """Run the diagnostic.

    Parameters
    ----------
    cfg : dict
        Configuration dictionary of the recipe.

    Raises
    ------
    ValueError
        If a required variable, the 4xCO2 experiment, or the piControl
        experiment is missing.
    """
    ###########################################################################
    # Read recipe data
    ###########################################################################

    # Dataset data containers
    data = e.Datasets(cfg)
    logging.debug("Found datasets in recipe:\n%s", data)

    # Variables
    var = e.Variables(cfg)

    available_vars = list(
        group_metadata(cfg['input_data'].values(), 'short_name'))
    logging.debug("Found variables in recipe:\n%s", available_vars)

    available_exp = list(group_metadata(cfg['input_data'].values(), 'exp'))

    # Check for available variables
    required_vars = ('tas', 'lvp', 'rlnst', 'rsnst', 'rlnstcs', 'rsnstcs',
                     'hfss')
    if not e.variables_available(cfg, required_vars):
        # BUG FIX: the original message lacked the f-prefix, so the
        # required variable names were never interpolated.
        raise ValueError(f"This diagnostic needs {required_vars} variables")

    # Check for experiments (CMIP6 and CMIP5 spellings of 4xCO2)
    if 'abrupt-4xCO2' not in available_exp:
        if 'abrupt4xCO2' not in available_exp:
            raise ValueError("The diagnostic needs an experiment with " +
                             "4 times CO2.")

    if 'piControl' not in available_exp:
        raise ValueError("The diagnostic needs a pre industrial control " +
                         "experiment.")

    ###########################################################################
    # Read data
    ###########################################################################

    # Create iris cube for each dataset and save annual means
    for dataset_path in data:
        cube = iris.load(dataset_path)[0]
        cat.add_year(cube, 'time', name='year')
        cube = cube.aggregated_by('year', iris.analysis.MEAN)
        experiment = data.get_info(n.EXP, dataset_path)
        if experiment == PICONTROL:
            # DeAngelis use a 21 year running mean on piControl but the
            # full extent of 150 years abrupt4xCO2. It is not documented
            # how they treat the edges; here the running mean is simply
            # not applied to the first and last 10 years.
            # This is not exactly the same as done in the paper (small
            # differences remain in extended data Fig 1), but it is
            # closer than other methods tried, e.g. skipping the edges.
            # For most data sets it would also be possible to extend the
            # piControl for 20 years, but then it would no longer be
            # centered means of piControl for each year of abrupt4xCO2.
            cube_new = cube.rolling_window('time', iris.analysis.MEAN, 21)
            endm10 = len(cube.coord('time').points) - 10
            cube.data[10:endm10] = cube_new.data

        # Save the annual means for every dataset (piControl additionally
        # carries the smoothed interior computed above)
        data.set_data(cube.data, dataset_path)

    ###########################################################################
    # Process data
    ###########################################################################

    data_dict = substract_and_reg_deangelis2(cfg, data, var)

    plot_slope_regression(cfg, data_dict)
    plot_slope_regression_all(cfg, data_dict, available_vars)