def main():
    """Build and run all preprocessor tasks for the DeAngelis f2ext diagnostic (case106)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case106/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # Recipe pieces for the f2ext figure.
    exp_datasets = deangelis_recipe.f2ext.exp_datasets
    variables = deangelis_recipe.f2ext.variables

    # Resolve every dataset for every variable group up front.
    all_datasets = get_datasets(
        datasets=exp_datasets,
        variables=variables,
    )

    # One flat task list: each variable expands into its per-dataset tasks.
    processor_tasks = [
        task
        for var in variables
        for task in get_processor_tasks_for_variable(
            variable=var,
            datasets=all_datasets[var["variable_group"]],
            settings=deangelis_recipe.processor_settings[var["preprocessor"]],
            diagnostic={"diagnostic": diagnostic_name},
            config={"data_path": deangelis_config.data_path},
            work_dir=work_dir,
        )
    ]

    for index, task in enumerate(processor_tasks):
        logger.info(index)
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Build and run all preprocessor tasks for the Bock20 fig3 diagnostic (case108)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case108/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # Recipe pieces for figure 3.
    variables = bock20_recipe.fig3.variables
    variable_additional_datasets = bock20_recipe.fig3.variable_additional_datasets
    additional_datasets = bock20_recipe.fig3.additional_datasets

    # Resolve every dataset for every variable group up front.
    all_datasets = get_datasets(
        variables=variables,
        variable_additional_datasets=variable_additional_datasets,
        additional_datasets=additional_datasets,
    )

    processor_tasks = []
    for var in variables:
        # Variables without an explicit preprocessor fall back to "default",
        # and unknown preprocessor names fall back to empty settings.
        preprocessor_name = var.get("preprocessor", "default")
        preprocessor_settings = bock20_recipe.processor_settings.get(preprocessor_name, {})
        tasks = get_processor_tasks_for_variable(
            variable=var,
            datasets=all_datasets[var["variable_group"]],
            settings=preprocessor_settings,
            diagnostic={"diagnostic": diagnostic_name},
            config={"data_path": bock20_config.data_path},
            work_dir=work_dir,
        )
        processor_tasks.extend(tasks)

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Build and run all preprocessor tasks for the landcover CMIP6 diagnostic (case401)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/land/case401/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # Recipe pieces for the CMIP6 landcover comparison.
    variable_additional_datasets = landcover_recipe.cmip6.variable_additional_datasets
    variables = landcover_recipe.cmip6.variables

    # Resolve every dataset for every variable group up front.
    all_datasets = get_datasets(
        variables=variables,
        variable_additional_datasets=variable_additional_datasets,
    )

    # One flat task list: each variable expands into its per-dataset tasks.
    processor_tasks = [
        task
        for var in variables
        for task in get_processor_tasks_for_variable(
            variable=var,
            datasets=all_datasets[var["variable_group"]],
            settings=landcover_recipe.processor_settings[var["preprocessor"]],
            diagnostic={"diagnostic": diagnostic_name},
            config={"data_path": landcover_config.data_path},
            work_dir=work_dir,
        )
    ]

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def run(
        exp_dataset,
        variable,
        diagnostic_name
):
    """Build and run a single preprocessor task for one dataset/variable (case103, MiLES recipe).

    :param exp_dataset: experiment dataset facets to combine with the variable.
    :param variable: variable definition from the recipe (must carry "preprocessor").
    :param diagnostic_name: diagnostic directory name used in the fetcher/processor paths
        and recorded in the task's diagnostic section.
    """
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case103/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    operations = generate_default_operations()

    combined_dataset = combine_variable(
        dataset=exp_dataset,
        variable=variable,
    )
    add_variable_info(combined_dataset)

    # BUG FIX: the diagnostic was hard-coded to "diurnal_temperature_indicator"
    # (copy-paste from the case102 script) while every path in this task already
    # used diagnostic_name. Use the parameter so the task is self-consistent,
    # matching the other case drivers that pass {"diagnostic": diagnostic_name}.
    diagnostic = {
        "diagnostic": diagnostic_name,
    }

    settings = miles_recipe.processor_settings[combined_dataset["preprocessor"]]

    task = {
        "products": [
            {
                "variable": combined_dataset,
                "input": {
                    # "{work_dir}", "{dataset}" and "{variable_group}" are literal
                    # placeholders expanded later by the processor; only
                    # diagnostic_name is interpolated here.
                    "input_data_source_file": (
                        "{work_dir}"
                        f"/{diagnostic_name}/fetcher/preproc"
                        "/{dataset}/{variable_group}/data_source.yml"
                    ),
                },
                "output": {
                    "output_directory": "{dataset}/{variable_group}"
                },
                "settings": settings
            }
        ],
        # operations
        "operations": operations,
        "diagnostic": diagnostic,
        "output": {
            "output_directory": "{work_dir}" + f"/{diagnostic_name}/processor/preproc",
        },
    }

    run_processor(
        task=task,
        work_dir=work_dir,
        config={},
    )
def run(
        exp_dataset,
        variable,
):
    """Build and run one ClimWIP weight-calculation preprocessor task (case105)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case105/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    operations = generate_default_operations("climatological_mean")

    # Merge dataset facets with the variable definition, then enrich it.
    combined_dataset = combine_variable(dataset=exp_dataset, variable=variable)
    add_variable_info(combined_dataset)

    settings = climwip_recipe.processor_settings[combined_dataset["preprocessor"]]

    # "{work_dir}", "{alias}", "{dataset}" and "{variable_group}" are literal
    # placeholders expanded later by the processor.
    source_file = (
        "{work_dir}"
        + f"/{diagnostic_name}/fetcher/preproc"
        + "/{dataset}/{variable_group}/data_source.yml"
    )
    product = {
        "variable": combined_dataset,
        "input": {
            "input_data_source_file": source_file,
        },
        "output": {
            "output_directory": "{alias}/{variable_group}"
        },
        "settings": settings
    }

    task = {
        "products": [product],
        # operations
        "operations": operations,
        "diagnostic": {
            "diagnostic": "calculate_weights_climwip",
        },
        "output": {
            "output_directory": "{work_dir}" + f"/{diagnostic_name}/processor/preproc",
        },
        "step_type": "processor",
        "type": "ploto_esmvaltool.processor.esmvalcore_pre_processor",
    }

    run_processor(task=task, work_dir=work_dir, config={})
def main():
    """Run one hard-coded dry_days preprocessor task for FGOALS-g3 (case1)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case1/ploto/c1"

    operations = generate_default_operations()

    # Dataset facets for a single CMIP6 daily-precipitation slice.
    dataset = {
        "dataset": "FGOALS-g3",
        "project": "CMIP6",
        "mip": "day",
        "exp": "1pctCO2",
        "ensemble": "r1i1p1f1",
        "grid": "gn",
        "frequency": "day",
        "start_year": 370,
        "end_year": 371,
    }
    diag_dataset = {
        "recipe_dataset_index": 0,
        "alias": "FGOALS-g3",
        "modeling_realm": ["atmos"],
    }
    variable = {
        "short_name": "pr",
        "variable_group": "pr",
        "preprocessor": "default",
    }
    diag = {
        "diagnostic": "dry_days",
    }

    # Flat task: facets, diagnostic-dataset info, variable and diagnostic are
    # merged into one dict alongside the I/O locations and operations.
    task = {
        # input files: the meta file sits next to this script
        "input_data_source_file": pathlib.Path(__file__).parent / "input_meta_file.yml",
        # output
        "output_directory": "{work_dir}/preproc/pr",
        # operations
        "operations": operations,
        **dataset,
        **diag_dataset,
        **variable,
        **diag,
    }

    run_processor(task=task, work_dir=work_dir, config={})
def run(
        exp_dataset,
        variable
):
    """Build and run one diurnal-temperature-indicator preprocessor task (case102)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case102/ploto"

    operations = generate_default_preprocessor_operations()

    # Merge dataset facets with the variable definition.
    combined_dataset = {**exp_dataset, **variable}

    settings = diurnal_recipe.processor_settings[variable["preprocessor"]]

    # "{work_dir}", "{alias}" and "{variable_group}" are literal placeholders
    # expanded later by the processor.
    product = {
        "variable": combined_dataset,
        "input": {
            "input_data_source_file":
                "{work_dir}/fetcher/preproc/{alias}/{variable_group}/data_source.yml",
        },
        "output": {
            "output_directory": "{alias}/{variable_group}"
        },
        "settings": settings
    }

    task = {
        "products": [product],
        # operations
        "operations": operations,
        "diagnostic": {
            "diagnostic": "diurnal_temperature_indicator",
        },
        "output": {
            "output_directory": "{work_dir}/processor/preproc",
        },
    }

    run_processor(task=task, work_dir=work_dir, config={})
def get_processor(exp_dataset, variable):
    """Build and run one dry_days preprocessor task for a dataset/variable pair (case101)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case101/ploto"

    operations = generate_default_operations()

    # Merge dataset facets with the variable definition, then enrich it.
    combined_dataset = combine_variable(dataset=exp_dataset, variable=variable)
    add_variable_info(combined_dataset)

    # "{work_dir}", "{dataset}" and "{variable_group}" are literal placeholders
    # expanded later by the processor. NOTE: no per-product "settings" here,
    # unlike the other case drivers.
    product = {
        "variable": combined_dataset,
        "input": {
            "input_data_source_file":
                "{work_dir}/fetcher/preproc/{dataset}/{variable_group}/data_source.yml",
        },
        "output": {
            "output_directory": "{dataset}/{variable_group}"
        }
    }

    task = {
        "products": [product],
        # operations
        "operations": operations,
        "diagnostic": {
            "diagnostic": "dry_days",
        },
        "output": {
            "output_directory": "{work_dir}/processor/preproc",
        },
    }

    run_processor(task=task, work_dir=work_dir, config={})
def main():
    """Build and run all preprocessor tasks for the SPEI diagnostic (case110)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case110/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # Recipe pieces.
    exp_datasets = spei_recipe.exp_datasets
    variables = spei_recipe.variables

    # Resolve every dataset for every variable group up front.
    all_datasets = get_datasets(
        datasets=exp_datasets,
        variables=variables,
    )

    # One flat task list: each variable expands into its per-dataset tasks.
    processor_tasks = [
        task
        for var in variables
        for task in get_processor_tasks_for_variable(
            variable=var,
            datasets=all_datasets[var["variable_group"]],
            settings=spei_recipe.processor_settings[var["preprocessor"]],
            diagnostic={"diagnostic": diagnostic_name},
            config={"data_path": spei_config.data_path},
            work_dir=work_dir,
        )
    ]

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def run(
        dataset,
        exp,
        variable,
        recipe_dataset_index,
        start_year,
        end_year,
        alias
):
    """Build and run one weighted_temperature_map preprocessor task (case105 map step)."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case105/ploto/map/processor"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    operations = generate_climatological_mean_operations()

    # Fixed CMIP6 facets; only the dataset name, experiment and years vary per call.
    # (A separate name avoids shadowing the `dataset` parameter.)
    dataset_facets = {
        "dataset": dataset,
        "project": "CMIP6",
        "mip": "Amon",
        "exp": exp,
        "ensemble": "r1i1p1f1",
        "grid": "gn",
        "frequency": "mon",
        "type": "exp",  # NOTE: flagged in the original with "#*******************"
        "start_year": start_year,
        "end_year": end_year,
    }
    diagnostic_dataset = {
        "recipe_dataset_index": recipe_dataset_index,
        "alias": alias,
        "modeling_realm": ["atmos"],
    }
    diagnostic = {
        "diagnostic": "weighted_temperature_map",
    }
    settings = {
        "mask_landsea": {
            "mask_out": "sea",
        },
        "regrid": {
            "target_grid": "2.5x2.5",
            "scheme": "linear"
        },
        "climate_statistics": {
            "operator": "mean"
        }
    }

    source_file = (
        f"/home/hujk/ploto/esmvaltool/cases/case105/ploto/map/fetcher/preproc/"
        f"{dataset_facets['dataset']}/{variable['variable_group']}/data_source.yml"
    )
    output_directory = (
        f"{work_dir}/preproc/{dataset_facets['dataset']}/{variable['variable_group']}"
    )

    task = {
        "input_data_source_file": source_file,
        # output
        "output_directory": output_directory,
        # operations
        "operations": operations,
        "dataset": dataset_facets,
        "diagnostic_dataset": diagnostic_dataset,
        "variable": variable,
        "diagnostic": diagnostic,
        "settings": settings
    }

    run_processor(task=task, work_dir=work_dir, config={})