def main():
    """Build and run Bock20 fig2 preprocessor tasks for every recipe variable."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case108/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # recipe
    variables = bock20_recipe.fig2.variables
    variable_additional_datasets = bock20_recipe.fig2.variable_additional_datasets
    additional_datasets = bock20_recipe.fig2.additional_datasets

    # get all datasets
    # NOTE(review): `datasets` is never consumed below — the task loop is
    # driven by `variables` alone. Confirm whether get_datasets() is kept
    # for its side effects or is dead code here.
    datasets = get_datasets(
        variables=variables,
        variable_additional_datasets=variable_additional_datasets,
        additional_datasets=additional_datasets,
    )

    # One or more preprocessor tasks per variable.
    processor_tasks = []
    for variable in variables:
        processor_tasks.extend(
            get_tasks_for_variable(
                variable=variable,
                config={"data_path": bock20_config.data_path},
                work_dir=work_dir,
            )
        )

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Build and run Eyring13 preprocessor tasks, one batch per recipe variable."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case107/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    exp_datasets = eyring13_recipe.exp_datasets
    variables = eyring13_recipe.variables
    variable_additional_datasets = eyring13_recipe.variable_additional_datasets

    # Resolve the full dataset list, keyed by variable group.
    datasets = get_datasets(
        datasets=exp_datasets,
        variables=variables,
        variable_additional_datasets=variable_additional_datasets,
    )

    processor_tasks = []
    for variable in variables:
        group_datasets = datasets[variable["variable_group"]]
        tasks = get_tasks_for_variable(
            variable=variable,
            datasets=group_datasets,
            diagnostic={"diagnostic": diagnostic_name},
            config={"data_path": eyring13_config.data_path},
            work_dir=work_dir,
        )
        processor_tasks.extend(tasks)

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Build and run SPEI preprocessor tasks.

    When a variable's preprocessor defines multi_model_statistics, synthetic
    "multi-model-<op>" dataset entries are appended so downstream tasks see
    the statistic outputs alongside the real datasets.
    """
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case110/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    exp_datasets = spei_recipe.exp_datasets
    variables = spei_recipe.variables

    datasets = get_datasets(
        datasets=exp_datasets,
        variables=variables,
    )

    processor_tasks = []
    for variable in variables:
        group = variable["variable_group"]
        v_datasets = datasets[group]
        settings = spei_recipe.processor_settings[variable["preprocessor"]]
        if "multi_model_statistics" in settings:
            # Append one virtual dataset per requested statistic (in place).
            for op in settings["multi_model_statistics"]["statistics"]:
                v_datasets.append({
                    "alias": f"multi-model-{op}",
                    "variable_group": group,
                })
        processor_tasks.extend(
            get_tasks_for_variable(
                variable=variable,
                datasets=v_datasets,
                diagnostic={"diagnostic": diagnostic_name},
                config={"data_path": spei_config.data_path},
                work_dir=work_dir,
            )
        )

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Run one combine_metadata task per variable group of the diurnal recipe."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case102/ploto"

    # Tag every experiment dataset with a unique alias and its recipe index.
    exp_datasets = diurnal_recipe.exp_datasets
    exp_datasets = [
        {
            **d,
            "alias": f"{d['dataset']}-{d['exp']}",
            "recipe_dataset_index": index,
        }
        for index, d in enumerate(exp_datasets)
    ]
    variables = diurnal_recipe.variables

    # "{work_dir}" is a literal placeholder expanded later by the processor —
    # only the path tail is formatted here.
    tasks = [
        {
            "util_type": "combine_metadata",
            "metadata_files": [
                "{work_dir}"
                + f"/processor/preproc/{d['alias']}/{v['variable_group']}/metadata.yml"
                for d in exp_datasets
            ],
            "output_directory": (
                "{work_dir}" + f"/processor/preproc/{v['variable_group']}"
            ),
        }
        for v in variables
    ]

    for task in tasks:
        run_processor(task=task, work_dir=work_dir, config={})
def main():
    """Run one processor task per ClimWIP graph variable."""
    work_dir = f"{base_dir}/graph/processor"
    variables = climwip_recipe.graph_variables
    for variable in variables:
        task = get_task(work_dir, variable)
        run_processor(task=task, work_dir=work_dir, config={})
def main():
    """Run one processor task per ClimWIP map variable."""
    work_dir = f"{base_dir}/map/processor"
    for variable in climwip_recipe.map_variables:
        run_processor(
            task=get_task(work_dir, variable),
            work_dir=work_dir,
            config={},
        )
def main():
    """Build and run Bock20 fig3 preprocessor tasks.

    Variables whose preprocessor settings request multi_model_statistics get
    synthetic "multi-model-<op>" dataset entries appended so the statistic
    outputs are processed like real datasets.
    """
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case108/ploto"
    Path(work_dir).mkdir(parents=True, exist_ok=True)

    # recipe
    variables = bock20_recipe.fig3.variables
    variable_additional_datasets = bock20_recipe.fig3.variable_additional_datasets
    additional_datasets = bock20_recipe.fig3.additional_datasets

    # get all datasets, keyed by variable group
    datasets = get_datasets(
        variables=variables,
        variable_additional_datasets=variable_additional_datasets,
        additional_datasets=additional_datasets,
    )

    processor_tasks = []
    for variable in variables:
        group = variable["variable_group"]
        v_datasets = datasets[group]
        preprocessor = variable.get("preprocessor", "default")
        settings = bock20_recipe.processor_settings.get(preprocessor, {})
        if "multi_model_statistics" in settings:
            # Append one virtual dataset per requested statistic (in place).
            for op in settings["multi_model_statistics"]["statistics"]:
                v_datasets.append({
                    "alias": f"multi-model-{op}",
                    "variable_group": group,
                })
        processor_tasks.extend(
            get_tasks_for_variable(
                variable=variable,
                datasets=v_datasets,
                diagnostic={"diagnostic": diagnostic_name},
                config={"data_path": bock20_config.data_path},
                work_dir=work_dir,
            )
        )

    for task in processor_tasks:
        run_processor(task, config={}, work_dir=work_dir)
def main():
    """Combine metadata for every (dataset, variable) pair of the MiLES block diagnostic."""
    work_dir = "/home/hujk/ploto/esmvaltool/cases/case103/ploto"
    diagnostic_name = "miles_block"

    preproc_root = f"{work_dir}/{diagnostic_name}/processor/preproc/"
    metadata_files = [
        f"{work_dir}/{diagnostic_name}/processor/preproc/"
        f"{d['dataset']}/{v['variable_group']}/metadata.yml"
        for d, v in itertools.product(
            miles_recipe.exp_datasets, miles_recipe.variables
        )
    ]
    tasks = [
        {
            "util_type": "combine_metadata",
            "metadata_files": metadata_files,
            "output_directory": preproc_root,
        }
    ]

    for task in tasks:
        run_processor(task=task, work_dir=work_dir, config={})