def test_recipe_info():
    """Coverage test for RecipeInfo."""
    recipe = get_recipe('examples/recipe_python')
    # All three textual representations must render to a string.
    for rendered in (repr(recipe), str(recipe), recipe.to_markdown()):
        assert isinstance(rendered, str)
def test_recipe():
    """Coverage test for Recipe."""
    # Tags must be loaded before the recipe can be rendered.
    TAGS.set_tag_values(DIAGNOSTICS.load_tags())
    recipe = get_recipe('examples/recipe_python')
    # All three textual representations must render to a string.
    for rendered in (repr(recipe), str(recipe), recipe.render()):
        assert isinstance(rendered, str)
def generate(  # type: ignore
    cls,
    dataset: str,
    start_time: str,
    end_time: str,
    shape: str,
) -> "MarrmotForcing":
    """Generate Marrmot forcing data by running an ESMValTool recipe.

    None: Marrmot does not have model-specific generate options.
    """
    # Load the ESMValTool recipe that produces Marrmot forcing.
    recipe = get_recipe("hydrology/recipe_marrmot.yml")

    # Model-specific updates: shapefile, basin name and dataset.
    basin = to_absolute_path(shape).stem
    recipe.data["preprocessors"]["daily"]["extract_shape"][
        "shapefile"] = shape
    diagnostic = recipe.data["diagnostics"]["diagnostic_daily"]
    diagnostic["scripts"]["script"]["basin"] = basin
    diagnostic["additional_datasets"] = [DATASETS[dataset]]

    # Restrict every forcing variable to the requested period.
    variables = diagnostic["variables"]
    first_year = get_time(start_time).year
    last_year = get_time(end_time).year
    for name in ("tas", "pr", "psl", "rsds", "rsdt"):
        variables[name]["start_year"] = first_year
        variables[name]["end_year"] = last_year

    # Run the recipe and locate the generated forcing file.
    recipe_output = recipe.run()
    forcing_file: Path = next(iter(recipe_output.values())).files[0].path
    directory = str(Path(forcing_file).parent)

    # Instantiate the forcing object based on the generated data.
    return MarrmotForcing(
        directory=directory,
        start_time=start_time,
        end_time=end_time,
        shape=shape,
        forcing_file=forcing_file.name,
    )
def generate(  # type: ignore
    cls,
    dataset: str,
    start_time: str,
    end_time: str,
    shape: str,
) -> "HypeForcing":
    """Generate Hype forcing data by running an ESMValTool recipe.

    None: Hype does not have model-specific generate options.
    """
    # load the ESMValTool recipe
    recipe_name = "hydrology/recipe_hype.yml"
    recipe = get_recipe(recipe_name)

    # model-specific updates to the recipe: point every preprocessor at
    # the (absolute) shapefile used for the spatial selection
    preproc_names = ("preprocessor", "temperature", "water")
    for preproc_name in preproc_names:
        recipe.data["preprocessors"][preproc_name]["extract_shape"][
            "shapefile"] = to_absolute_path(shape)

    recipe.data["datasets"] = [DATASETS[dataset]]

    # restrict every forcing variable to the requested period
    variables = recipe.data["diagnostics"]["hype"]["variables"]
    var_names = "tas", "tasmin", "tasmax", "pr"
    startyear = get_time(start_time).year
    endyear = get_time(end_time).year
    for var_name in var_names:
        variables[var_name]["start_year"] = startyear
        variables[var_name]["end_year"] = endyear

    # generate forcing data and retrieve useful information
    recipe_output = recipe.run()

    # TODO return files created by ESMValTOOL which are needed by Hype Model
    # forcing_path = list(recipe_output['...........']).data_files[0]
    forcing_path = "/foobar.txt"
    # placeholder name, currently unused; kept for the TODO above
    forcing_file = Path(forcing_path).name
    # BUG FIX: take the parent of the full path, not of the bare file
    # name — Path(forcing_file).parent is always "." for a bare name
    directory = str(Path(forcing_path).parent)

    # instantiate forcing object based on generated data
    return HypeForcing(
        directory=directory,
        start_time=str(startyear),
        end_time=str(endyear),
        shape=shape,
    )
def generate(  # type: ignore
    cls,
    dataset: str,
    start_time: str,
    end_time: str,
    shape: str,
    extract_region: dict = None,
    run_lisvap: dict = None,
) -> "LisfloodForcing":
    """Generate Lisflood forcing data by running an ESMValTool recipe.

    extract_region (dict): Region specification, dictionary must
        contain `start_longitude`, `end_longitude`, `start_latitude`,
        `end_latitude`
    run_lisvap (dict): Lisvap specification. Default is None. If lisvap
        should be run then give a dictionary with following key/value
        pairs:

        - lisvap_config: Name of Lisvap configuration file.
        - mask_map: A mask for the spatial selection. This file should
          have same extent and resolution as parameter-set.
        - version: LISVAP/LISFLOOD model version supported by
          ewatercycle. Pick from
          :py:obj:`~ewatercycle.models.lisflood.Lisflood.available_versions`.
        - parameterset_dir: Directory of the parameter set. Directory
          should contains the Lisvap config file and files the config
          points to.
    """
    # load the ESMValTool recipe
    recipe_name = "hydrology/recipe_lisflood.yml"
    recipe = get_recipe(recipe_name)

    # model-specific updates to the recipe: every preprocessor gets the
    # same shapefile and (further below) the same extraction region
    preproc_names = (
        "general",
        "daily_water",
        "daily_temperature",
        "daily_radiation",
        "daily_windspeed",
    )
    # basin/catchment name is derived from the shapefile stem
    basin = to_absolute_path(shape).stem
    for preproc_name in preproc_names:
        recipe.data["preprocessors"][preproc_name]["extract_shape"][
            "shapefile"
        ] = shape
    recipe.data["diagnostics"]["diagnostic_daily"]["scripts"]["script"][
        "catchment"
    ] = basin
    # default region is the bounding box of the shapefile
    if extract_region is None:
        extract_region = get_extents(shape)
    for preproc_name in preproc_names:
        recipe.data["preprocessors"][preproc_name][
            "extract_region"
        ] = extract_region
    recipe.data["datasets"] = [DATASETS[dataset]]

    # restrict every forcing variable to the requested period
    variables = recipe.data["diagnostics"]["diagnostic_daily"]["variables"]
    var_names = "pr", "tas", "tasmax", "tasmin", "tdps", "uas", "vas", "rsds"
    startyear = get_time(start_time).year
    for var_name in var_names:
        variables[var_name]["start_year"] = startyear
    endyear = get_time(end_time).year
    for var_name in var_names:
        variables[var_name]["end_year"] = endyear

    # set crop to false to keep the entire globe (time consuming)
    # because lisflood parameter set is global i.e.
    # recipe.data["preprocessors"]["general"]["extract_shape"]["crop"] = False
    # However, lisflood diagnostics line 144 gives error
    # ValueError: The 'longitude' DimCoord points array must be strictly monotonic.

    # generate forcing data and retrieve useful information
    recipe_output = recipe.run()
    directory, forcing_files = data_files_from_recipe_output(recipe_output)

    if run_lisvap:
        # Get lisvap specific options and make paths absolute
        lisvap_config = str(to_absolute_path(run_lisvap["lisvap_config"]))
        mask_map = str(to_absolute_path(run_lisvap["mask_map"]))
        version = run_lisvap["version"]
        parameterset_dir = str(to_absolute_path(run_lisvap["parameterset_dir"]))

        # Reindex data because recipe cropped the data
        # Also, create a sub dir for reindexed dataset because xarray does not
        # let to overwrite!
        reindexed_forcing_directory = Path(f"{directory}/reindexed")
        reindexed_forcing_directory.mkdir(parents=True, exist_ok=True)
        for var_name in {"pr", "tas", "tasmax", "tasmin", "sfcWind", "rsds", "e"}:
            reindex(
                f"{directory}/{forcing_files[var_name]}",
                var_name,
                mask_map,
                f"{reindexed_forcing_directory}/{forcing_files[var_name]}",
            )
        # Add lisvap file names (outputs lisvap is expected to produce)
        for var_name in {"e0", "es0", "et0"}:
            forcing_files[
                var_name
            ] = f"lisflood_{dataset}_{basin}_{var_name}_{startyear}_{endyear}.nc"
        # write the lisvap configuration, then run lisvap on the
        # reindexed forcing
        config_file = create_lisvap_config(
            parameterset_dir,
            str(reindexed_forcing_directory),
            dataset,
            lisvap_config,
            mask_map,
            start_time,
            end_time,
            forcing_files,
        )
        lisvap(
            version,
            parameterset_dir,
            str(reindexed_forcing_directory),
            mask_map,
            config_file,
        )
        # TODO add a logger message about the results of lisvap using
        # exit_code, stdout, stderr
        # Instantiate forcing object based on generated data
        return LisfloodForcing(
            directory=str(reindexed_forcing_directory),
            start_time=start_time,
            end_time=end_time,
            shape=shape,
            PrefixPrecipitation=forcing_files["pr"],
            PrefixTavg=forcing_files["tas"],
            PrefixE0=forcing_files["e0"],
            PrefixES0=forcing_files["es0"],
            PrefixET0=forcing_files["et0"],
        )
    else:
        # without lisvap the evaporation prefixes are left at their
        # defaults; warn the caller about the missing outputs
        message = (
            "Parameter `run_lisvap` is set to False. No forcing data will be "
            "generated for 'e0', 'es0' and 'et0'. However, the recipe creates "
            f"LISVAP input data that can be found in {directory}."
        )
        logger.warning("%s", message)
        # instantiate forcing object based on generated data
        return LisfloodForcing(
            directory=directory,
            start_time=start_time,
            end_time=end_time,
            shape=shape,
            PrefixPrecipitation=forcing_files["pr"],
            PrefixTavg=forcing_files["tas"],
        )
def recipe():
    """Return the recipe used by the API tests."""
    return get_recipe(Path(__file__).with_name('recipe_api_test.yml'))
def generate(  # type: ignore
    cls,
    dataset: str,
    start_time: str,
    end_time: str,
    shape: str,
    dem_file: str,
    extract_region: Dict[str, float] = None,
) -> "WflowForcing":
    """Generate Wflow forcing data by running an ESMValTool recipe.

    dem_file (str): Name of the dem_file to use. Also defines the
        basin param.
    extract_region (dict): Region specification, dictionary must
        contain `start_longitude`, `end_longitude`, `start_latitude`,
        `end_latitude`
    """
    # Load the ESMValTool recipe that produces Wflow forcing.
    recipe = get_recipe("hydrology/recipe_wflow.yml")

    # Model-specific updates: basin name and DEM file for the script.
    script = recipe.data["diagnostics"]["wflow_daily"]["scripts"]["script"]
    script["basin"] = to_absolute_path(shape).stem
    script["dem_file"] = dem_file

    # Default cutout region is the bounding box of the shapefile.
    if extract_region is None:
        extract_region = get_extents(shape)
    recipe.data["preprocessors"]["rough_cutout"]["extract_region"] = extract_region
    recipe.data["diagnostics"]["wflow_daily"]["additional_datasets"] = [
        DATASETS[dataset]
    ]

    # Restrict every forcing variable to the requested period.
    variables = recipe.data["diagnostics"]["wflow_daily"]["variables"]
    first_year = get_time(start_time).year
    last_year = get_time(end_time).year
    for name in ("tas", "pr", "psl", "rsds", "rsdt"):
        variables[name]["start_year"] = first_year
        variables[name]["end_year"] = last_year

    # Run the recipe and locate the generated forcing file.
    recipe_output = recipe.run()
    forcing_file = recipe_output["wflow_daily/script"].data_files[0].path
    directory = str(forcing_file.parent)

    # Instantiate the forcing object based on the generated data.
    return WflowForcing(
        directory=directory,
        start_time=start_time,
        end_time=end_time,
        shape=shape,
        netcdfinput=forcing_file.name,
    )
def generate(  # type: ignore
    cls,
    dataset: str,
    start_time: str,
    end_time: str,
    shape: str,
    start_time_climatology: str,  # TODO make optional, default to start_time
    end_time_climatology: str,  # TODO make optional, defaults to start_time + 1 y
    extract_region: dict = None,
) -> "PCRGlobWBForcing":
    """Generate PCRGlobWB forcing data by running an ESMValTool recipe.

    start_time_climatology (str): Start time for the climatology data
    end_time_climatology (str): End time for the climatology data
    extract_region (dict): Region specification, dictionary must
        contain `start_longitude`, `end_longitude`, `start_latitude`,
        `end_latitude`
    """
    # Load the ESMValTool recipe that produces PCRGlobWB forcing.
    recipe = get_recipe("hydrology/recipe_pcrglobwb.yml")

    # Model-specific updates: dataset, basin name and cutout region.
    preprocessors = (
        "crop_basin",
        "preproc_pr",
        "preproc_tas",
        "preproc_pr_clim",
        "preproc_tas_clim",
    )
    diagnostic = recipe.data["diagnostics"]["diagnostic_daily"]
    if dataset is not None:
        diagnostic["additional_datasets"] = [DATASETS[dataset]]
    diagnostic["scripts"]["script"]["basin"] = to_absolute_path(shape).stem

    # Default region is the bounding box of the shapefile; apply it to
    # every preprocessor.
    if extract_region is None:
        extract_region = get_extents(shape)
    for name in preprocessors:
        recipe.data["preprocessors"][name]["extract_region"] = extract_region

    # Restrict the forcing variables to the requested period and the
    # climatology variables to the climatology period.
    variables = diagnostic["variables"]
    first_year = get_time(start_time).year
    last_year = get_time(end_time).year
    for name in ("tas", "pr"):
        variables[name]["start_year"] = first_year
        variables[name]["end_year"] = last_year
    first_year_climatology = get_time(start_time_climatology).year
    last_year_climatology = get_time(end_time_climatology).year
    for name in ("pr_climatology", "tas_climatology"):
        variables[name]["start_year"] = first_year_climatology
        variables[name]["end_year"] = last_year_climatology

    # Run the recipe and locate the generated forcing files.
    recipe_output = recipe.run()
    # TODO dont open recipe output, but use standard name from ESMValTool
    directory, forcing_files = data_files_from_recipe_output(recipe_output)

    # Instantiate the forcing object based on the generated data.
    return PCRGlobWBForcing(
        directory=directory,
        start_time=start_time,
        end_time=end_time,
        shape=shape,
        precipitationNC=forcing_files["pr"],
        temperatureNC=forcing_files["tas"],
    )