def merge(date):
    """Load the 2t and tp s2s-ai-competition fields for *date*, print
    diagnostics for each, and print the merged xarray dataset."""
    parts = []
    for param in ("2t", "tp"):
        loaded = cml.load_dataset(
            "s2s-ai-competition", date=date, parameter=param, version=version
        )
        parts.append(loaded.to_xarray())

    def check(ds):
        # One-line summary: dimension sizes and variable names.
        print(dict(ds.dims), list(ds.keys()))

    for part in parts:
        check(part)
    merged = xr.merge(parts)
    print("-- Merged into --")
    check(merged)
    # Leftover notebook expression: evaluates and discards the step coordinates.
    parts[0].step.values, parts[1].step.values
def test_era5_temperature():
    """Fetch a small ERA5 temperature selection (requires CDS credentials)."""
    # Without a CDS API key there is nothing to test.
    if not os.path.exists(os.path.expanduser("~/.cdsapirc")):
        pytest.skip("No ~/.cdsapirc")
    cml.load_dataset(
        "era5-temperature", period=(1979, 1982), domain="France", time=12
    )
def test_grib_index_eumetnet_with_plugin():
    """The EUMETNET EFI gridded-forecast benchmark loads and converts to xarray."""
    dataset = cml.load_dataset(
        "eumetnet-postprocessing-benchmark-training-data-gridded-forecasts-efi",
        date="2017-12-28",
        parameter="2ti",
    )
    converted = dataset.to_xarray()
    print(converted)
def test_read():
    """Smoke-test: the cookiecutter plugin's dataset loads and converts to xarray."""
    dataset = cml.load_dataset(
        "{{ cookiecutter.plugin_name }}-{{ cookiecutter.dataset_name }}",
        year="2021",
        parameter="t2m",
    )
    converted = dataset.to_xarray()
    print(converted)
def test_datetime():
    """An ERA5 dataset can be indexed by a datetime string (needs CDS credentials)."""
    # Without a CDS API key there is nothing to test.
    if not os.path.exists(os.path.expanduser("~/.cdsapirc")):
        pytest.skip("No ~/.cdsapirc")
    data = cml.load_dataset(
        "era5-temperature", domain="france", period=(1980,), time=12
    )
    data["1980-12-09 12:00"]
def test_datetime():
    """Datetime item access works; .sel() by date is expected to be unimplemented."""
    # Without a CDS API key there is nothing to test.
    if not os.path.exists(os.path.expanduser("~/.cdsapirc")):
        pytest.skip("No ~/.cdsapirc")
    data = cml.load_dataset(
        "era5-temperature", domain="france", period=(1980,), time=12
    )
    data["1980-12-09 12:00"]
    with pytest.raises(NotImplementedError):
        data.sel(date="1980-12-09 12:00")
def __init__(self, dt="time", basins=("atlantic", "pacific")):
    """Build a combined hurricane track table from one or more basins.

    Parameters
    ----------
    dt : str
        Name of the datetime column used for sorting (default ``"time"``).
    basins : iterable of str
        Basins to load from the ``hurricane-database`` dataset.

    Fixes over the previous version:
    - ``basins`` default was a mutable list (shared across calls); it is now
      a tuple, which iterates identically and is backward compatible.
    - ``sort_values`` returns a new DataFrame; the result was previously
      discarded, so the table was never actually sorted. It is now assigned
      back to ``self._df``.
    """
    self._dt = dt
    # Load each basin once; concatenating a list in one call avoids the
    # quadratic re-concatenation of the previous incremental loop.
    frames = [
        load_dataset("hurricane-database", basin).to_pandas() for basin in basins
    ]
    if not frames:
        self._df = pd.DataFrame()
    elif len(frames) == 1:
        # Single basin: keep the frame as-is (no index reset), as before.
        self._df = frames[0]
    else:
        self._df = pd.concat(frames, ignore_index=True)
    # The columns the rest of the class relies on must all be present.
    assert all(c in self._df.columns for c in (dt, "lat", "lon", "pressure"))
    self._df = self._df.sort_values(by=dt)
def merge_multiple_dates(dates):
    """Load the 2t field for every date in *dates*, print diagnostics for
    each piece, and print the merged xarray dataset."""
    fields = []
    for one_date in dates:
        loaded = cml.load_dataset(
            "s2s-ai-competition", date=one_date, parameter="2t", version=version
        )
        fields.append(loaded.to_xarray())

    def check(ds):
        # One-line summary: dimension sizes and variable names.
        print(dict(ds.dims), list(ds.keys()))

    for field in fields:
        check(field)
    merged = xr.merge(fields)
    print("-- Merged into --")
    check(merged)
def test_tfdataset_2():
    """Train a tiny Keras classifier on the high-low dataset for one epoch."""
    from tensorflow.keras.layers import Dense, Flatten, Input
    from tensorflow.keras.models import Sequential

    ds = cml.load_dataset("high-low")
    train, test = ds.to_tfdataset(split=["train", "test"])

    # Input shape is taken from the dataset's element spec (last two dims).
    shape = train.element_spec[0].shape

    model = Sequential(
        [
            Input(shape=(shape[-2], shape[-1])),
            Flatten(),
            Dense(64, activation="sigmoid"),
            Dense(4, activation="softmax"),
        ]
    )
    model.compile(
        optimizer="adam",
        loss="categorical_crossentropy",
        metrics=["accuracy"],
    )
    print(model.summary())
    model.fit(train, epochs=1, verbose=0)
    model.evaluate(test)
def test_dataset_1():
    """The sample BUFR dataset can be fetched from the catalogue."""
    load_dataset("sample-bufr-data")
def test_register_dataset():
    """A dataset class registered at runtime is retrievable by name."""
    register_dataset("test-register-dataset", RegisteredDataset)
    loaded = cml.load_dataset("test-register-dataset", shape=(3, 3))
    assert loaded.to_numpy().shape == (3, 3)
def wrapped(*args, **kwargs):
    """Forward to ``climetlab.load_dataset``, mapping underscores in the
    captured ``name`` to the dashes the catalogue uses."""
    dataset_name = name.replace("_", "-")
    return climetlab.load_dataset(dataset_name, *args, **kwargs)
import climetlab as cml # cml.settings.set("plotting-options", {'dump_yaml': True}) dataset = cml.load_dataset("example-dataset") data = dataset[0] cml.plot_map(data, foreground=False) cml.plot_map(data, foreground=True) cml.plot_map(data, foreground="example-foreground") cml.plot_map( data, foreground=dict( map_grid=False, map_label=False, map_grid_frame=True, map_grid_frame_thickness=5, map_boundaries=True, ), ) # Partial update of the current `foreground` # How to do is still to be decided # Option 1 cml.plot_map( data, foreground={
def test_read_rt_2dates():
    """The s2s-ai-competition dataset accepts a list of dates."""
    # NOTE(review): both entries are the same date even though the test name
    # says "2dates" — presumably a second distinct date was intended; confirm
    # against the dataset's available dates before changing.
    ds = cml.load_dataset("s2s-ai-competition", date=["20200102", "20200102"])
    xds = ds.to_xarray()
    print(xds)
    sst = xds.sel()
def test_read_rt_tp_and_2t():
    """Precipitation and 2m temperature load together for a single date."""
    dataset = cml.load_dataset(
        "s2s-ai-competition", date="20200102", parameter=["tp", "2t"]
    )
    converted = dataset.to_xarray()
    print(converted)
def test_samples():
    """The Meteonet radar samples can be fetched."""
    cml.load_dataset("meteonet-samples-radar")
def test_remote_dataset_from_climetlab_catalog():
    """A NetCDF sample registered in the remote catalogue is loadable."""
    load_dataset("sample-netcdf-data")
def test_unknown_dataset():
    """Requesting a dataset that does not exist raises NameError."""
    with pytest.raises(NameError):
        load_dataset("do-not-exist-lkj45a45qsdf3")
def test_pandas_filter():
    """The hurricane database can be filtered down to one named storm."""
    # "bassin" is the keyword this dataset expects — leave as-is.
    atlantic = cml.load_dataset("hurricane-database", bassin="atlantic")
    records = atlantic.to_pandas(name="irma", year=2017)
    # Hurricane Irma (2017) has 66 track records in the database.
    assert len(records) == 66
#!/usr/bin/env python
# coding: utf-8

# Exported from a notebook cell.

import climetlab as cml

# Fetch ERA5 temperature over France at 12:00 for the years 1979-1982.
ds = cml.load_dataset(
    "era5-temperature", period=(1979, 1982), domain="France", time=12
)
def test_numpy_1():
    """A WeatherBench geopotential field can be plotted from raw numpy values."""
    bench = cml.load_dataset("weather-bench")
    z500 = bench.to_xarray()
    # Take the first field of the selected day as a plain numpy array.
    day_values = z500.sel({"time": "1979-01-01"}).z.values
    cml.plot_map(day_values[0], metadata=z500.z)
def test_read_hc():
    """The hindcast variant of the s2s-ai-competition dataset loads."""
    dataset = cml.load_dataset(
        "s2s-ai-competition", date="20200102", hindcast=True
    )
    converted = dataset.to_xarray()
    print(converted)
def test_read_zarr():
    """The zarr-format distribution of the dataset converts to xarray."""
    dataset = cml.load_dataset(
        "s2s-ai-competition", version="0.1.7", format="zarr"
    )
    converted = dataset.to_xarray()
    print(converted)
def test_load_dataset_meteonet_sample_masks():
    """The Meteonet mask samples for the SE domain can be fetched."""
    cml.load_dataset("meteonet-samples-masks", domain="SE")
import climetlab as cml

# Load the Atlantic hurricane track database and plot hurricane Irma (2017).
data = cml.load_dataset("hurricane-database", "atlantic")
print(data.home_page)
irma = data.to_pandas(name="irma", year=2017)
cml.plot_map(irma)