Example #1
def test_Dataset(test_file):
    dataset, request, key_count = TEST_FILES[test_file]
    # fetch the requested data through the cdscommon helper; path points at the local GRIB file
    path = cdscommon.ensure_data(dataset,
                                 request,
                                 name="cds-" + test_file + "-{uuid}.grib")

    # open the GRIB file with cfgrib's xarray backend and write it back out as netCDF
    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + ".nc")  # swap the '.grib' suffix for '.nc'
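
All of the snippets in this listing are excerpts that lean on context not shown here: a module-level TEST_FILES dictionary mapping a short name to a (dataset, request, key_count) tuple, the cdscommon helper that fetches the requested CDS data and returns a local path, and a pytest parametrize decorator that supplies test_file. Below is a minimal sketch of that assumed setup; the dataset name, request body, key count and decorator are illustrative guesses, not values taken from the original suite.

import pytest

import cdscommon  # local test helper that wraps the CDS API download
import cfgrib
import cfgrib.xarray_store

# Hypothetical entry: short name -> (CDS dataset name, request dict, expected key count)
TEST_FILES = {
    'era5-single-levels-reanalysis': (
        'reanalysis-era5-single-levels',
        {
            'product_type': 'reanalysis',
            'variable': '2m_temperature',
            'year': '2017',
            'month': '01',
            'day': '01',
            'time': ['00:00', '12:00'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        193,  # placeholder: number of ecCodes keys expected in the first message
    ),
}


# each test in this listing is assumed to be parametrized over the keys of TEST_FILES
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_Dataset(test_file):
    ...  # body as in Example #1 above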
Example #2
def test_reanalysis_Dataset(test_file):
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset,
                                 request,
                                 name='cds-' + test_file + '-{uuid}.grib')

    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + '.nc')
Example #3
def test_reanalysis_Stream(test_file):
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset, request, name=test_file + '{ext}')

    # read the GRIB file message by message: the first message must expose the
    # expected number of keys, and the total number of messages in the stream
    # must match the first message's 'count' key
    stream = cfgrib.Stream(path)
    leader = stream.first()
    assert len(leader) == key_count
    assert sum(1 for _ in stream) == leader['count']
Example #4
def test_Stream(test_file):
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')

    stream = cfgrib.FileStream(path)
    leader = stream.first()
    assert len(leader) == key_count
    assert sum(1 for _ in stream) == leader['count']
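
For reference, the same FileStream check can be exercised outside pytest against any local GRIB file. A minimal standalone sketch, assuming a file named example.grib (a hypothetical path) and only the cfgrib.FileStream calls already used above:

import cfgrib

stream = cfgrib.FileStream('example.grib')
leader = stream.first()  # first GRIB message, usable as a mapping of ecCodes keys
print(len(leader), 'keys in the first message')
print(sum(1 for _ in stream), 'messages in the file')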
Example #5
def test_large_Dataset():
    dataset, request, key_count = TEST_FILES['seasonal-original-pressure-levels-ecmwf']
    # make the request large: every 24 hours of lead time from step 720 to 1440,
    # on a 1x1 degree grid
    request['leadtime_hour'] = list(range(720, 1445, 24))
    request['grid'] = ['1', '1']
    path = cdscommon.ensure_data(dataset, request, name='cds-' + dataset + '-LARGE-{uuid}.grib')

    # convert the large GRIB file to netCDF via cfgrib's xarray backend
    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + '.nc')
Example #6
def test_large_Dataset():
    dataset, request, key_count = TEST_FILES["era5-pressure-levels-ensemble_members"]
    # make the request large
    request["day"] = list(range(1, 32))
    request["time"] = list(["%02d:00" % h for h in range(0, 24, 3)])
    path = cdscommon.ensure_data(dataset, request, name="cds-" + dataset + "-LARGE-{uuid}.grib")

    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + ".nc")
Example #7
def test_reanalysis_Stream(test_file):
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset,
                                 request,
                                 name="cds-" + test_file + "-{uuid}.grib")

    stream = cfgrib.FileStream(path)
    leader = stream.first()
    assert len(leader) == key_count
    assert sum(1 for _ in stream) == leader["count"]
Example #8
def test_large_Dataset():
    dataset, request, key_count = TEST_FILES[
        'era5-pressure-levels-ensemble_members']
    # make the request large
    request['day'] = list(range(1, 32))
    request['time'] = list(['%02d:00' % h for h in range(0, 24, 3)])
    path = cdscommon.ensure_data(dataset,
                                 request,
                                 name='cds-' + dataset + '-LARGE-{uuid}.grib')

    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + '.nc')
Example #9
def test_large_Dataset():
    dataset, request, key_count = TEST_FILES[
        "seasonal-original-pressure-levels-ecmwf"]
    # make the request large
    request["leadtime_hour"] = list(range(720, 1445, 24))
    request["grid"] = ["1", "1"]
    path = cdscommon.ensure_data(dataset,
                                 request,
                                 name="cds-" + dataset + "-LARGE-{uuid}.grib")

    res = cfgrib.xarray_store.open_dataset(path)
    res.to_netcdf(path[:-5] + ".nc")
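
The Dataset examples all convert the downloaded GRIB file to netCDF by opening it with cfgrib.xarray_store.open_dataset and calling to_netcdf. The same round trip can also be written against the public xarray interface, with cfgrib selected as the engine; a minimal sketch, assuming a local file example.grib (a hypothetical path):

import xarray as xr

# cfgrib registers itself as an xarray backend, so the GRIB file can be opened
# directly through xarray and written back out as netCDF
ds = xr.open_dataset('example.grib', engine='cfgrib')
ds.to_netcdf('example.nc')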