Example #1
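All of the snippets below are drawn from pyposeidon's test suite and share a preamble that this listing omits. A sketch of the presumed imports follows; the aliases are inferred from usage (both pmeteo and pm refer to pyposeidon.meteo), and names such as DATA_DIR, DATASET, METEO_FILES_2, case and check are assumed to be pytest fixtures or module-level constants defined elsewhere:

import datetime
import glob
from typing import List

import numpy as np
import pandas as pd
import xarray as xr

import pyposeidon
import pyposeidon.meteo as pmeteo
import pyposeidon.meteo as pm
import pyposeidon.model as pmodel
from pyposeidon.utils import cast, data
from pyposeidon.utils.get_value import get_value  # assumed location; used by the force() snippet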
def test_meteo(tmpdir, name):
    rpath = str(tmpdir) + "/"
    filename = (DATA_DIR / name).as_posix()
    d = pmeteo.Meteo(filename)
    d.to_output(solver_name="schism", rpath=rpath, meteo_split_by="day")
    d.to_output(solver_name="schism", rpath=rpath, filename="all.nc")

    # read schism meteo files
    files = glob.glob(rpath + "sflux/*.nc")
    files.sort()
    ma = []
    for ifile in files:
        g = xr.open_dataset(ifile)
        # sflux files carry the reference date in the `base_date` attribute
        ts = "-".join(g.time.attrs["base_date"].astype(str)[:3])
        time_r = pd.to_datetime(ts)
        times = time_r + pd.to_timedelta(g.time.values, unit="D").round("H")
        g = g.assign_coords({"time": times})
        ma.append(g)

    b = xr.merge(ma)
    b.close()

    # convert back to SCHISM's time coordinate (days since the first time step)
    tlist = pd.to_datetime(b.time.data) - pd.to_datetime(b.time.data[0])
    tlist = tlist / pd.to_timedelta("1D")

    b = b.assign_coords({"time": tlist})

    al = xr.open_dataset(rpath + "all.nc")

    assert b.equals(al)
Example #2
def test_d3d(tmpdir, input_name):
    filename = (DATA_DIR / input_name).as_posix()
    # read meteo file
    df = pmeteo.Meteo(meteo_source=filename)

    rpath = str(tmpdir) + "/"
    # output to uvp files
    df.to_output(solver_name="d3d", rpath=rpath)

    # read the meteo back in
    m = pmodel.set(solver_name="d3d")

    p = m.from_force(rpath + "p.amp", "msl")
    u = m.from_force(rpath + "u.amu", "u10")
    v = m.from_force(rpath + "v.amv", "v10")

    dr = xr.merge([p, u, v])
    dr = dr.sortby("latitude", ascending=True)

    # compare
    df.Dataset = df.Dataset.sortby("latitude", ascending=True)

    assert np.abs(df.Dataset.msl.values - dr.msl.values).max() < 1e-3
    assert np.abs(df.Dataset.u10.values - dr.u10.values).max() < 1e-3
    assert np.abs(df.Dataset.v10.values - dr.v10.values).max() < 1e-3
Example #3
    def force(self, **kwargs):

        meteo_source = get_value(self, kwargs, "meteo_source", None)

        kwargs.update({"meteo_source": meteo_source})

        flag = get_value(self, kwargs, "update", [])
        # check if files exist

        z = {**self.__dict__, **kwargs}  # merge self and possible kwargs

        if flag:
            if ("meteo" in flag) | ("all" in flag):
                self.meteo = pmeteo.Meteo(**z)
            else:
                logger.info("skipping meteo files ..\n")
        else:
            self.meteo = pmeteo.Meteo(**z)
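A hypothetical driver for the method above, assuming it is bound to a model instance as in the snippet; the `case` dict and the "mesh" flag value are illustrative assumptions:

b = pyposeidon.model.set(**case)  # `case` is an assumed settings dict
b.force()                         # empty update flag: the meteo forcing is (re)created
b.force(update=["meteo"])         # rebuild the meteo forcing explicitly
b.force(update=["mesh"])          # flag set without "meteo"/"all": meteo is skipped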
Example #4
def test_meteo_url():
    geometry = {
        "lon_min": -25.0,  # lat/lon window
        "lon_max": -9.0,
        "lat_min": 56.0,
        "lat_max": 74.0,
    }
    cdate = pd.to_datetime("today") - pd.DateOffset(
        days=1)  # step back one day for availability.
    r = [0, 6, 12, 18]
    h = np.argmin([n for n in [cdate.hour - x for x in r] if n > 0])
    url = "https://nomads.ncep.noaa.gov/dods/gfs_0p25_1hr/gfs{}/gfs_0p25_1hr_{:0>2d}z".format(
        cdate.strftime("%Y%m%d"), r[h])
    meteo = pmeteo.Meteo(meteo_source=url, **geometry)
    assert isinstance(meteo.Dataset, xr.Dataset)
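The cycle-selection arithmetic above can be checked in isolation: subtracting each run hour from the current hour and keeping only the positive offsets leaves a prefix of r (the offsets decrease), so the argmin maps straight back to the most recent 00/06/12/18z cycle strictly before now. Note that it would raise at exactly 00z, when no offset is positive. A minimal sketch with a fixed hour:

import numpy as np

r = [0, 6, 12, 18]
hour = 14                                              # pretend "now" is 14:00 UTC
offsets = [n for n in [hour - x for x in r] if n > 0]  # [14, 8, 2]
h = np.argmin(offsets)                                 # index 2 -> smallest positive offset
print(r[h])                                            # 12 -> use the 12z cycle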
Example #5
def test_merge_strategy_last(meteo_paths, meteo_datasets):
    # In strategy "last" we want:
    # - the first 12 hours of all the datasets
    # - the rest of the hours of the last one
    expected = xr.concat(
        [
            *[ds.isel(time=slice(0, 12)) for ds in meteo_datasets],
            meteo_datasets[-1].isel(time=slice(12, None)),
        ],
        dim="time",
    )
    merged = pm.Meteo(
        meteo_source=meteo_paths,
        meteo_combine_by="nested",
        meteo_merge="last",
        meteo_xr_kwargs={"concat_dim": "step"},
    ).Dataset
    assert merged.equals(expected)
Example #6
def test_merge_strategy_first(meteo_paths, meteo_datasets):
    # In strategy "first" we want:
    # - the first 13 hours of the first meteo
    # - hours 1-13 of all subsequent meteos
    # - hours 13-end of the last meteo
    expected = xr.concat(
        [
            meteo_datasets[0].isel(time=slice(0, 13)),
            *[ds.isel(time=slice(1, 13)) for ds in meteo_datasets[1:]],
            meteo_datasets[-1].isel(time=slice(13, None)),
        ],
        dim="time",
    )
    merged = pm.Meteo(
        meteo_source=meteo_paths,
        meteo_combine_by="nested",
        meteo_merge="first",
        meteo_xr_kwargs={"concat_dim": "step"},
    ).Dataset
    assert merged.equals(expected)
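In both strategies the question is which dataset wins inside an overlap window: "last" lets the newer file take precedence, "first" the older one. A toy illustration with plain xarray (synthetic data, not the pyposeidon fixtures), using two hourly datasets that overlap on hours 12-24:

import numpy as np
import pandas as pd
import xarray as xr

t0 = pd.date_range("2018-10-01 00:00", periods=25, freq="h")      # hours 0-24
t1 = pd.date_range("2018-10-01 12:00", periods=25, freq="h")      # hours 12-36
a = xr.Dataset({"msl": ("time", np.zeros(25))}, coords={"time": t0})
b = xr.Dataset({"msl": ("time", np.ones(25))}, coords={"time": t1})

# "last": keep only the pre-overlap part of the older file; the newer file wins
last = xr.concat([a.isel(time=slice(0, 12)), b], dim="time")

# "first": keep the older file whole; take only the non-overlapping tail of the newer one
first = xr.concat([a, b.isel(time=slice(13, None))], dim="time")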
Example #7
def test_schism(tmpdir, input_name):
    filename = (DATA_DIR / input_name).as_posix()
    # read meteo file
    df = pmeteo.Meteo(meteo_source=filename)
    df.Dataset = df.Dataset.sortby("latitude", ascending=True)

    rpath = str(tmpdir) + "/"
    # output to sflux netCDF files
    df.to_output(solver_name="schism", rpath=rpath)

    # read the meteo back in
    path = rpath + "sflux/"
    dr = xr.open_dataset(path + "sflux_air_1.0001.nc")

    # cleanup
    #    try:
    #        shutil.rmtree(path)
    #    except OSError as e:
    #        print ("Error: %s - %s." % (e.filename, e.strerror))

    # compare
    assert np.array_equal(df.Dataset.msl.values, dr.prmsl.values)
    assert np.array_equal(df.Dataset.u10.values, dr.uwind.values)
    assert np.array_equal(df.Dataset.v10.values, dr.vwind.values)
Example #8
def test_meteo_empty():
    meteo = pmeteo.Meteo(meteo_source=None)
    assert meteo.Dataset is None
Example #9
def test_meteo_passthrough():
    original_meteo = pmeteo.Meteo(DATASET)
    new_meteo = pmeteo.Meteo(meteo_source=original_meteo.Dataset)
    assert new_meteo.Dataset.equals(original_meteo.Dataset)
Example #10
def test_meteo_returns_dataset(meteo_source):
    meteo = pmeteo.Meteo(meteo_source)
    assert isinstance(meteo, pmeteo.Meteo)
    assert isinstance(meteo.Dataset, xr.Dataset)
Example #11
def test_meteo_defaults():
    meteo = pmeteo.Meteo()
    assert meteo.Dataset is None
Example #12
def schism(tmpdir):
    # initialize a model
    rpath = str(tmpdir) + "/schism/"
    case.update({"rpath": rpath + "20181001.00/"})  # use tmpdir for running the model

    b = pyposeidon.model.set(**case)

    b.execute()

    # creating a time sequence of the runs
    start_date = pd.to_datetime("2018-10-1 0:0:0")
    end_date = pd.to_datetime("2018-10-2 0:0:0")
    date_list = pd.date_range(start_date, end_date, freq="12H")

    # creating a sequence of folders to store the runs. In this case we name them after the date attribute.
    # NOTE that the first folder is the first run already performed!!
    rpaths = [rpath + datetime.datetime.strftime(x, "%Y%m%d.%H") + "/" for x in date_list]

    # creating a sequence of folders from which we read the meteo.
    meteo = []
    for date in date_list:
        prev_date = pd.to_datetime(date) - pd.to_timedelta("12H")
        prev_date = prev_date.strftime(format="%Y-%m-%d %H:%M:%S")
        dr = pd.date_range(prev_date, date, freq="12H")
        names = ["uvp_" + datetime.datetime.strftime(x, "%Y%m%d%H") + ".grib" for x in dr]
        dur = [(DATA_DIR / name).as_posix() for name in names]
        meteo.append(dur)

    # set cast
    for l in range(len(rpaths) - 1):
        h = cast.set(
            solver_name="schism",
            model=b,
            ppath=rpaths[l],
            cpath=rpaths[l + 1],
            meteo=meteo[l + 1],
            sdate=date_list[l + 1],
        )
        h.run(execute=True)  # execute

    # Run check case - Total duration
    check.update({"rpath": rpath + "check/"})  # use tmpdir for running the model

    # Combine meteo appropriately

    m1 = pm.Meteo(meteo_source=METEO_FILES_2[0])
    m2 = pm.Meteo(meteo_source=METEO_FILES_2[1])
    m3 = pm.Meteo(meteo_source=METEO_FILES_2[2])
    m4 = pm.Meteo(meteo_source=METEO_FILES_2[3])

    # extract correct chunk

    w1 = m1.Dataset.isel(time=slice(0, 13))
    w2 = m2.Dataset.isel(time=slice(1, 13))  # drop hour 0; hour 12 of the previous file is kept instead
    w3 = m3.Dataset.isel(time=slice(1, 13))
    w4 = m4.Dataset.isel(time=slice(1, 13))

    # combine
    meteo = xr.combine_by_coords([w1, w2, w3, w4], combine_attrs="override")
    # pass the combined meteo directly to the check case
    check.update({"meteo_source": meteo})

    c = pyposeidon.model.set(**check)

    c.execute()

    # COMPARE
    output = data.get_output(folders=rpaths, solver_name="schism")

    total = data.get_output(folders=[rpath + "check/"], solver_name="schism")

    r = output.Dataset.isel(time=slice(0, 36))

    rb = []
    for var in total.Dataset.data_vars:
        if not total.Dataset[var].equals(r[var]):
            rb.append(var)

    print(rb)

    #    flag = True TODO
    #    for var in rb:
    #        flag = False
    #        mdif = np.abs(total.results.Dataset[var].values - output.results.Dataset[var].values).max()
    #        if mdif < 1.e-14 :
    #            flag = True
    #    print(mdif)

    if (rb == ["zcor"]) or rb == []:
        return True
    else:
        return False
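The run-folder bookkeeping used above is easy to verify standalone (the base path here is hypothetical):

import pandas as pd

date_list = pd.date_range("2018-10-1 0:0:0", "2018-10-2 0:0:0", freq="12H")
rpaths = ["/tmp/schism/" + x.strftime("%Y%m%d.%H") + "/" for x in date_list]
# ['/tmp/schism/20181001.00/', '/tmp/schism/20181001.12/', '/tmp/schism/20181002.00/']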
Example #13
def meteo_datasets(meteo_paths) -> List[xr.Dataset]:
    return [pm.Meteo(meteo_source=path.as_posix()).Dataset for path in meteo_paths]