Code example #1
# assumed module-level imports, not shown in the original snippet:
# import numpy as np
# import xarray as xr
# import pyposeidon.meteo as pmeteo
# import pyposeidon.model as pmodel
# DATA_DIR is assumed to point at the suite's test-data directory
def test_d3d(tmpdir, input_name):
    filename = (DATA_DIR / input_name).as_posix()
    # read meteo file
    df = pmeteo.Meteo(meteo_source=filename)

    rpath = str(tmpdir) + "/"
    # output to uvp files
    df.to_output(solver_name="d3d", rpath=rpath)

    # read the meteo back in through the d3d solver wrapper
    m = pmodel.set(solver_name="d3d")

    p = m.from_force(rpath + "p.amp", "msl")
    u = m.from_force(rpath + "u.amu", "u10")
    v = m.from_force(rpath + "v.amv", "v10")

    dr = xr.merge([p, u, v])
    dr = dr.sortby("latitude", ascending=True)

    # compare
    df.Dataset = df.Dataset.sortby("latitude", ascending=True)

    assert np.abs(df.Dataset.msl.values - dr.msl.values).max() < 1e-3
    assert np.abs(df.Dataset.u10.values - dr.u10.values).max() < 1e-3
    assert np.abs(df.Dataset.v10.values - dr.v10.values).max() < 1e-3
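This test expects pytest to inject the input_name parameter. A minimal sketch of what that parametrization could look like; the file names below are hypothetical placeholders, not taken from the original suite:

import pytest

# hypothetical meteo inputs; substitute the suite's actual data files
@pytest.mark.parametrize("input_name", ["era5.grib", "erai.grib"])
def test_d3d(tmpdir, input_name):
    ...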
Code example #2
# assumed module-level imports, not shown in the original snippet:
# import numpy as np
# import pyposeidon.dem as pdem
# import pyposeidon.model as pmodel
def test_d3d(tmpdir, dem_source, kwargs):

    ## lat,lon grid
    resolution = 0.1
    lon = np.arange(kwargs["lon_min"], kwargs["lon_max"], resolution)
    lat = np.arange(kwargs["lat_min"], kwargs["lat_max"], resolution)
    xp, yp = np.meshgrid(lon, lat)
    # update kwargs
    kwargs.update({"grid_x": xp, "grid_y": yp})

    # get dem
    df = pdem.Dem(dem_source=dem_source, **kwargs)
    # get dem on mesh
    df.Dataset = pdem.dem_on_mesh(df.Dataset, **kwargs)

    rpath = str(tmpdir) + "/"
    # output
    pdem.to_output(df.Dataset, solver_name="d3d", rpath=rpath)

    # read the dem back in through the d3d solver wrapper
    m = pmodel.set(solver_name="d3d")
    rd = m.from_dep(rpath + "d3d.dep")

    # compare
    c1 = -rd.where(rd != -999)
    c2 = df.Dataset.ival.where(df.Dataset.ival < 0)

    assert c1.fillna(0).equals(c2.fillna(0))
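The kwargs argument must supply the bounding box consumed above (dem_source is a separate fixture, left aside here). A minimal sketch of a pytest fixture that would satisfy it; the coordinates are illustrative only:

import pytest

@pytest.fixture
def kwargs():
    # illustrative window; the real suite presumably covers several regions
    return {"lon_min": -30.0, "lon_max": -10.0, "lat_min": 60.0, "lat_max": 70.0}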
Code example #3
File: cast.py Project: brey/pyPoseidon-dev
    def run(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyposeidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, "execute", True)

        pwd = os.getcwd()

        files = [
            "bctides.in",
            "launchSchism.sh",
            "/sflux/sflux_inputs.txt",
            "/outputs/flux.out",
        ]
        files_sym = [
            "hgrid.gr3",
            "hgrid.ll",
            "manning.gr3",
            "vgrid.in",
            "drag.gr3",
            "rough.gr3",
            "station.in",
            "windrot_geo2proj.gr3",
        ]
        station_files = [
            "/outputs/staout_1",
            "/outputs/staout_2",
            "/outputs/staout_3",
            "/outputs/staout_4",
            "/outputs/staout_5",
            "/outputs/staout_6",
            "/outputs/staout_7",
            "/outputs/staout_8",
            "/outputs/staout_9",
        ]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write("Initial folder not present {}\n".format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath
        # create the new folder/run path
        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        tag = kwargs.get("tag", "schism")
        copy2(ppath + self.tag + "_model.json", rpath)  # copy the info file

        # load model
        with open(rpath + self.tag + "_model.json", "rb") as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        try:
            # override entries of the info dict with matching kwargs
            args = set(kwargs.keys()).intersection(info.keys())
            for attr in list(args):
                info[attr] = kwargs[attr]
        except Exception:
            pass

        info["config_file"] = ppath + "param.nml"

        # update the properties

        info["date"] = self.date0
        info["start_date"] = self.sdate
        info["time_frame"] = self.time_frame
        info["end_date"] = self.sdate + pd.to_timedelta(self.time_frame)
        info["meteo_source"] = self.meteo
        info["rpath"] = rpath

        m = pm.set(**info)

        # Mesh
        gfile = glob.glob(ppath + "hgrid.gr3")
        if gfile:
            info["mesh_file"] = gfile[0]
            self.mesh_file = gfile[0]
            info["mesh_generator"] = None
            self.mesh_generator = None

        m.mesh = pmesh.set(type="tri2d", **info)

        # get lat/lon from file
        if hasattr(self, "mesh_file"):
            info.update(
                {"lon_min": m.mesh.Dataset.SCHISM_hgrid_node_x.values.min()})
            info.update(
                {"lon_max": m.mesh.Dataset.SCHISM_hgrid_node_x.values.max()})
            info.update(
                {"lat_min": m.mesh.Dataset.SCHISM_hgrid_node_y.values.min()})
            info.update(
                {"lat_max": m.mesh.Dataset.SCHISM_hgrid_node_y.values.max()})

        # copy/link necessary files
        logger.debug("copy necessary files")

        for filename in files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:  # destination dir missing; create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # copy the station files
        logger.debug("copy station files")
        for filename in station_files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:  # destination dir missing; create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # symlink the big files
        logger.debug("symlink model files")
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(
                        pathlib.Path(ipath[0]).resolve(strict=True),
                        rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning("Restart link present\n")
                        logger.warning("overwriting\n")
                        os.remove(rpath + filename)
                        os.symlink(
                            pathlib.Path(ipath[0]).resolve(strict=True),
                            rpath + filename,
                        )
        logger.debug(".. done")

        # create restart file
        logger.debug("create restart file")

        # check for combine hotstart
        hotout = int((self.sdate - self.date0).total_seconds() /
                     info["params"]["core"]["dt"])
        logger.debug("hotout_it = {}".format(hotout))

        resfile = glob.glob(ppath +
                            "/outputs/hotstart_it={}.nc".format(hotout))
        if not resfile:
            # load the model from ppath
            with open(ppath + self.tag + "_model.json", "rb") as f:
                ph = pd.read_json(f, lines=True).T
                ph[ph.isnull().values] = None
                ph = ph.to_dict()[0]
            p = pm.set(**ph)
            p.hotstart(it=hotout)

        # link restart file
        inresfile = "/outputs/hotstart_it={}.nc".format(hotout)
        outresfile = "/hotstart.nc"

        logger.info("set restart\n")

        try:
            os.symlink(
                pathlib.Path(ppath + inresfile).resolve(strict=True),
                rpath + outresfile)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning("Restart link present\n")
                logger.warning("overwriting\n")
                os.remove(rpath + outresfile)
                os.symlink(
                    pathlib.Path(ppath + inresfile).resolve(strict=True),
                    rpath + outresfile,
                )
            else:
                raise e

        # get new meteo

        logger.info("process meteo\n")

        flag = get_value(self, kwargs, "update", [])

        check = [
            os.path.exists(rpath + "sflux/" + f)
            for f in ["sflux_air_1.0001.nc"]
        ]

        if not np.any(check) or ("meteo" in flag):

            m.force(**info)
            if hasattr(self, "meteo_split_by"):
                times, datasets = zip(*m.meteo.Dataset.groupby(
                    "time.{}".format(self.meteo_split_by)))
                mpaths = [
                    "sflux/sflux_air_1.{:04d}.nc".format(t + 1)
                    for t in np.arange(len(times))
                ]
                for das, mpath in list(zip(datasets, mpaths)):
                    m.to_force(
                        das,
                        vars=["msl", "u10", "v10"],
                        rpath=rpath,
                        filename=mpath,
                        date=self.date0,
                    )
            else:
                m.to_force(
                    m.meteo.Dataset,
                    vars=["msl", "u10", "v10"],
                    rpath=rpath,
                    date=self.date0,
                )

        else:
            logger.warning("meteo files present\n")

        # modify param file
        rnday_new = (self.sdate - self.date0).total_seconds() / (
            3600 * 24.0) + pd.to_timedelta(
                self.time_frame).total_seconds() / (3600 * 24.0)
        hotout_write = int(rnday_new * 24 * 3600 /
                           info["params"]["core"]["dt"])
        info["parameters"].update({
            "ihot": 2,
            "rnday": rnday_new,
            "start_hour": self.date0.hour,
            "start_day": self.date0.day,
            "start_month": self.date0.month,
            "start_year": self.date0.year,
        })

        m.config(output=True, **info)  # save param.nml

        m.config_file = rpath + "param.nml"

        m.save()

        if execute:
            m.run()

        logger.info("done for date :" + self.sdate.strftime("%Y%m%d.%H"))

        os.chdir(pwd)
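For context, a hedged sketch of how a cast like this is typically driven. The cast.set factory and the keyword names below are assumptions inferred from the attributes run() reads (model, ppath, cpath, meteo, sdate); they are not confirmed by the snippet, and all paths are illustrative placeholders:

import pandas as pd
from pyposeidon.utils import cast

# placeholder inputs; every name here is illustrative
c = cast.set(
    solver_name="schism",
    model="schism/previous/schism_model.json",  # previous model (object or json path)
    ppath="schism/previous/",                   # folder of the completed run (hotstart source)
    cpath="schism/next/",                       # folder for the new, hot-started run
    meteo="meteo_window2.grib",                 # meteo source for the new window
    sdate=pd.Timestamp("2020-01-01 12:00"),     # start of the new window
)
c.run(execute=True)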
Code example #4
File: cast.py Project: brey/pyPoseidon-dev
    def run(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyposeidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, "execute", False)

        pwd = os.getcwd()

        files = [
            self.tag + "_hydro.xml",
            self.tag + ".enc",
            self.tag + ".obs",
            self.tag + ".bnd",
            self.tag + ".bca",
            "run_flow2d3d.sh",
        ]
        files_sym = [self.tag + ".grd", self.tag + ".dep"]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write("Initial folder not present {}\n".format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath

        cf = [glob.glob(ppath + "/" + e) for e in files]
        cfiles = [item.split("/")[-1] for sublist in cf for item in sublist]

        # create the folder/run path

        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        copy2(ppath + self.tag + "_model.json", rpath)  # copy the info file

        # load model
        with open(rpath + self.tag + "_model.json", "rb") as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        try:
            # override entries of the info dict with matching kwargs
            args = set(kwargs.keys()).intersection(info.keys())
            for attr in list(args):
                info[attr] = kwargs[attr]
        except Exception:
            pass

        # update the properties
        info["date"] = self.date
        info["start_date"] = self.date
        info["time_frame"] = self.time_frame
        info["meteo_source"] = self.meteo
        info["rpath"] = rpath
        if self.restart_step:
            info["restart_step"] = self.restart_step

        m = pm.set(**info)

        # copy/link necessary files
        logger.debug("copy necessary files")

        for filename in cfiles:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:  # destination dir missing; create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # symlink the big files
        logger.debug("symlink model files")
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(
                        pathlib.Path(ipath[0]).resolve(strict=True),
                        rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning("Restart link present\n")
                        logger.warning("overwriting\n")
                        os.remove(rpath + filename)
                        os.symlink(
                            pathlib.Path(ipath[0]).resolve(strict=True),
                            rpath + filename,
                        )
        logger.debug(".. done")

        copy2(ppath + m.tag + ".mdf", rpath)  # copy the mdf file

        # copy restart file

        inresfile = "tri-rst." + m.tag + "." + datetime.datetime.strftime(
            self.date, "%Y%m%d.%H%M%M")

        outresfile = "restart." + datetime.datetime.strftime(
            self.date, "%Y%m%d.%H%M%M")

        #  copy2(ppath+inresfile,rpath+'tri-rst.'+outresfile)
        try:
            os.symlink(
                pathlib.Path(ppath + "/" + inresfile).resolve(strict=True),
                rpath + "tri-rst." + outresfile,
            )
            logger.debug("symlink {} to {}".format(
                ppath + "/" + inresfile, rpath + "tri-rst." + outresfile))
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning("Restart symlink present\n")
                logger.warning("overwriting\n")
                os.remove(rpath + "tri-rst." + outresfile)
                os.symlink(
                    pathlib.Path(ppath + "/" + inresfile).resolve(strict=True),
                    rpath + "tri-rst." + outresfile,
                )
            else:
                raise e

        # get new meteo

        logger.info("process meteo\n")

        flag = get_value(self, kwargs, "update", ["meteo"])

        check = [
            os.path.exists(rpath + f) for f in ["u.amu", "v.amv", "p.amp"]
        ]

        if not np.any(check) or ("meteo" in flag):

            m.force()
            m.to_force(m.meteo.Dataset,
                       vars=["msl", "u10", "v10"],
                       rpath=rpath)  # write u,v,p files

        else:
            logger.info("meteo files present\n")

        # modify mdf file
        m.config(
            config_file=ppath + m.tag + ".mdf",
            config={"Restid": outresfile},
            output=True,
        )

        m.config_file = rpath + m.tag + ".mdf"

        os.chdir(rpath)
        m.save()

        if execute:
            m.run()

        # cleanup
        os.remove(rpath + "tri-rst." + outresfile)

        logger.info("done for date :" +
                    datetime.datetime.strftime(self.date, "%Y%m%d.%H"))

        os.chdir(pwd)
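The Delft3D variant would be staged the same way, swapping the solver name and optionally passing restart_step, which run() forwards into the model info. Again a sketch under the same cast.set assumption, continuing the placeholders from the schism sketch above:

c = cast.set(solver_name="d3d", model="d3d/previous/d3d_model.json",
             ppath="d3d/previous/", cpath="d3d/next/",
             meteo="meteo_window2.grib", date=pd.Timestamp("2020-01-01 12:00"),
             restart_step=12)    # run() forwards restart_step into the model info
c.run(execute=False)             # note execute defaults to False in this variant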
Code example #5
File: data.py Project: brey/pyPoseidon-dev
    def __init__(self, **kwargs):

        rpath = kwargs.get("rpath", "./schism/")

        # e.g. [os.path.join(os.path.abspath(loc), name) for name in os.listdir(loc)
        #       if os.path.isdir(os.path.join(loc, name))]
        folders = kwargs.get("folders", None)

        if folders:
            self.folders = folders
        else:
            self.folders = [rpath]

        datai = []

        tag = kwargs.get("tag", "schism")

        misc = kwargs.get("misc", {})

        for folder in self.folders:

            logger.info(" Combining output for folder {}\n".format(folder))

            xdat = glob.glob(folder + "/outputs/schout_[!0]*.nc")
            xdat.sort(key=lambda f: int("".join(filter(str.isdigit, f))))

            if len(xdat) > 0:
                datai.append(xdat)  # append to list

            else:  # run merge output

                with open(folder + "/" + tag + "_model.json", "r") as f:
                    info = pd.read_json(f, lines=True).T
                    info[info.isnull().values] = None
                    info = info.to_dict()[0]

                p = pm.set(**info)

                p.misc = misc

                p.results()

                self.misc = p.misc

                xdat = glob.glob(folder + "/outputs/schout_[!0]*.nc")
                xdat.sort(key=lambda f: int("".join(filter(str.isdigit, f))))

                datai.append(xdat)  # append to list

        merge = kwargs.get("merge", True)

        if merge:

            datai = flat_list(datai)
            self.Dataset = xr.open_mfdataset(datai,
                                             combine="by_coords",
                                             data_vars="minimal")

            with open(self.folders[-1] + "/" + tag + "_model.json", "r") as f:
                info = pd.read_json(f, lines=True).T
                info[info.isnull().values] = None
                info = info.to_dict()[0]

            p = pm.set(**info)

            if hasattr(p, "stations"):

                logger.info(" Retrieve station timeseries\n")

                dstamp = kwargs.get("dstamp", info["date"])

                p.get_station_data(dstamp=dstamp)
                self.time_series = p.time_series

        else:
            self.Dataset = [
                xr.open_mfdataset(x, combine="by_coords", data_vars="minimal")
                for x in datai
            ]

            ts = []

            for folder in self.folders:

                p = pm.read_model(folder +
                                  "/{}_model.json".format(tag))  # read model

                if hasattr(p, "stations"):

                    logger.info(" Retrieve station timeseries\n")

                    dstamp = kwargs.get("dstamp", p.date)

                    p.get_station_data(dstamp=dstamp)
                    ts.append(p.time_series)

            self.time_series = ts
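A hedged sketch of typical usage for this constructor. Only __init__(**kwargs) is shown above, so the get_output entry point and its solver_name argument are assumptions about the surrounding module; folders, tag, and merge match kwargs the constructor actually reads, and the paths are placeholders:

from pyposeidon.utils import data

# assumed factory wrapping this class
out = data.get_output(
    solver_name="schism",                     # assumption: dispatches on solver
    folders=["schism/run1", "schism/run2"],   # hot-started runs to combine
    merge=True,                               # one merged xarray Dataset across folders
)
ds = out.Dataset                              # merged results
ts = out.time_series                          # station timeseries, when stations are defined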