Example #1
def d3d(tmpdir, dic):
    # initialize a model
    rpath = str(tmpdir) + '/'
    dic.update({'rpath': rpath})  # use tmpdir for running the model
    b = pyposeidon.model(**dic)

    try:
        b.execute()
        out = data.data(**dic)
        a = pyposeidon.read_model(rpath+'d3d_model.json') # read model
        a.execute()
        out = data.data(**dic)
        return True
    except Exception:
        return False
Example #2
def schism(tmpdir, dic):
    # initialize a model
    rpath = str(tmpdir) + '/'
    dic.update({'rpath': rpath})  # use tmpdir for running the model

    b = pyposeidon.model(**dic)

    try:
        b.execute()
        b.results()
        a = pyposeidon.read_model(rpath + 'test_model.json')  # read model
        a.execute()
        a.results()
        return True
    except Exception:
        return False
Example #3
def create_features(path="./", tag="schism"):

    logger.info("create geojson file for elevation")
    m = pyposeidon.read_model(path + tag + "_model.json")
    m.get_data()

    d = m.data.Dataset

    vmin = d.elev.min().compute()
    vmax = d.elev.max().compute()

    x = d.SCHISM_hgrid_node_x.values
    y = d.SCHISM_hgrid_node_y.values
    tri = d.SCHISM_hgrid_face_nodes.values

    nodes = pd.DataFrame({"lon": x, "lat": y})
    tria = pd.DataFrame(tri, columns=["a", "b", "c"])

    tria["ap"] = tria.apply(lambda x: nodes.loc[x.a, ["lon", "lat"]].values,
                            axis=1)
    tria["bp"] = tria.apply(lambda x: nodes.loc[x.b, ["lon", "lat"]].values,
                            axis=1)
    tria["cp"] = tria.apply(lambda x: nodes.loc[x.c, ["lon", "lat"]].values,
                            axis=1)

    tria["geometry"] = tria.apply(
        lambda x: shapely.geometry.Polygon([x.ap, x.bp, x.cp]), axis=1)

    colormap = branca.colormap.LinearColormap(["green", "yellow", "red"],
                                              vmin=vmin.values,
                                              vmax=vmax.values)
    colormap.caption = "Elevation"

    # geopandas
    gf_ = gp.GeoDataFrame(tria, crs="EPSG:4326")

    gf_ = gf_.drop(["a", "b", "c", "ap", "bp", "cp"], axis=1)

    # All frames
    fs = []
    for it in range(d.time.shape[0]):
        fr = d.elev.isel(time=it)
        a = fr.sel(nSCHISM_hgrid_node=tria.a.to_list()).values
        b = fr.sel(nSCHISM_hgrid_node=tria.b.to_list()).values
        c = fr.sel(nSCHISM_hgrid_node=tria.c.to_list()).values
        tria["value"] = np.mean([a, b, c], axis=0)
        tria["color"] = [colormap(x) for x in tria.value.to_list()]
        fs.append(
            create_geojson_features(
                tria, d.time[it].values.astype(str).split(".")[0]))

    tf = [j for i in fs for j in i]

    os.makedirs(path + "server", exist_ok=True)

    with open(path + "server/anim.json", "w") as f:
        json.dump(tf, f)

    gf_.to_file(path + "server/grid.geojson", driver="GeoJSON")

    logger.info("... saved")

    return
Example #4
    def run(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyposeidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, "execute", False)

        pwd = os.getcwd()

        files = [
            self.tag + "_hydro.xml",
            self.tag + ".enc",
            self.tag + ".obs",
            self.tag + ".bnd",
            self.tag + ".bca",
            "run_flow2d3d.sh",
        ]
        files_sym = [self.tag + ".grd", self.tag + ".dep"]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write("Initial folder {} not present\n".format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath

        cf = [glob.glob(ppath + "/" + e) for e in files]
        cfiles = [item.split("/")[-1] for sublist in cf for item in sublist]

        # create the folder/run path

        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        copy2(ppath + self.tag + "_model.json", rpath)  # copy the info file

        # load model
        with open(rpath + self.tag + "_model.json", "rb") as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        # override the stored settings with any matching kwargs
        for attr in set(kwargs.keys()).intersection(info.keys()):
            info[attr] = kwargs[attr]

        # update the properties
        info["date"] = self.date
        info["start_date"] = self.date
        info["time_frame"] = self.time_frame
        info["meteo_source"] = self.meteo
        info["rpath"] = rpath
        if self.restart_step:
            info["restart_step"] = self.restart_step

        m = pm.set(**info)

        # copy/link necessary files
        logger.debug("copy necessary files")

        for filename in cfiles:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except OSError:  # destination directory is missing
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # symlink the big files
        logger.debug("symlink model files")
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(
                        pathlib.Path(ipath[0]).resolve(strict=True),
                        rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning("Link present\n")
                        logger.warning("overwriting\n")
                        os.remove(rpath + filename)
                        os.symlink(
                            pathlib.Path(ipath[0]).resolve(strict=True),
                            rpath + filename,
                        )
                    else:
                        raise
        logger.debug(".. done")

        copy2(ppath + m.tag + ".mdf", rpath)  # copy the mdf file

        # copy restart file

        # Delft3D restart maps are stamped YYYYMMDD.HHMMSS
        inresfile = "tri-rst." + m.tag + "." + datetime.datetime.strftime(
            self.date, "%Y%m%d.%H%M%S")

        outresfile = "restart." + datetime.datetime.strftime(
            self.date, "%Y%m%d.%H%M%S")

        #  copy2(ppath+inresfile,rpath+'tri-rst.'+outresfile)
        try:
            os.symlink(
                pathlib.Path(ppath + "/" + inresfile).resolve(strict=True),
                rpath + "tri-rst." + outresfile,
            )
            logger.debug("symlink {} to {}".format(
                ppath + "/" + inresfile, rpath + "tri-rst." + outresfile))
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning("Restart symlink present\n")
                logger.warning("overwriting\n")
                os.remove(rpath + "tri-rst." + outresfile)
                os.symlink(
                    pathlib.Path(ppath + "/" + inresfile).resolve(strict=True),
                    rpath + "tri-rst." + outresfile,
                )
            else:
                raise

        # get new meteo

        logger.info("process meteo\n")

        flag = get_value(self, kwargs, "update", ["meteo"])

        check = [
            os.path.exists(rpath + f) for f in ["u.amu", "v.amv", "p.amp"]
        ]

        if not np.any(check) or "meteo" in flag:  # no meteo files yet, or update requested

            m.force()
            m.to_force(m.meteo.Dataset,
                       vars=["msl", "u10", "v10"],
                       rpath=rpath)  # write u,v,p files

        else:
            logger.info("meteo files present\n")

        # modify mdf file
        m.config(
            config_file=ppath + m.tag + ".mdf",
            config={"Restid": outresfile},
            output=True,
        )

        m.config_file = rpath + m.tag + ".mdf"

        os.chdir(rpath)
        m.save()

        if execute:
            m.run()

        # cleanup
        os.remove(rpath + "tri-rst." + outresfile)

        logger.info("done for date: " +
                    datetime.datetime.strftime(self.date, "%Y%m%d.%H"))

        os.chdir(pwd)
Example #5
    def run(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyposeidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, "execute", True)

        pwd = os.getcwd()

        files = [
            "bctides.in",
            "launchSchism.sh",
            "/sflux/sflux_inputs.txt",
            "/outputs/flux.out",
        ]
        files_sym = [
            "hgrid.gr3",
            "hgrid.ll",
            "manning.gr3",
            "vgrid.in",
            "drag.gr3",
            "rough.gr3",
            "station.in",
            "windrot_geo2proj.gr3",
        ]
        station_files = [
            "/outputs/staout_1",
            "/outputs/staout_2",
            "/outputs/staout_3",
            "/outputs/staout_4",
            "/outputs/staout_5",
            "/outputs/staout_6",
            "/outputs/staout_7",
            "/outputs/staout_8",
            "/outputs/staout_9",
        ]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write("Initial folder {} not present\n".format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath
        # create the new folder/run path
        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        tag = kwargs.get("tag", "schism")
        copy2(ppath + self.tag + "_model.json", rpath)  # copy the info file

        # load model
        with open(rpath + self.tag + "_model.json", "rb") as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        # override the stored settings with any matching kwargs
        for attr in set(kwargs.keys()).intersection(info.keys()):
            info[attr] = kwargs[attr]

        info["config_file"] = ppath + "param.nml"

        # update the properties

        info["date"] = self.date0
        info["start_date"] = self.sdate
        info["time_frame"] = self.time_frame
        info["end_date"] = self.sdate + pd.to_timedelta(self.time_frame)
        info["meteo_source"] = self.meteo
        info["rpath"] = rpath

        m = pm.set(**info)

        # Mesh
        gfile = glob.glob(ppath + "hgrid.gr3")
        if gfile:
            info["mesh_file"] = gfile[0]
            self.mesh_file = gfile[0]
            info["mesh_generator"] = None
            self.mesh_generator = None

        m.mesh = pmesh.set(type="tri2d", **info)

        # get lat/lon from file
        if hasattr(self, "mesh_file"):
            info.update(
                {"lon_min": m.mesh.Dataset.SCHISM_hgrid_node_x.values.min()})
            info.update(
                {"lon_max": m.mesh.Dataset.SCHISM_hgrid_node_x.values.max()})
            info.update(
                {"lat_min": m.mesh.Dataset.SCHISM_hgrid_node_y.values.min()})
            info.update(
                {"lat_max": m.mesh.Dataset.SCHISM_hgrid_node_y.values.max()})

        # copy/link necessary files
        logger.debug("copy necessary files")

        for filename in files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except OSError:  # destination directory is missing
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # copy the station files
        logger.debug("copy station files")
        for filename in station_files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except OSError:  # destination directory is missing
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # symlink the big files
        logger.debug("symlink model files")
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(
                        pathlib.Path(ipath[0]).resolve(strict=True),
                        rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning("Link present\n")
                        logger.warning("overwriting\n")
                        os.remove(rpath + filename)
                        os.symlink(
                            pathlib.Path(ipath[0]).resolve(strict=True),
                            rpath + filename,
                        )
                    else:
                        raise
        logger.debug(".. done")

        # create restart file
        logger.debug("create restart file")

        # check for combine hotstart
        hotout = int((self.sdate - self.date0).total_seconds() /
                     info["params"]["core"]["dt"])
        logger.debug("hotout_it = {}".format(hotout))

        resfile = glob.glob(ppath +
                            "/outputs/hotstart_it={}.nc".format(hotout))
        if not resfile:
            # load model info from ppath
            with open(ppath + self.tag + "_model.json", "rb") as f:
                ph = pd.read_json(f, lines=True).T
                ph[ph.isnull().values] = None
                ph = ph.to_dict()[0]
            p = pm.set(**ph)
            p.hotstart(it=hotout)

        # link restart file
        inresfile = "/outputs/hotstart_it={}.nc".format(hotout)
        outresfile = "/hotstart.nc"

        logger.info("set restart\n")

        try:
            os.symlink(
                pathlib.Path(ppath + inresfile).resolve(strict=True),
                rpath + outresfile)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning("Restart link present\n")
                logger.warning("overwriting\n")
                os.remove(rpath + outresfile)
                os.symlink(
                    pathlib.Path(ppath + inresfile).resolve(strict=True),
                    rpath + outresfile,
                )
            else:
                raise

        # get new meteo

        logger.info("process meteo\n")

        flag = get_value(self, kwargs, "update", [])

        check = [
            os.path.exists(rpath + "sflux/" + f)
            for f in ["sflux_air_1.0001.nc"]
        ]

        if not np.any(check) or "meteo" in flag:  # no meteo files yet, or update requested

            m.force(**info)
            if hasattr(self, "meteo_split_by"):
                times, datasets = zip(*m.meteo.Dataset.groupby(
                    "time.{}".format(self.meteo_split_by)))
                mpaths = [
                    "sflux/sflux_air_1.{:04d}.nc".format(t + 1)
                    for t in np.arange(len(times))
                ]
                for das, mpath in list(zip(datasets, mpaths)):
                    m.to_force(
                        das,
                        vars=["msl", "u10", "v10"],
                        rpath=rpath,
                        filename=mpath,
                        date=self.date0,
                    )
            else:
                m.to_force(
                    m.meteo.Dataset,
                    vars=["msl", "u10", "v10"],
                    rpath=rpath,
                    date=self.date0,
                )

        else:
            logger.warning("meteo files present\n")

        # modify param file
        rnday_new = (self.sdate - self.date0).total_seconds() / (
            3600 * 24.0) + pd.to_timedelta(
                self.time_frame).total_seconds() / (3600 * 24.0)
        hotout_write = int(rnday_new * 24 * 3600 /
                           info["params"]["core"]["dt"])
        info["parameters"].update({
            "ihot": 2,
            "rnday": rnday_new,
            "start_hour": self.date0.hour,
            "start_day": self.date0.day,
            "start_month": self.date0.month,
            "start_year": self.date0.year,
        })

        m.config(output=True, **info)  # save param.nml

        m.config_file = rpath + "param.nml"

        m.save()

        if execute:
            m.run()

        logger.info("done for date: " + self.sdate.strftime("%Y%m%d.%H"))

        os.chdir(pwd)