Example #1
def schism(tmpdir, kwargs):

    grid = pgrid.grid(type='tri2d',
                      grid_file=(DATA_DIR / 'hgrid.gr3').as_posix())

    # extract the grid node coordinates

    xp = grid.Dataset.SCHISM_hgrid_node_x.values
    yp = grid.Dataset.SCHISM_hgrid_node_y.values

    kwargs.update({'grid_x': xp, 'grid_y': yp})
    # get dem
    df = pdem.dem(**kwargs)

    # assign the (negated) interpolated elevations as depth
    grid.Dataset['depth'].loc[:] = -df.Dataset.ival.values

    filename_ = str(tmpdir.join('hgrid_.gr3'))
    # output to grid file
    grid.to_file(filename_)

    # read the new grid back in
    grid_ = pgrid.grid(type='tri2d', grid_file=filename_)

    # compare the two datasets
    return grid.Dataset.equals(grid_.Dataset)
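A minimal usage sketch (not from the source) for driving the round-trip check above from a pytest test; the geographic window and dem_source value are hypothetical placeholders, patterned on the pdem.dem(...) calls in the later examples:

# Hypothetical driver for schism(); window bounds and dem_source are placeholders.
def test_schism_roundtrip(tmpdir):
    window = dict(lon_min=-30.0, lon_max=-10.0, lat_min=60.0, lat_max=70.0)
    assert schism(tmpdir, {**window, 'dem_source': 'path/to/dem.nc'})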
Example #2
def test_answer(tmpdir):

    rpath = str(tmpdir) + "/"

    df = pg.grid(type="tri2d", grid_file=GRID_FILE)

    df.to_file(rpath + "test.gr3")

    dh = pg.grid(type="tri2d", grid_file=rpath + "test.gr3")

    assert df.Dataset.equals(dh.Dataset)
Example #3
    def create(self, **kwargs):

        if not kwargs:
            kwargs = self.__dict__.copy()

        # Grid
        self.grid = pgrid.grid(type='r2d', **kwargs)

        # set lat/lon from file
        if hasattr(self, 'grid_file'):
            kwargs.update({'lon_min': self.grid.Dataset.x.values.min()})
            kwargs.update({'lon_max': self.grid.Dataset.x.values.max()})
            kwargs.update({'lat_min': self.grid.Dataset.y.values.min()})
            kwargs.update({'lat_max': self.grid.Dataset.y.values.max()})

        nj, ni = self.grid.Dataset.lons.shape
        self.nj, self.ni = nj, ni

        kwargs.update({'ni': ni, 'nj': nj})

        # get bathymetry
        self.bath(**kwargs)

        # get boundaries
        self.bc()

        # get meteo
        if self.atm:
            self.force(**kwargs)

        # get tide
        if self.tide:
            self.tidebc()

        self.config(**kwargs)
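A hedged sketch (not from the source) of how create() might be driven; the class name d3d_model is a hypothetical placeholder, and the keyword names follow the ones used in the method above:

# Hypothetical caller; d3d_model is a placeholder for the class owning create().
model = d3d_model(
    lon_min=-30.0, lon_max=-10.0, lat_min=60.0, lat_max=70.0,
    resolution=0.1,
    atm=True,    # fetch meteo forcing via force()
    tide=False,  # skip tidal boundary conditions
)
model.create()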
Example #4
    def read_folder(self, rfolder, **kwargs):

        gfile = glob.glob(rfolder + '/*.grd')  # grid
        dfile = glob.glob(rfolder + '/*.dep')  # bathymetry
        mfile = glob.glob(rfolder + '/*.mdf')  # config
        u = glob.glob(rfolder + '/*.amu')  # meteo u-component
        v = glob.glob(rfolder + '/*.amv')  # meteo v-component
        p = glob.glob(rfolder + '/*.amp')  # meteo pressure

        # abort early if any required file is missing
        if not all([gfile, dfile, mfile, u, v, p]):
            logger.exception('folder incomplete. Abort\n')
            sys.exit(1)

        # config
        self.mdf = pd.read_csv(mfile[0], sep='=')
        self.mdf = self.mdf.set_index(self.mdf.columns[0])  # set index
        # grid
        self.grid = pgrid.grid('r2d', grid_file=gfile[0])
        # bathymetry
        self.dem.Dataset = d3d.from_dep(dfile[0])
        # meteo
        mf = []
        mf.append(d3d.from_force(u[0], 'u10'))
        mf.append(d3d.from_force(v[0], 'v10'))
        mf.append(d3d.from_force(p[0], 'msl'))
        self.meteo.Dataset = xr.merge(mf)

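A hedged usage sketch (not from the source): reading a previously written Delft3D run folder back in; the model object and folder path are hypothetical placeholders:

# Hypothetical usage, assuming a model object exposing read_folder() as above.
model.read_folder('/path/to/run_folder')
model.grid.Dataset   # r2d grid read from the .grd file
model.meteo.Dataset  # merged u10/v10/msl forcing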
Example #5
def test_d3d_grid(tmpdir, natural_earth, dem_source, window):
    grid = pg.grid(type="r2d", geometry=window, resolution=0.1, rpath=str(tmpdir) + "/")
    gr = grid.Dataset
    xp, yp = gr.lons, gr.lats
    df = pdem.dem(**window, grid_x=xp, grid_y=yp, dem_source=dem_source)
    df.adjust(natural_earth)
    assert np.isnan(df.Dataset.fval.values).sum() == 0
Example #6
def test_schism_grid(tmpdir, natural_earth, dem_source, window):
    grid = pg.grid(type="tri2d", geometry=window, coastlines=natural_earth, rpath=str(tmpdir) + "/")
    xg = grid.Dataset.SCHISM_hgrid_node_x.values
    yg = grid.Dataset.SCHISM_hgrid_node_y.values
    df = pdem.dem(**window, grid_x=xg, grid_y=yg, dem_source=dem_source)  # get dem
    df.adjust(natural_earth)
    assert np.isnan(df.Dataset.fval.values).sum() == 0
Example #7
def test_answer(tmpdir, coast):

    df = pg.grid(type="tri2d", geometry="global", coastlines=coast, rpath=str(tmpdir) + "/")

    check = np.isnan(df.Dataset.depth.values).sum() == 0

    assert check
Example #8
def func(tmpdir, name):

    filename = DATA_DIR / name
    # read grid file
    grid = pgrid.grid(type="tri2d", grid_file=filename)

    filename_ = str(tmpdir.join("hgrid_.gr3"))
    # output to grid file
    grid.to_file(filename_)

    # read again new grid
    grid_ = pgrid.grid(type="tri2d", grid_file=filename_)

    # cleanup
    os.remove(filename_)

    # compare
    return grid.Dataset.equals(grid_.Dataset)
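A hedged sketch (not from the source) of how func might be exercised as a parametrized test; the grid file name is taken from the files referenced in these examples:

# Hypothetical parametrized wrapper around func().
import pytest

@pytest.mark.parametrize('name', ['hgrid.gr3'])
def test_grid_io(tmpdir, name):
    assert func(tmpdir, name)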
Example #9
def test_answer(tmpdir, window, coast):

    df = pg.grid(type='tri2d',
                 geometry=window,
                 coastlines=coast,
                 rpath=str(tmpdir) + '/')

    check = np.isnan(df.Dataset.depth.values).sum() == 0
    assert check
Example #10
def test_d3d_grid(tmpdir, dic, natural_earth):

    grid = pg.grid(type="r2d",
                   geometry=dic,
                   resolution=0.1,
                   rpath=str(tmpdir) + "/")
    gr = grid.Dataset
    xp, yp = gr.lons, gr.lats

    # get dem
    df = pdem.dem(**dic, grid_x=xp, grid_y=yp, dem_source=DEM_SOURCE)
    df.adjust(natural_earth)

    assert np.isnan(df.Dataset.fval.values).sum() == 0
Example #11
    def set(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyposeidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, "execute", False)

        pwd = os.getcwd()

        files = ["bctides.in", "launchSchism.sh", "/sflux/sflux_inputs.txt", "/outputs/flux.out"]
        files_sym = [
            "hgrid.gr3",
            "hgrid.ll",
            "manning.gr3",
            "vgrid.in",
            "drag.gr3",
            "rough.gr3",
            "station.in",
            "windrot_geo2proj.gr3",
        ]
        station_files = [
            "/outputs/staout_1",
            "/outputs/staout_2",
            "/outputs/staout_3",
            "/outputs/staout_4",
            "/outputs/staout_5",
            "/outputs/staout_6",
            "/outputs/staout_7",
            "/outputs/staout_8",
            "/outputs/staout_9",
        ]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write("Initial folder not present {}\n".format(self.origin))
            sys.exit(1)

        ppath = self.ppath
        # create the folder/run path
        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        copy2(ppath + self.tag + "_model.json", rpath)  # copy the info file

        # load model
        with open(rpath + self.tag + "_model.json", "rb") as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        args = set(kwargs.keys()).intersection(info.keys())  # modify dic with kwargs
        for attr in list(args):
            info[attr] = kwargs[attr]

        info["config_file"] = ppath + "param.nml"

        # update the properties

        info["start_date"] = self.date
        info["time_frame"] = self.time_frame
        info["end_date"] = self.date + pd.to_timedelta(self.time_frame)
        info["meteo_source"] = self.meteo
        info["rpath"] = rpath
        #            info['grid_file'] = ppath + '/hgrid.gr3'

        #            for attr, value in self.items():
        #                setattr(info, attr, value)

        m = pmodel(**info)

        # Grid
        m.grid = pgrid.grid(type="tri2d", **info)

        # get lat/lon from file
        if hasattr(self, "grid_file"):
            info.update({"lon_min": m.grid.Dataset.SCHISM_hgrid_node_x.values.min()})
            info.update({"lon_max": m.grid.Dataset.SCHISM_hgrid_node_x.values.max()})
            info.update({"lat_min": m.grid.Dataset.SCHISM_hgrid_node_y.values.min()})
            info.update({"lat_max": m.grid.Dataset.SCHISM_hgrid_node_y.values.max()})

        # copy/link necessary files
        logger.debug("copy necessary files")

        for filename in files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except OSError:
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # copy the station files
        logger.debug("copy station files")
        for filename in station_files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except OSError:
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug(".. done")

        # symlink the station files
        # logger.debug('symlink station files')
        # for filename in station_files:

        #   ipath = glob.glob(self.path+self.folders[0] + filename)
        #   if ipath:

        #        if not os.path.exists(rpath + '/outputs/'):
        #            os.makedirs(rpath + '/outputs/')

        #        try:
        #            os.symlink(ipath[0],rpath + filename)
        #        except OSError as e:
        #            if e.errno == errno.EEXIST:
        #                logger.warning('Restart link present\n')
        #                logger.warning('overwriting\n')
        #                os.remove(rpath + filename)
        #                os.symlink(ipath[0],rpath + filename)
        # logger.debug('.. done')

        # symlink the big files
        logger.debug("symlink model files")
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(ipath[0], rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning("Restart link present\n")
                        logger.warning("overwriting\n")
                        os.remove(rpath + filename)
                        os.symlink(ipath[0], rpath + filename)
        logger.debug(".. done")

        # create restart file
        logger.debug("create restart file")

        # check for combine hotstart
        hotout = int((self.date - self.date0).total_seconds() / info["params"]["core"]["dt"])
        logger.debug("hotout_it = {}".format(hotout))

        resfile = glob.glob(ppath + "/outputs/hotstart_it={}.nc".format(hotout))
        if not resfile:
            # load the model from ppath
            with open(ppath + self.tag + "_model.json", "rb") as f:
                ph = pd.read_json(f, lines=True).T
                ph[ph.isnull().values] = None
                ph = ph.to_dict()[0]
            p = pmodel(**ph)
            p.hotstart(it=hotout)

        # link restart file
        inresfile = "/outputs/hotstart_it={}.nc".format(hotout)
        outresfile = "/hotstart.nc"

        logger.info("set restart\n")

        try:
            os.symlink(ppath + inresfile, rpath + outresfile)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning("Restart link present\n")
                logger.warning("overwriting\n")
                os.remove(rpath + outresfile)
                os.symlink(ppath + inresfile, rpath + outresfile)
            else:
                raise e

        # get new meteo

        logger.info("process meteo\n")

        flag = get_value(self, kwargs, "update", [])

        check = [os.path.exists(rpath + "sflux/" + f) for f in ["sflux_air_1.0001.nc"]]

        if not np.any(check) or ("meteo" in flag):

            m.force(**info)
            if hasattr(self, "meteo_split_by"):
                times, datasets = zip(*m.meteo.Dataset.groupby("time.{}".format(self.meteo_split_by)))
                mpaths = ["sflux/sflux_air_1.{:04d}.nc".format(t + 1) for t in np.arange(len(times))]
                for das, mpath in list(zip(datasets, mpaths)):
                    m.to_force(das, vars=["msl", "u10", "v10"], rpath=rpath, filename=mpath, date=self.date0)
            else:
                m.to_force(m.meteo.Dataset, vars=["msl", "u10", "v10"], rpath=rpath, date=self.date0)

        else:
            logger.warning("meteo files present\n")

        # modify param file
        rnday_new = (self.date - self.date0).total_seconds() / (3600 * 24.0) + pd.to_timedelta(
            self.time_frame
        ).total_seconds() / (3600 * 24.0)
        hotout_write = int(rnday_new * 24 * 3600 / info["params"]["core"]["dt"])
        info["parameters"].update(
            {
                "ihot": 2,
                "rnday": rnday_new,
                "start_hour": self.date0.hour,
                "start_day": self.date0.day,
                "start_month": self.date0.month,
                "start_year": self.date0.year,
            }
        )

        m.config(output=True, **info)

        m.config_file = rpath + "param.nml"

        os.chdir(rpath)
        # subprocess.call(rpath+'run_flow2d3d.sh',shell=True)
        m.save()

        if execute:
            m.run()

        # cleanup
        #            os.remove(rpath+'hotstart.nc')

        # save compiled nc file

        # out = data(**{'solver':m.solver,'rpath':rpath,'savenc':True})

        logger.info("done for date: " + datetime.datetime.strftime(self.date, "%Y%m%d.%H"))

        os.chdir(pwd)
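A hedged sketch (not from the source) of how a restart/forecast chain might drive set(); the class name and constructor keywords are assumptions inferred from the attributes the method reads (model, ppath, cpath, date, time_frame, meteo), and any remaining attributes (e.g. tag) are copied from the referenced model:

# Hypothetical forecast chain; the cast class and its kwargs are assumptions.
c = cast(
    model='prev_run/run_model.json',  # prior run to restart from
    ppath='prev_run/',                # folder holding the hotstart outputs
    cpath='next_run/',                # folder for the new run
    date=pd.Timestamp('2020-01-02'),
    time_frame='12H',
    meteo='meteo_forcing.nc',
)
c.set(execute=True)  # stage files, link the hotstart, run the model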