Example #1
    def bath(self, **kwargs):

        kwargs['grid_x'] = self.grid.Dataset.lons.values
        kwargs['grid_y'] = self.grid.Dataset.lats.values

        dpath = get_value(self, kwargs, 'dem', None)

        kwargs.update({'dem': dpath})

        flag = get_value(self, kwargs, 'update', [])

        # rebuild the DEM unless an update list is given that excludes it
        if flag and not (('dem' in flag) or ('all' in flag)):
            logger.info('reading local dem file ..\n')
            z = {**self.__dict__, **kwargs}  # merge self and possible kwargs
            dem_source = z['rpath'] + self.tag + '.dep'
            self.dem = from_dep(dem_source)
        else:
            kwargs.update({
                'lon_min': self.lon_min,
                'lat_min': self.lat_min,
                'lon_max': self.lon_max,
                'lat_max': self.lat_max
            })
            self.dem = pdem.dem(**kwargs)
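
All of these methods resolve options through a get_value(self, kwargs, key, default) helper whose implementation is not part of the excerpt. A minimal sketch consistent with how it is used above (an explicit kwarg overrides an attribute already stored on the object, with a final fallback to the default):

def get_value(obj, kwargs, key, default):
    # a kwarg passed by the caller wins over an attribute set on the object
    if key in kwargs:
        return kwargs[key]
    return getattr(obj, key, default)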
Example #2
    def force(self, **kwargs):

        meteo_source = get_value(self, kwargs, 'meteo_source', None)

        kwargs.update({'meteo_source': meteo_source})

        flag = get_value(self, kwargs, 'update', [])

        z = {**self.__dict__, **kwargs}  # merge self and possible kwargs

        # rebuild the meteo forcing when it is requested explicitly, or
        # when no update list is given at all
        if flag:
            if ('meteo' in flag) or ('all' in flag):
                self.meteo = pmeteo.meteo(**z)
            else:
                logger.info('skipping meteo files ..\n')
        else:
            self.meteo = pmeteo.meteo(**z)
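
The update flag follows the same convention throughout these examples: an empty list (the default) means rebuild everything, while a non-empty list rebuilds only the components it names, with 'all' acting as a wildcard. The logic, reduced to a self-contained predicate:

def should_rebuild(flag, name):
    # no update list -> rebuild; otherwise rebuild only when this
    # component (or 'all') is explicitly listed
    return (not flag) or (name in flag) or ('all' in flag)

assert should_rebuild([], 'meteo')           # no flag: rebuild
assert should_rebuild(['all'], 'meteo')      # wildcard: rebuild
assert not should_rebuild(['dem'], 'meteo')  # other component only: skip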
Example #3
    def to_output(self, solver=None, **kwargs):

        model = importlib.import_module(
            'pyPoseidon.model')  # load the pyPoseidon model module

        s = getattr(model, solver)  # get solver class
        var_list = kwargs.pop('vars', ['msl', 'u10', 'v10'])

        m_index = get_value(self, kwargs, 'm_index', 1)

        split_by = get_value(self, kwargs, 'meteo_split_by', None)
        if split_by:
            times, datasets = zip(
                *self.Dataset.groupby('time.{}'.format(split_by)))
            mpaths = [
                'sflux/sflux_air_{}.{:04d}.nc'.format(m_index, t + 1)
                for t in np.arange(len(times))
            ]
            for das, mpath in zip(datasets, mpaths):
                s.to_force(das, vars=var_list, filename=mpath, **kwargs)
        else:
            s.to_force(self.Dataset, vars=var_list, **kwargs)
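
The meteo_split_by branch relies on xarray's groupby to slice the forcing into one file per calendar unit. A toy, self-contained illustration of the split and of the resulting sflux file names (the dataset here is synthetic):

import numpy as np
import pandas as pd
import xarray as xr

ds = xr.Dataset({'msl': ('time', np.zeros(48))},
                coords={'time': pd.date_range('2020-01-01', periods=48,
                                              freq='H')})
times, datasets = zip(*ds.groupby('time.day'))
mpaths = ['sflux/sflux_air_{}.{:04d}.nc'.format(1, t + 1)
          for t in np.arange(len(times))]
# mpaths -> ['sflux/sflux_air_1.0001.nc', 'sflux/sflux_air_1.0002.nc']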
Example #4
    def save(self, **kwargs):

        path = get_value(self, kwargs, 'rpath', './d3d/')

        lista = [
            key for key in self.__dict__.keys()
            if key not in ['meteo', 'dem', 'grid']
        ]
        dic = {k: self.__dict__.get(k, None) for k in lista}

        grid = self.__dict__.get('grid', None)
        if isinstance(grid, str):
            dic.update({'grid': grid})
        else:
            dic.update({'grid': grid.__class__.__name__})

        dem = self.__dict__.get('dem', None)
        if isinstance(dem, str):
            dic.update({'dem': dem})
        elif isinstance(dem, pdem.dem):
            dic.update({'dem': dem.Dataset.elevation.attrs})

        meteo = self.__dict__.get('meteo', None)
        if isinstance(meteo, str):
            dic.update({'meteo': meteo})
        elif isinstance(meteo, pmeteo.meteo):
            dic.update({'meteo': meteo.Dataset.attrs})

        dic['version'] = pyPoseidon.__version__

        # make the remaining values JSON-friendly before dumping
        for attr, value in dic.items():
            if isinstance(value, datetime.datetime):
                dic[attr] = dic[attr].isoformat()
            if isinstance(value, pd.Timedelta):
                dic[attr] = dic[attr].isoformat()
            if isinstance(value, pd.DataFrame):
                dic[attr] = dic[attr].to_dict()
        json.dump(dic,
                  open(path + self.tag + '_model.json', 'w'),
                  default=myconverter)
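
json.dump is handed default=myconverter for anything the stdlib encoder rejects. The helper itself is not shown in this example; a plausible minimal version, assuming it only needs to cover numpy scalars, arrays and timestamps:

import datetime
import numpy as np
import pandas as pd

def myconverter(obj):
    # fallback serializer for objects the stdlib json encoder rejects
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    if isinstance(obj, (datetime.datetime, pd.Timestamp)):
        return obj.isoformat()
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    raise TypeError('{} is not JSON serializable'.format(type(obj)))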
Example #5
    def set(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyPoseidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, 'execute', False)

        pwd = os.getcwd()

        files = [
            self.tag + '_hydro.xml', self.tag + '.enc', self.tag + '.obs',
            self.tag + '.bnd', self.tag + '.bca', 'run_flow2d3d.sh'
        ]
        files_sym = [self.tag + '.grd', self.tag + '.dep']

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write('Initial folder not present {}\n'.format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath

        cf = [glob.glob(ppath + '/' + e) for e in files]
        cfiles = [item.split('/')[-1] for sublist in cf for item in sublist]

        # create the folder/run path

        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        copy2(ppath + self.tag + '_model.json', rpath)  #copy the info file

        # load model
        with open(rpath + self.tag + '_model.json', 'rb') as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        args = set(kwargs.keys()).intersection(
            info.keys())  # modify dic with kwargs
        for attr in list(args):
            info[attr] = kwargs[attr]

        #update the properties
        info['date'] = self.date
        info['start_date'] = self.date
        info['time_frame'] = self.time_frame
        info['meteo_source'] = self.meteo
        info['rpath'] = rpath
        if self.restart_step:
            info['restart_step'] = self.restart_step


        m = pmodel(**info)

        # copy/link necessary files
        logger.debug('copy necessary files')

        for filename in cfiles:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:
                    # destination folder missing: create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug('.. done')

        #symlink the big files
        logger.debug('symlink model files')
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(ipath[0], rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning('Restart link present\n')
                        logger.warning('overwriting\n')
                        os.remove(rpath + filename)
                        os.symlink(ipath[0], rpath + filename)
        logger.debug('.. done')

        copy2(ppath + m.tag + '.mdf', rpath)  #copy the mdf file

        # copy restart file

        inresfile = 'tri-rst.' + m.tag + '.' + datetime.datetime.strftime(
            self.date, '%Y%m%d.%H%M%S')

        outresfile = 'restart.' + datetime.datetime.strftime(
            self.date, '%Y%m%d.%H%M%S')

        try:
            os.symlink(ppath + '/' + inresfile,
                       rpath + 'tri-rst.' + outresfile)
            logger.debug('symlink {} to {}'.format(
                ppath + '/' + inresfile, rpath + 'tri-rst.' + outresfile))
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning('Restart symlink present\n')
                logger.warning('overwriting\n')
                os.remove(rpath + 'tri-rst.' + outresfile)
                os.symlink(ppath + '/' + inresfile,
                           rpath + 'tri-rst.' + outresfile)
            else:
                raise e

        #get new meteo

        logger.info('process meteo\n')

        flag = get_value(self, kwargs, 'update', ['meteo'])

        check = [
            os.path.exists(rpath + f) for f in ['u.amu', 'v.amv', 'p.amp']
        ]

        if (not np.any(check)) or ('meteo' in flag):

            m.force()
            m.to_force(m.meteo.Dataset,
                       vars=['msl', 'u10', 'v10'],
                       rpath=rpath)  #write u,v,p files

        else:
            logger.info('meteo files present\n')

        # modify mdf file
        m.config(config_file=ppath + m.tag + '.mdf',
                 config={'Restid': outresfile},
                 output=True)

        m.config_file = rpath + m.tag + '.mdf'

        os.chdir(rpath)
        m.save()

        if execute:
            m.run()

        #cleanup
        os.remove(rpath + 'tri-rst.' + outresfile)

        logger.info('done for date: ' +
                    datetime.datetime.strftime(self.date, '%Y%m%d.%H'))

        os.chdir(pwd)
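
The try / catch-EEXIST / remove / retry dance around os.symlink appears several times in this method and the next one; it can be factored into a small helper. A sketch:

import errno
import os

def force_symlink(src, dst):
    # create a symlink, replacing a stale one if it is already there
    try:
        os.symlink(src, dst)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        os.remove(dst)
        os.symlink(src, dst)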
Example #6
    def set(self, **kwargs):

        if isinstance(self.model, str):
            self.model = pyPoseidon.read_model(self.model)

        for attr, value in self.model.__dict__.items():
            if not hasattr(self, attr):
                setattr(self, attr, value)

        execute = get_value(self, kwargs, 'execute', False)

        pwd = os.getcwd()

        files = [
            'bctides.in', 'launchSchism.sh', '/sflux/sflux_inputs.txt',
            '/outputs/flux.out'
        ]
        files_sym = [
            'hgrid.gr3', 'hgrid.ll', 'manning.gr3', 'vgrid.in', 'drag.gr3',
            'rough.gr3', 'station.in', 'windrot_geo2proj.gr3'
        ]
        station_files = [
            '/outputs/staout_1', '/outputs/staout_2', '/outputs/staout_3',
            '/outputs/staout_4', '/outputs/staout_5', '/outputs/staout_6',
            '/outputs/staout_7', '/outputs/staout_8', '/outputs/staout_9'
        ]

        self.origin = self.model.rpath
        self.date0 = self.model.date

        if not os.path.exists(self.origin):
            sys.stdout.write('Initial folder not present {}\n'.format(
                self.origin))
            sys.exit(1)

        ppath = self.ppath
        # create the folder/run path
        rpath = self.cpath

        if not os.path.exists(rpath):
            os.makedirs(rpath)

        copy2(ppath + self.tag + '_model.json', rpath)  #copy the info file

        # load model
        with open(rpath + self.tag + '_model.json', 'rb') as f:
            info = pd.read_json(f, lines=True).T
            info[info.isnull().values] = None
            info = info.to_dict()[0]

        args = set(kwargs.keys()).intersection(
            info.keys())  # modify dic with kwargs
        for attr in list(args):
            info[attr] = kwargs[attr]

        info['config_file'] = ppath + 'param.nml'

        #update the properties

        info['start_date'] = self.date
        info['time_frame'] = self.time_frame
        info['end_date'] = self.date + pd.to_timedelta(self.time_frame)
        info['meteo_source'] = self.meteo
        info['rpath'] = rpath

        m = pmodel(**info)

        # Grid
        m.grid = pgrid.grid(type='tri2d', **info)

        # get lat/lon from file
        if hasattr(self, 'grid_file'):
            info.update(
                {'lon_min': m.grid.Dataset.SCHISM_hgrid_node_x.values.min()})
            info.update(
                {'lon_max': m.grid.Dataset.SCHISM_hgrid_node_x.values.max()})
            info.update(
                {'lat_min': m.grid.Dataset.SCHISM_hgrid_node_y.values.min()})
            info.update(
                {'lat_max': m.grid.Dataset.SCHISM_hgrid_node_y.values.max()})

        # copy/link necessary files
        logger.debug('copy necessary files')

        for filename in files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:
                    # destination folder missing: create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug('.. done')

        #copy the station files
        logger.debug('copy station files')
        for filename in station_files:
            ipath = glob.glob(ppath + filename)
            if ipath:
                try:
                    copy2(ppath + filename, rpath + filename)
                except FileNotFoundError:
                    # destination folder missing: create it and retry
                    dir_name, file_name = os.path.split(filename)
                    if not os.path.exists(rpath + dir_name):
                        os.makedirs(rpath + dir_name)
                    copy2(ppath + filename, rpath + filename)
        logger.debug('.. done')

        #symlink the big files
        logger.debug('symlink model files')
        for filename in files_sym:
            ipath = glob.glob(self.origin + filename)
            if ipath:
                try:
                    os.symlink(ipath[0], rpath + filename)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        logger.warning('Restart link present\n')
                        logger.warning('overwriting\n')
                        os.remove(rpath + filename)
                        os.symlink(ipath[0], rpath + filename)
        logger.debug('.. done')

        # create restart file
        logger.debug('create restart file')

        #check for combine hotstart
        hotout = int((self.date - self.date0).total_seconds() /
                     info['params']['core']['dt'])
        logger.debug('hotout_it = {}'.format(hotout))

        resfile = glob.glob(ppath +
                            '/outputs/hotstart_it={}.nc'.format(hotout))
        if not resfile:
            # load the model info from ppath
            with open(ppath + self.tag + '_model.json', 'rb') as f:
                ph = pd.read_json(f, lines=True).T
                ph[ph.isnull().values] = None
                ph = ph.to_dict()[0]
            p = pmodel(**ph)
            p.hotstart(it=hotout)

        # link restart file
        inresfile = '/outputs/hotstart_it={}.nc'.format(hotout)
        outresfile = '/hotstart.nc'

        logger.info('set restart\n')

        try:
            os.symlink(ppath + inresfile, rpath + outresfile)
        except OSError as e:
            if e.errno == errno.EEXIST:
                logger.warning('Restart link present\n')
                logger.warning('overwriting\n')
                os.remove(rpath + outresfile)
                os.symlink(ppath + inresfile, rpath + outresfile)
            else:
                raise e

        #get new meteo

        logger.info('process meteo\n')

        flag = get_value(self, kwargs, 'update', [])

        check = [
            os.path.exists(rpath + 'sflux/' + f)
            for f in ['sflux_air_1.0001.nc']
        ]

        if (not np.any(check)) or ('meteo' in flag):

            m.force(**info)
            if hasattr(self, 'meteo_split_by'):
                times, datasets = zip(*m.meteo.Dataset.groupby(
                    'time.{}'.format(self.meteo_split_by)))
                mpaths = [
                    'sflux/sflux_air_1.{:04d}.nc'.format(t + 1)
                    for t in np.arange(len(times))
                ]
                for das, mpath in zip(datasets, mpaths):
                    m.to_force(das,
                               vars=['msl', 'u10', 'v10'],
                               rpath=rpath,
                               filename=mpath,
                               date=self.date0)
            else:
                m.to_force(m.meteo.Dataset,
                           vars=['msl', 'u10', 'v10'],
                           rpath=rpath,
                           date=self.date0)

        else:
            logger.warning('meteo files present\n')

        # modify param file
        rnday_new = (self.date - self.date0).total_seconds() / (
            3600 * 24.) + pd.to_timedelta(
                self.time_frame).total_seconds() / (3600 * 24.)
        hotout_write = int(rnday_new * 24 * 3600 /
                           info['params']['core']['dt'])
        info['parameters'].update({
            'ihot': 2,
            'rnday': rnday_new,
            'start_hour': self.date0.hour,
            'start_day': self.date0.day,
            'start_month': self.date0.month,
            'start_year': self.date0.year
        })

        m.config(output=True, **info)

        m.config_file = rpath + 'param.nml'

        os.chdir(rpath)
        m.save()

        if execute:
            m.run()

        logger.info('done for date: ' +
                    datetime.datetime.strftime(self.date, '%Y%m%d.%H'))

        os.chdir(pwd)
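
The hotstart bookkeeping above is plain arithmetic on the model clock: the restart iteration is the elapsed time divided by the solver time step, and the new rnday is the elapsed plus forecast window expressed in days. A worked example with illustrative numbers:

import pandas as pd

date0 = pd.Timestamp('2020-01-01 00:00')  # original cold start
date = pd.Timestamp('2020-01-02 00:00')   # current forecast start
dt = 150.                                 # solver time step [s], illustrative
time_frame = '12H'

hotout = int((date - date0).total_seconds() / dt)  # -> 576
rnday_new = ((date - date0).total_seconds() +
             pd.to_timedelta(time_frame).total_seconds()) / (3600 * 24.)
# rnday_new -> 1.5, so the run restarts from outputs/hotstart_it=576.nc
# and integrates up to day 1.5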
Example #7
    def output(self, **kwargs):

        path = get_value(self, kwargs, 'rpath', './d3d/')
        slevel = get_value(self, kwargs, 'slevel', 0.)
        flag = get_value(self, kwargs, 'update', [])

        nj, ni = self.grid.Dataset.lons.shape

        if not os.path.exists(path):
            os.makedirs(path)

        #save mdf
        self.mdf.to_csv(path + self.tag + '.mdf', sep='=')

        # save grid file
        if flag:
            if ('all' in flag) | ('grid' in flag):
                #save grid
                self.grid.to_file(filename=path + self.tag + '.grd')
            else:
                logger.info('skipping grid file ..\n')
        else:
            self.grid.to_file(filename=path + self.tag + '.grd')

        # save bathymetry file
        self.to_dep(self.dem.Dataset, rpath=path, tag=self.tag, update=flag)

        #save meteo
        if self.atm:
            try:
                self.to_force(self.meteo.Dataset,
                              vars=['msl', 'u10', 'v10'],
                              rpath=path,
                              **kwargs)
            except AttributeError as e:
                logger.warning('no meteo data available.. no update..\n')
                pass

        #save obs file
        self.to_obs(**kwargs)

        #save enc file
        if (not flag) or ('all' in flag) or ('model' in flag):
            #write enc out
            with open(path + self.tag + '.enc', 'w') as f:
                f.write('{:>5}{:>5}\n'.format(ni + 1, 1))  # add one like ddb
                f.write('{:>5}{:>5}\n'.format(ni + 1, nj + 1))
                f.write('{:>5}{:>5}\n'.format(1, nj + 1))
                f.write('{:>5}{:>5}\n'.format(1, 1))
                f.write('{:>5}{:>5}\n'.format(ni + 1, 1))

        calc_dir = get_value(self, kwargs, 'rpath', './d3d/')

        try:
            bin_path = os.environ['D3D']
        except KeyError:
            bin_path = get_value(self, kwargs, 'epath', None)

        try:
            lib_path = os.environ['LD3D']
        except KeyError:
            lib_path = get_value(self, kwargs, 'lpath', None)

        if bin_path is None:
            #---------------------------------------------------------------------
            logger.warning('D3D executable path (epath) not given\n')
            #---------------------------------------------------------------------

        if lib_path is None:
            #---------------------------------------------------------------------
            logger.warning('D3D libraries path (lpath) not given\n')
            #---------------------------------------------------------------------

        ncores = get_value(self, kwargs, 'ncores', NCORES)

        if not os.path.exists(calc_dir + self.tag + '_hydro.xml'):

            # edit and save config file
            copy2(DATA_PATH + 'config_d_hydro.xml',
                  calc_dir + self.tag + '_hydro.xml')

        xml = md.parse(calc_dir + self.tag + '_hydro.xml')

        xml.getElementsByTagName('mdfFile')[0].firstChild.replaceWholeText(
            self.tag + '.mdf')

        with open(calc_dir + self.tag + '_hydro.xml', 'w') as f:
            xml.writexml(f)

        if not os.path.exists(calc_dir + 'run_flow2d3d.sh'):

            copy2(DATA_PATH + 'run_flow2d3d.sh', calc_dir + 'run_flow2d3d.sh')

            #make the script executable
            execf = calc_dir + 'run_flow2d3d.sh'
            mode = os.stat(execf).st_mode
            mode |= (mode & 0o444) >> 2  # copy R bits to X
            os.chmod(execf, mode)

        #---------------------------------------------------------------------
        logger.info('output done\n')
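
The .enc block writes the grid enclosure as a closed rectangle in index space: four corners plus the repeated first corner, each shifted by one as the ddb comment notes. For a small 4x3 grid the file would contain:

ni, nj = 4, 3
corners = [(ni + 1, 1), (ni + 1, nj + 1), (1, nj + 1), (1, 1), (ni + 1, 1)]
print(''.join('{:>5}{:>5}\n'.format(i, j) for i, j in corners))
#     5    1
#     5    4
#     1    4
#     1    1
#     5    1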
Example #8
    def run(self, **kwargs):

        calc_dir = get_value(self, kwargs, 'rpath', './d3d/')

        try:
            bin_path = os.environ['D3D']
        except KeyError:
            bin_path = get_value(self, kwargs, 'epath', None)

        try:
            lib_path = os.environ['LD3D']
        except KeyError:
            lib_path = get_value(self, kwargs, 'lpath', None)

        if bin_path is None:
            #------------------------------------------------------------------------------
            logger.warning(
                'D3D executable path (epath) not given -> using default \n')
            #------------------------------------------------------------------------------
            # cpath is a module-level default install path (not shown in
            # this excerpt)
            bin_path = os.pathsep + cpath
            lib_path = bin_path

        ncores = get_value(self, kwargs, 'ncores', NCORES)

        argfile = get_value(self, kwargs, 'argfile', self.tag + '_hydro.xml')

        #---------------------------------------------------------------------
        logger.info('executing model\n')
        #---------------------------------------------------------------------

        # note that cwd is the folder where the executable is
        ex = subprocess.Popen(args=[
            './run_flow2d3d.sh {} {} {} {}'.format(argfile, ncores, bin_path,
                                                   lib_path)
        ],
                              cwd=calc_dir,
                              shell=True,
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)

        with open(calc_dir + self.tag + '_run.log', 'w') as f:  #save output

            for line in iter(ex.stdout.readline, b''):
                f.write(line.decode(sys.stdout.encoding))
                logger.info(line.decode(sys.stdout.encoding))

            for line in iter(ex.stderr.readline, b''):
                logger.info(line.decode(sys.stdout.encoding))
                tempfiles = glob.glob(calc_dir + '/tri-diag.' + self.tag +
                                      '-*')
                try:
                    biggest = max(tempfiles,
                                  key=(lambda tf: os.path.getsize(tf)))
                    with open(biggest, 'r') as f1:
                        for l in f1:  # f1 is opened in text mode; no decode
                            f.write(l)
                except ValueError:  # no diagnostics file produced yet
                    pass

        #cleanup
        tempfiles = glob.glob(calc_dir + '/tri-diag.' + self.tag + '-*')
        if tempfiles:
            biggest = max(tempfiles, key=(lambda tf: os.path.getsize(tf)))
            with open(calc_dir + self.tag + '_run.log',
                      'a') as f:  #save diagnosis
                with open(biggest, 'r') as f1:
                    for line in f1:
                        f.write(line)

        tempfiles = glob.glob(calc_dir + '/tri-diag.' + self.tag +
                              '-*') + glob.glob(calc_dir + '/TMP_*')

        for filename in tempfiles:
            try:
                os.remove(filename)
            except OSError:
                pass

        ex.stdout.close()
        ex.stderr.close()

        #---------------------------------------------------------------------
        logger.info('FINISHED\n')
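
The wrapper keeps the log file and the logger live while the solver runs by reading the pipe line by line instead of waiting for the process to finish. The core pattern, reduced to a portable, self-contained sketch:

import subprocess
import sys

ex = subprocess.Popen([sys.executable, '-c', "print('model output')"],
                      stdout=subprocess.PIPE,
                      stderr=subprocess.PIPE)
for line in iter(ex.stdout.readline, b''):  # b'' signals EOF
    sys.stdout.write(line.decode(sys.stdout.encoding))
ex.stdout.close()
ex.stderr.close()
ex.wait()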
Example #9
    def to_obs(self, **kwargs):
        #save obs

        ofilename = get_value(self, kwargs, 'ofilename', None)
        flag = get_value(self, kwargs, 'update', [])

        if ofilename:

            obs_points = pd.read_csv(ofilename,
                                     delimiter='\t',
                                     header=None,
                                     names=['index', 'Name', 'lat', 'lon'])
            obs_points = obs_points.set_index('index', drop=True).reset_index(
                drop=True)  #reset index if any

            obs_points = obs_points[
                (obs_points.lon.between(self.grid.Dataset.lons.values.min(),
                                        self.grid.Dataset.lons.values.max()))
                & (obs_points.lat.between(self.grid.Dataset.lats.values.min(
                ), self.grid.Dataset.lats.values.max()))]

            obs_points.reset_index(inplace=True, drop=True)

            try:
                bat = -self.dem.Dataset.fval.values.astype(
                    float)  #reverse for the hydro run/use the adjusted values
            #     mask = bat==999999
            except AttributeError:
                bat = -self.dem.Dataset.ival.values.astype(
                    float
                )  #reverse for the hydro run/revert to interpolated values

            b = np.ma.masked_array(bat, np.isnan(bat))  # mask land

            i_indx, j_indx = self.vpoints(self.grid.Dataset, obs_points, b,
                                          **kwargs)

            obs_points['i'] = i_indx
            obs_points['j'] = j_indx

            #drop NaN points
            obs = obs_points.dropna().copy()

            obs = obs.reset_index(drop=True)  #reset index

            obs['i'] = obs['i'].values.astype(int)
            obs['j'] = obs['j'].values.astype(int)
            obs['new_lat'] = self.grid.Dataset.y[
                obs.i.values].values  #Valid point
            obs['new_lon'] = self.grid.Dataset.x[obs.j.values].values

            self.obs = obs  #store it

            obs.Name = obs.Name.str.strip().apply(lambda name: name.replace(
                ' ', ''))  #Remove spaces to write to file
            sort = sorted(obs.Name.values, key=len)
            try:
                # width of the longest name, used to align the columns below
                wsize = len(sort[-1])
            except IndexError:  # no stations within the domain
                wsize = 0

        if (not flag) or ('all' in flag) or ('model' in flag):

            # Add one in the indices due to python/fortran convention
            try:
                with open(self.rpath + '{}.obs'.format(self.tag), 'w') as f:
                    for l in range(obs.shape[0]):
                        f.write('{0:<{3}}{1:>{3}}{2:>{3}}\n'.format(
                            obs.Name[l][:20], obs.j[l] + 1, obs.i[l] + 1,
                            wsize))
            except Exception:  #TODO
                pass
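
The .obs writer pads every column to the width of the longest station name and shifts both indices by one for the Fortran side. A one-line illustration with made-up values:

print('{0:<{3}}{1:>{3}}{2:>{3}}'.format('Venice'[:20], 12 + 1, 34 + 1, 10))
# Venice            13        35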
Example #10
    def config(self, **kwargs):

        mdf_file = kwargs.get('config_file', None)
        dic = get_value(self, kwargs, 'parameters', None)

        if mdf_file:
            self.mdf = pd.read_csv(mdf_file, sep='=')
        else:
            self.mdf = pd.read_csv(DATA_PATH + 'default.mdf', sep='=')

        self.mdf = self.mdf.set_index(self.mdf.columns[0])  # set index

        mdfidx = self.mdf.index.str.strip()  # store the stripped names

        #define grid file
        self.mdf.loc[self.mdf.index.str.contains('Filcco')] = '#{}#'.format(
            self.tag + '.grd')

        #define enc file
        self.mdf.loc[self.mdf.index.str.contains('Filgrd')] = '#{}#'.format(
            self.tag + '.enc')

        #define dep file
        self.mdf.loc[self.mdf.index.str.contains('Fildep')] = '#{}#'.format(
            self.tag + '.dep')

        #define obs file
        if self.ofilename:
            self.mdf.loc[self.mdf.index.str.contains(
                'Filsta')] = '#{}#'.format(self.tag + '.obs')
        else:
            self.mdf.loc[self.mdf.index.str.contains('Filsta')] = '##'

        # adjust ni,nj
        nj, ni = self.nj, self.ni
        self.mdf.loc[self.mdf.index.str.contains(
            'MNKmax')] = '{} {} {}'.format(ni + 1, nj + 1,
                                           1)  # add one like ddb

        # adjust iteration date
        self.mdf.loc[self.mdf.index.str.contains('Itdate')] = '#{}#'.format(
            self.date.strftime(format='%Y-%m-%d'))

        #set time unit
        self.mdf.loc[self.mdf.index.str.contains('Tunit')] = '#M#'

        #adjust iteration start
        Tstart = self.start_date.hour * 60
        self.mdf.loc[self.mdf.index.str.contains('Tstart')] = Tstart

        #adjust iteration stop
        Tstop = Tstart + int(
            pd.to_timedelta(self.time_frame).total_seconds() / 60)
        self.mdf.loc[self.mdf.index.str.contains('Tstop')] = Tstop

        #adjust time for output
        mstep = get_value(self, kwargs, 'map_step', 0)
        hstep = get_value(self, kwargs, 'his_step', 0)
        pstep = get_value(self, kwargs, 'pp_step', 0)
        rstep = get_value(self, kwargs, 'restart_step', 0)

        if rstep == -1:  # save a restart file at the end
            rstep = Tstop

        self.mdf.loc[self.mdf.index.str.contains(
            'Flmap')] = '{:d} {:d} {:d}'.format(Tstart, mstep, Tstop)
        self.mdf.loc[self.mdf.index.str.contains(
            'Flhis')] = '{:d} {:d} {:d}'.format(Tstart, hstep, Tstop)
        self.mdf.loc[self.mdf.index.str.contains(
            'Flpp')] = '{:d} {:d} {:d}'.format(Tstart, pstep, Tstop)
        self.mdf.loc[self.mdf.index.str.contains('Flrst')] = rstep

        #time interval to smooth the hydrodynamic boundary conditions
        self.mdf.loc[self.mdf.index.str.contains('Tlfsmo')] = 0.

        if not self.atm:
            self.mdf.loc['Sub1'] = ' '

        # set tide only run
        if self.tide:
            self.mdf.loc[self.mdf.index.str.contains(
                'Filbnd')] = '#{}#'.format(self.tag + '.bnd')
            self.mdf.loc[self.mdf.index.str.contains(
                'Filana')] = '#{}#'.format(self.tag + '.bca')

        # netCDF output
        if 'FlNcdf' not in mdfidx:
            self.mdf = self.mdf.reindex(self.mdf.index.values.tolist() +
                                        ['FlNcdf '])

        self.mdf.loc['FlNcdf '] = '#map his#'

        other = kwargs.get('config', None)
        if other:
            # Check for any other mdf variable in input
            for key, val in other.items():
                if key in mdfidx:
                    self.mdf.loc[self.mdf.index.str.contains(key)] = val
                else:
                    self.mdf.loc[key] = val

        output = kwargs.get('output', False)

        if output:
            #save mdf
            path = get_value(self, kwargs, 'rpath', './d3d/')
            self.mdf.to_csv(path + self.tag + '.mdf', sep='=')
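
The mdf file is treated as a two-column 'key = value' table throughout: read with '=' as the separator, indexed by the key column, edited via .loc, and written back the same way. A toy round trip on synthetic content:

import io

import pandas as pd

text = 'Itdate =#2020-01-01#\nTunit  =#M#\nTstart =0\n'
mdf = pd.read_csv(io.StringIO(text), sep='=', header=None)
mdf = mdf.set_index(mdf.columns[0])
mdf.loc[mdf.index.str.contains('Tstart')] = 60
print(mdf.to_csv(sep='=', header=False))
# Itdate =#2020-01-01#
# Tunit  =#M#
# Tstart =60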
Example #11
    def __init__(self, **kwargs):
        
        self.tag = kwargs.get('tag', None)
        self.mpath = kwargs.get('mpath', None)
        self.wrf_path = kwargs.get('WRF_PATH', None)
        self.meteo = kwargs.get('meteo', None)
        

        if self.meteo == 'erai':
            self.metgrid = 'METGRID.TBL.ERAI'
        elif self.meteo == 'ecmwf':
            self.metgrid = 'METGRID.TBL.ARW'
        else:
            raise ValueError(
                'unsupported meteo source: {}'.format(self.meteo))

        rpath = get_value(self, kwargs, 'rpath', './')


        if not os.path.exists(rpath):
            os.makedirs(rpath)  # create top folder
        
        if not os.path.exists(rpath+'/wps'):
            os.makedirs(rpath+'/wps')  # create wps folder

        if not os.path.exists(rpath+'/wrf'):
            os.makedirs(rpath+'/wrf')  # create wrf folder


        #links
        cfiles = glob.glob(self.wrf_path + '/WPS/*.exe')
        for filename in cfiles:
            os.symlink(filename,
                       rpath + '/wps/' + os.path.basename(filename))

        os.symlink(self.wrf_path + '/WPS/geogrid', rpath + '/wps/geogrid')

        os.symlink(self.wrf_path + '/WPS/link_grib.csh',
                   rpath + '/wps/link_grib.sh')

        os.symlink(self.wrf_path + '/WPS/metgrid/' + self.metgrid,
                   self.wrf_path + '/WPS/metgrid/METGRID.TBL')

        os.symlink(self.wrf_path + '/WPS/metgrid', rpath + '/wps/metgrid')

        #link Vtable
        os.symlink(self.wrf_path + '/Vtable', rpath + '/wps/Vtable')

        #setup wrf folder

        cfiles = []
        for pattern in ('*_DATA', 'ETAMP*', '*.TBL', 'tr*', '*.txt',
                        '*.tbl', '*.formatted'):
            cfiles.extend(glob.glob(self.wrf_path + '/WRFV3/run/' + pattern))

        cfiles.extend(glob.glob(self.wrf_path + '/WRFV3/main/*.exe'))

        for filename in cfiles:
            os.symlink(filename,
                       rpath + '/wrf/' + os.path.basename(filename))

        
        
    def namelist(self, **kwargs):