Example 1
 def _check_general(self):
     '''
     check general options in json config file
       - date_start and date_end have a valid format
       - end_date is after start_date
       - boundary_interval is an integer
     '''
     # check if start_date and end_date are in valid format
     start_date = utils.return_validate(
         self.config['options_general']['date_start'])
     end_date = utils.return_validate(
         self.config['options_general']['date_end'])
     # end_date should be after start_date
     if (start_date >= end_date):
         message = ('date_end should be after date_start in %s'
                    % self.configfile)
         logger.error(message)
         raise IOError(message)
     # boundary interval should be an int number of hours
     assert isinstance(self.config['options_general']['boundary_interval'],
                       int), ('boundary_interval should be given as an '
                              'integer in %s' % self.configfile)
     # boundary interval should not be larger than time between start_date
     # and end_date
     assert (
         (self.config['options_general']['boundary_interval'] * 3600) <
         (end_date - start_date).total_seconds()), (
             'boundary interval is larger than time between start_date and '
             'end_date')
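
Throughout these examples, `utils.return_validate` turns the `date_start`/`date_end` strings from config.json into `datetime` objects. A minimal sketch of such a helper, assuming a `'%Y-%m-%d_%H:%M:%S'` date format (the actual wrfpy implementation may differ):

from datetime import datetime

def return_validate(date_text, fmt='%Y-%m-%d_%H:%M:%S'):
    '''parse a config date string into a datetime object (sketch)'''
    try:
        return datetime.strptime(date_text, fmt)
    except ValueError:
        raise IOError('invalid date format: %s (expected %s)' % (date_text, fmt))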
Example 2
 def __init__(self, datestart, dateend):
     config.__init__(self)
     self.startdate = datestart
     self.enddate = dateend
     # read WRF namelist in WRF work_dir
     wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input'])
     # get number of domains
     self.ndoms = wrf_nml['domains']['max_dom']
     self.rundir = self.config['filesystem']['wrf_run_dir']
     # archive in subdir per year
     self.archivedir = os.path.join(
         self.config['filesystem']['archive_dir'],
         str(self.startdate.year))
     utils._create_directory(self.archivedir)
     # define static variables
     self.define_vars_static()
     # define variables that need to be stored hourly
     self.define_vars_hourly()
     # define variables that need to be stored every minute for the inner
     # domain, hourly for the other domains
     self.define_vars_minute()
     self.define_vars_deac()  # define variables to be deaccumulated
     self.archive()  # archive "normal" variables
     self.archive_wrfvar_input()  # archive wrfvar_input files
     # get start_date from config.json
     start_date = utils.return_validate(
         self.config['options_general']['date_start'])
     if (start_date == datestart):  # very first timestep
         self.archive_static()  # archive static variables
     self.cleanup()
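
The `utils._create_directory` call above is expected to create the per-year archive subdirectory if it does not exist yet; a minimal sketch of such a helper (the real wrfpy helper may differ):

import errno
import os

def _create_directory(path):
    '''create a directory, including parents, if it does not exist (sketch)'''
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise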
Example 3
    def _header(self):
        '''
        define suite.rc header information
        '''
        start_time = utils.datetime_to_string(utils.return_validate(
            self.config['options_general']['date_start']),
                                              format='%Y%m%dT%H')
        end_time = utils.datetime_to_string(utils.return_validate(
            self.config['options_general']['date_end']),
                                            format='%Y%m%dT%H')
        # define template
        template = """#!Jinja2

{{% set START = "{start_time}" %}}
{{% set STOP  = "{end_time}" %}}

"""
        # context variables in template
        context = {"start_time": start_time, "end_time": end_time}
        return template.format(**context)
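
The doubled braces in the template above are how literal Jinja2 markers survive `str.format()`: `{{`/`}}` render as single braces, while single-brace fields are substituted. A quick self-contained check:

template = '{{% set START = "{start_time}" %}}'
print(template.format(start_time='20180101T00'))
# prints: {% set START = "20180101T00" %}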
Example 4
    def _scheduling(self):
        '''
        define suite.rc scheduling information
        '''
        # get start_hour and increment time from config.json
        start_hour = str(
            utils.return_validate(
                self.config['options_general']['date_start']).hour).zfill(2)
        # check if we need to add upp
        try:
            if self.config['options_upp']['upp']:
                uppBlock = "=> upp"
            else:
                uppBlock = ""
        except KeyError:
            uppBlock = ""
        # define template
        template = """[scheduling]
    initial cycle point = {{{{ START }}}}
    final cycle point   = {{{{ STOP }}}}
    [[dependencies]]
        # Initial cycle point
        [[[R1]]]
            graph = \"\"\"
                wrf_init => wps => wrf_real => wrfda => wrf_run {upp}
                obsproc_init => obsproc_run => wrfda
            \"\"\"
        # Repeat every {incr_hour} hours, starting {incr_hour} hours
        # after initial cycle point
        [[[+PT{incr_hour}H/PT{incr_hour}H]]]
            graph = \"\"\"
                wrf_run[-PT{incr_hour}H] => wrf_init => wrf_real => wrfda => wrf_run {upp}
                wrfda[-PT{incr_hour}H] => obsproc_init => obsproc_run => wrfda
            \"\"\"
        # Repeat every {wps_incr_hour} hours, starting {wps_incr_hour} hours
        # after initial cycle point
        [[[+PT{wps_incr_hour}H/PT{wps_incr_hour}H]]]
            graph = \"\"\"
                wps[-PT{wps_incr_hour}H] => wps => wrf_init
            \"\"\"
"""
        # context variables in template
        context = {
            "start_hour": start_hour,
            "incr_hour": self.incr_hour,
            "wps_incr_hour": self.wps_interval_hours,
            "upp": uppBlock
        }
        return template.format(**context)
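
Likewise, the quadruple braces render down to the `{{ START }}`/`{{ STOP }}` Jinja2 variables Cylc expects, while fields such as `{incr_hour}` are filled by `str.format()`. A small illustration with a hypothetical 6-hourly cycle:

snippet = ('    initial cycle point = {{{{ START }}}}\n'
           '    [[[+PT{incr_hour}H/PT{incr_hour}H]]]')
print(snippet.format(incr_hour=6))
# prints:
#     initial cycle point = {{ START }}
#     [[[+PT6H/PT6H]]]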
Example 5
File: scale.py Project: kinow/wrfpy
 def read_init(self, cdom, pdom):
     c_wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(cdom))
     p_wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(pdom))
     self.fg_p = Dataset(os.path.join(p_wrfda_workdir, 'fg'), 'r')
     self.wrfinput_p = Dataset(
         os.path.join(p_wrfda_workdir, 'wrfvar_output'), 'r')
     shutil.copyfile(os.path.join(c_wrfda_workdir, 'fg'),
                     os.path.join(c_wrfda_workdir, 'wrfvar_output'))
     self.wrfinput_c = Dataset(
         os.path.join(c_wrfda_workdir, 'wrfvar_output'), 'r+')
     # get time information from wrfinput file
     dtobj, datestr = self.get_time(
         os.path.join(c_wrfda_workdir, 'wrfvar_output'))
     # get file connection to wrfvar_input file for child domain in wrf run directory
     start_date = utils.return_validate(
         self.config['options_general']['date_start'])
     if (dtobj == start_date):  # very first timestep
         self.wrfinput_c_nolsm = Dataset(
             os.path.join(self.wrf_rundir, ('wrfinput_d0' + str(cdom))),
             'r')
     else:
         self.wrfinput_c_nolsm = Dataset(
             os.path.join(self.wrf_rundir,
                          ('wrfvar_input_d0' + str(cdom) + '_' + datestr)),
             'r')
     # lon/lat information parent domain
     self.XLONG_p = self.wrfinput_p.variables['XLONG'][0, :]
     self.XLAT_p = self.wrfinput_p.variables['XLAT'][0, :]
     # lon/lat information child domain
     self.XLONG_c = self.wrfinput_c.variables['XLONG'][0, :]
     self.XLAT_c = self.wrfinput_c.variables['XLAT'][0, :]
     # lon/lat information parent domain
     self.XLONG_U_p = self.wrfinput_p.variables['XLONG_U'][0, :]
     self.XLAT_U_p = self.wrfinput_p.variables['XLAT_U'][0, :]
     # lon/lat information child domain
     self.XLONG_U_c = self.wrfinput_c.variables['XLONG_U'][0, :]
     self.XLAT_U_c = self.wrfinput_c.variables['XLAT_U'][0, :]
     # V
     # lon/lat information parent domain
     self.XLONG_V_p = self.wrfinput_p.variables['XLONG_V'][0, :]
     self.XLAT_V_p = self.wrfinput_p.variables['XLAT_V'][0, :]
     # lon/lat information child domain
     self.XLONG_V_c = self.wrfinput_c.variables['XLONG_V'][0, :]
     self.XLAT_V_c = self.wrfinput_c.variables['XLAT_V'][0, :]
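
The lon/lat reads above all follow the same netCDF4 pattern: open the file, index the variable with `[0, :]` to take the first (and only) time slice, and keep the resulting 2D array. A minimal sketch with a hypothetical file name:

from netCDF4 import Dataset

nc = Dataset('wrfvar_output', 'r')   # hypothetical file name
xlong = nc.variables['XLONG'][0, :]  # 2D longitudes at the first time step
xlat = nc.variables['XLAT'][0, :]    # 2D latitudes at the first time step
nc.close()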
Example 6
def main(datestring, interval):
    '''
    Main function to run UPP postprocessing for a timestep:
      - converts cylc timestring to datetime object
      - calls upp.run_unipost_file()
    '''
    dt = utils.convert_cylc_time(datestring)
    postprocess = upp()
    # construct wrfout name for domain 1
    dt_str = dt.strftime('%Y-%m-%d_%H:%M:%S')
    wrfout_name = 'wrfout_d01_' + dt_str
    wrfout_file = os.path.join(
        postprocess.config['filesystem']['wrf_run_dir'], wrfout_name)
    start_date = utils.return_validate(
        postprocess.config['options_general']['date_start'])
    if (start_date == dt):  # very first timestep
        postprocess.run_unipost_file(wrfout_file, use_t0=True)
    else:
        postprocess.run_unipost_file(wrfout_file, use_t0=False)
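
`utils.convert_cylc_time` is assumed to turn the Cylc cycle point string passed in by the suite into a `datetime` object. A minimal sketch, assuming cycle points of the form `20180101T0600Z` (the real helper may accept other Cylc formats):

from datetime import datetime

def convert_cylc_time(datestring):
    '''convert a Cylc cycle point such as '20180101T0600Z' to datetime (sketch)'''
    return datetime.strptime(datestring, '%Y%m%dT%H%MZ')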
Example 7
 def __init__(self, datestring):
     config.__init__(self)
     dt = utils.convert_cylc_time(datestring)
     postprocess = upp()
     # construct wrfout name for domain 1
     dt_str = dt.strftime('%Y-%m-%d_%H:%M:%S')
     wrfout_name = 'wrfout_d01_' + dt_str
     wrfout_file = os.path.join(self.config['filesystem']['wrf_run_dir'],
                                wrfout_name)
     start_date = utils.return_validate(
         postprocess.config['options_general']['date_start'])
     upp_interval = postprocess.config['options_upp']['upp_interval']
     if (start_date == dt):  # very first timestep
         postprocess.run_unipost_file(wrfout_file,
                                      frequency=upp_interval,
                                      use_t0=True)
     else:
         postprocess.run_unipost_file(wrfout_file,
                                      frequency=upp_interval,
                                      use_t0=False)
Example 8
 def archive_wrfvar_input(self):
     '''
     archive wrfvar_input files
     '''
     # loop over all domains
     wrfvar_archivedir = os.path.join(self.archivedir, 'wrfvar')
     utils._create_directory(wrfvar_archivedir)
     start_date = utils.return_validate(
             self.config['options_general']['date_start'])
     for domain in range(1, self.ndoms + 1):
         # iterate over all dates for which a wrfvar_input file was written
         for cdate in pandas.date_range(self.startdate, self.enddate,
                                        freq='2H')[:-1]:
             if (cdate != start_date):
                 datestr_in = cdate.strftime('%Y-%m-%d_%H:%M:%S')
                 # define input and output paths
                 input_fn = ('wrfvar_input' + '_d0' + str(domain) +
                             '_' + datestr_in)
                 input_file = os.path.join(self.rundir, input_fn)
                 output_file = os.path.join(wrfvar_archivedir, input_fn)
                 # copy wrfvar_input to archive dir
                 shutil.copyfile(input_file, output_file)
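
The `pandas.date_range(...)[:-1]` idiom above yields two-hourly analysis times between the start and end of the run and drops the end time itself, so only intermediate `wrfvar_input` files are archived. A quick illustration (recent pandas versions prefer the lowercase frequency string `'2h'`):

import pandas

dates = pandas.date_range('2018-01-01 00:00', '2018-01-01 06:00',
                          freq='2H')[:-1]
print([d.strftime('%Y-%m-%d_%H:%M:%S') for d in dates])
# prints: ['2018-01-01_00:00:00', '2018-01-01_02:00:00', '2018-01-01_04:00:00']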
Example 9
 def prepare_wrfda_namelist(self, domain):
     # set domain specific workdir
     wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
     # read WRFDA namelist, use namelist.wrfda as supplied in config.json
     # if not supplied, fall back to default from WRFDA
     if utils.check_file_exists(
             self.config['options_wrfda']['namelist.wrfda'], boolean=True):
         wrfda_namelist = self.config['options_wrfda']['namelist.wrfda']
     else:
         wrfda_namelist = os.path.join(
             self.config['filesystem']['wrfda_dir'],
             'var/test/tutorial/namelist.input')
     wrfda_nml = f90nml.read(wrfda_namelist)
     # read WRF namelist in WRF work_dir
     wrf_nml = f90nml.read(
         os.path.join(self.config['filesystem']['wrf_run_dir'],
                      'namelist.input'))
     # set domain specific information in namelist
     for var in ['e_we', 'e_sn', 'e_vert', 'dx', 'dy']:
         # get variable from ${RUNDIR}/namelist.input
         var_value = wrf_nml['domains'][var]
         # set domain specific variable in WRFDA_WORKDIR/namelist.input
         wrfda_nml['domains'][var] = var_value[domain - 1]
     for var in [
             'mp_physics', 'ra_lw_physics', 'ra_sw_physics', 'radt',
             'sf_sfclay_physics', 'sf_surface_physics', 'bl_pbl_physics',
             'cu_physics', 'cudt', 'num_soil_layers'
     ]:
         # get variable from ${RUNDIR}/namelist.input
         var_value = wrf_nml['physics'][var]
         # set domain specific variable in WRFDA_WORKDIR/namelist.input
         try:
             wrfda_nml['physics'][var] = var_value[domain - 1]
         except TypeError:
             wrfda_nml['physics'][var] = var_value
     obsproc_nml = f90nml.read(
         os.path.join(self.obs[domain][0], 'namelist.obsproc'))
     # sync wrfda namelist with obsproc namelist
     wrfda_nml['wrfvar18']['analysis_date'] = (
         obsproc_nml['record2']['time_analysis'])
     wrfda_nml['wrfvar21']['time_window_min'] = (
         obsproc_nml['record2']['time_window_min'])
     wrfda_nml['wrfvar22']['time_window_max'] = (
         obsproc_nml['record2']['time_window_max'])
     if self.check_cv5_cv7():
         wrfda_nml['wrfvar7']['cv_options'] = int(
             self.config['options_wrfda']['cv_type'])
         wrfda_nml['wrfvar6']['max_ext_its'] = 2
         wrfda_nml['wrfvar5']['check_max_iv'] = True
     else:
         wrfda_nml['wrfvar7']['cv_options'] = 3
     tana = utils.return_validate(
         obsproc_nml['record2']['time_analysis'][:-6])
     wrfda_nml['time_control']['start_year'] = tana.year
     wrfda_nml['time_control']['start_month'] = tana.month
     wrfda_nml['time_control']['start_day'] = tana.day
     wrfda_nml['time_control']['start_hour'] = tana.hour
     wrfda_nml['time_control']['end_year'] = tana.year
     wrfda_nml['time_control']['end_month'] = tana.month
     wrfda_nml['time_control']['end_day'] = tana.day
     wrfda_nml['time_control']['end_hour'] = tana.hour
     # save changes to wrfda_nml
     utils.silentremove(os.path.join(wrfda_workdir, 'namelist.input'))
     wrfda_nml.write(os.path.join(wrfda_workdir, 'namelist.input'))
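
The namelist handling above is the standard f90nml read/patch/write pattern: `f90nml.read()` returns a dict-like `Namelist` keyed by group and variable, which can be modified in place and written back out. A minimal sketch with hypothetical file names:

import f90nml

nml = f90nml.read('namelist.input')    # hypothetical input namelist
nml['time_control']['start_hour'] = 6  # patch a single value
nml.write('namelist.patched')          # write the modified namelist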
Example 10
File: wrf.py Project: kinow/wrfpy
 def prepare_wrf_config(self, datestart, dateend):
     '''
     Copy over default WRF namelist and modify time_control variables
     '''
     from datetime import datetime
     # check if both datestart and dateend are a datetime instance
     if not all([isinstance(dt, datetime) for dt in [datestart, dateend]]):
         raise TypeError(
             "datestart and dateend must be an instance of datetime")
     # read WRF namelist in WRF work_dir
     wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input'])
     # get number of domains
     ndoms = wrf_nml['domains']['max_dom']
     # check if ndoms is an integer and >0
     if not (isinstance(ndoms, int) and ndoms > 0):
         raise ValueError("'domains_max_dom' namelist variable should be "
                          "an integer > 0")
     # define dictionary with time control values
     time_control = {
         'time_control:start_year': datestart.year,
         'time_control:start_month': datestart.month,
         'time_control:start_day': datestart.day,
         'time_control:start_hour': datestart.hour,
         'time_control:end_year': dateend.year,
         'time_control:end_month': dateend.month,
         'time_control:end_day': dateend.day,
         'time_control:end_hour': dateend.hour,
     }
     # loop over dictionary and set start/end date parameters
     for el in time_control.keys():
         group, key = el.split(':')
         if not isinstance(time_control[el], list):
             wrf_nml[group][key] = [time_control[el]] * ndoms
         else:
             wrf_nml[group][key] = time_control[el] * ndoms
     # set interval_seconds to the boundary_interval given in config.json
     wrf_nml['time_control']['interval_seconds'] = int(
         self.config['options_general']['boundary_interval'])
     # calculate datetime.timedelta between datestart and dateend
     td = dateend - datestart
     # set run_days, run_hours, run_minutes, run_seconds
     td_days, td_hours, td_minutes, td_seconds = utils.days_hours_minutes_seconds(
         td)
     wrf_nml['time_control']['run_days'] = td_days
     wrf_nml['time_control']['run_hours'] = td_hours
     wrf_nml['time_control']['run_minutes'] = td_minutes
     wrf_nml['time_control']['run_seconds'] = td_seconds
     # check if WUR urban config is to be used
     if 'sf_urban_use_wur_config' in wrf_nml['physics']:
         # get start_date from config.json
         start_date = utils.return_validate(
             self.config['options_general']['date_start'])
         # if very first timestep, don't initialize urban parameters from file
         if (wrf_nml['physics']['sf_urban_use_wur_config']
                 and start_date == datestart):
             wrf_nml['physics']['sf_urban_init_from_file'] = False
         else:
             wrf_nml['physics']['sf_urban_init_from_file'] = True
     # write namelist.input
     wrf_nml.write(
         os.path.join(self.config['filesystem']['wrf_run_dir'],
                      'namelist.input'))
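
`utils.days_hours_minutes_seconds` is assumed to split the run-length `timedelta` into the whole days, hours, minutes and seconds that `run_days`/`run_hours`/`run_minutes`/`run_seconds` expect. A minimal sketch (the real helper may differ):

def days_hours_minutes_seconds(td):
    '''split a datetime.timedelta into days, hours, minutes, seconds (sketch)'''
    minutes, seconds = divmod(td.seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return td.days, hours, minutes, seconds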
Example 11
    def applyToGrid(self, lat, lon, diffT, domain):
        # load netcdf files
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        wrfinputFile = os.path.join(wrfda_workdir, 'wrfvar_output')
        lat2, lon2, lu_ind2 = self.getCoords(wrfinputFile)
        # get datetime from wrfinput file
        dtobj, datestr = self.get_time(wrfinputFile)
        # if not ((lat==lat2) and (lon==lon2)) we need to interpolate
        if not (np.array_equal(lat, lat2) and np.array_equal(lon, lon2)):
            # do interpolation to get new diffT
            diffT = interpolate.griddata(
                (lon.reshape(-1), lat.reshape(-1)),
                diffT.reshape(-1), (lon2.reshape(-1), lat2.reshape(-1)),
                method='cubic').reshape(np.shape(lon2))
            diffT[lu_ind2 != 1] = 0  # set to 0 if LU_IND!=1
        # open wrfvar_output (output after data assimilation)
        self.wrfinput2 = Dataset(os.path.join(wrfda_workdir, 'wrfvar_output'),
                                 'r+')
        # open wrfvar_input (input before DA, i.e. last step of previous run)
        start_date = utils.return_validate(
            self.config['options_general']['date_start'])
        if (dtobj == start_date):  # very first timestep
            self.wrfinput3 = Dataset(
                os.path.join(self.wrf_rundir, ('wrfinput_d0' + str(domain))),
                'r')
            return
        else:
            self.wrfinput3 = Dataset(
                os.path.join(
                    self.wrf_rundir,
                    ('wrfvar_input_d0' + str(domain) + '_' + datestr)), 'r')
        # define variables to increment
        # variables_2d = ['TC_URB', 'TR_URB', 'TB_URB', 'TG_URB', 'TS_URB']
        # variables_3d = ['TRL_URB', 'TBL_URB', 'TGL_URB', 'TSLB']
        # begin determining multiplying factor
        rhocp = 1231
        uc_urb = self.wrfinput2.variables['UC_URB'][:]
        lp_urb = self.wrfinput2.variables['BUILD_AREA_FRACTION'][:]
        hgt_urb = self.wrfinput2.variables['BUILD_HEIGHT'][:]
        lb_urb = self.wrfinput2.variables['BUILD_SURF_RATIO'][:]
        frc_urb = self.wrfinput2.variables['FRC_URB2D'][:]
        chc_urb = self.wrfinput2.variables['CHC_SFCDIF'][:]
        R = numpy.maximum(numpy.minimum(lp_urb / frc_urb, 0.9), 0.1)
        RW = 1.0 - R
        HNORM = 2. * hgt_urb * frc_urb / (lb_urb - lp_urb)
        HNORM[lb_urb <= lp_urb] = 10.0
        ZR = numpy.maximum(numpy.minimum(hgt_urb, 100.0), 3.0)
        h = ZR / HNORM
        W = 2 * h
        # set safety margin on W/RW >=8 or else SLUCM could misbehave
        # make sure to use the same safety margin in module_sf_urban.F
        W[(W / RW) > 8.0] = ((8.0 / (W / RW)) * W)[(W / RW) > 8.0]
        CW = numpy.zeros(numpy.shape(uc_urb))
        CW[uc_urb > 5] = 7.51 * uc_urb[uc_urb > 5]**0.78
        CW[uc_urb <= 5] = 6.15 + 4.18 * uc_urb[uc_urb <= 5]
        DTW = diffT * (1 + ((RW * rhocp) / (W + RW)) * (chc_urb / CW))

        diffT = DTW  # change 09/01/2018
        diffT = numpy.nan_to_num(diffT)  # replace nan by 0
        # apply temperature changes
        TSK = self.wrfinput2.variables['TSK']
        TSK[:] = TSK[:] + diffT
        TB_URB = self.wrfinput2.variables['TB_URB']
        TB_URB[:] = TB_URB[:] + diffT
        TG_URB = self.wrfinput2.variables['TG_URB']
        TG_URB[:] = TG_URB[:] + diffT
        TS_URB = self.wrfinput2.variables['TS_URB']
        TS_URB[:] = TS_URB[:] + diffT
        TGR_URB = self.wrfinput2.variables['TGR_URB']
        TGR_URB[:] = TGR_URB[:] + diffT

        # wall layer temperature
        try:
            TBL_URB_factors = self.config['options_urbantemps']['TBL_URB']
        except KeyError:
            # fallback values if none are defined in config
            # these may not work correctly for other cities than Amsterdam
            TBL_URB_factors = [0.823, 0.558, 0.379, 0.257]
        if not (isinstance(TBL_URB_factors, list)
                and len(TBL_URB_factors) > 1):
            TBL_URB_factors = [0.823, 0.558, 0.379, 0.257]
        TBL_URB = self.wrfinput2.variables['TBL_URB']
        levs = numpy.shape(TBL_URB[:])[1]
        for lev in range(0, levs):
            try:
                TBL_URB[0, lev, :] = (TBL_URB[0, lev, :] +
                                      diffT * float(TBL_URB_factors[lev]))
            except IndexError:
                # no factor for this layer => no increment
                pass

        # road layer temperature
        try:
            TGL_URB_factors = self.config['options_urbantemps']['TGL_URB']
        except KeyError:
            # fallback values if none are defined in config
            # these may not work correctly for other cities than Amsterdam
            TGL_URB_factors = [0.776, 0.170, 0.004]
        if not (isinstance(TGL_URB_factors, list)
                and len(TGL_URB_factors) > 1):
            TGL_URB_factors = [0.776, 0.170, 0.004]
        TGL_URB = self.wrfinput2.variables['TGL_URB']
        levs = numpy.shape(TGL_URB[:])[1]
        for lev in range(0, levs):
            try:
                TGL_URB[0, lev, :] = (TGL_URB[0, lev, :] +
                                      diffT * float(TGL_URB_factors[lev]))
            except IndexError:
                # no factor for this layer => no increment
                pass

        #  adjustment soil for vegetation fraction urban cell
        try:
            TSLB_factors = self.config['options_urbantemps']['TSLB']
        except KeyError:
            # fallback values if none are defined in config
            # these may not work correctly for other cities than Amsterdam
            TSLB_factors = [0.507, 0.009]
        if not (isinstance(TSLB_factors, list) and len(TSLB_factors) > 1):
            TSLB_factors = [0.507, 0.009]
        TSLB = self.wrfinput2.variables['TSLB']  # after update_lsm
        TSLB_in = self.wrfinput3.variables['TSLB']  # before update_lsm
        levs = numpy.shape(self.wrfinput2.variables['TSLB'][:])[1]
        for lev in range(0, levs):
            # reset TSLB for urban cells to value before update_lsm
            TSLB[0, lev, :][lu_ind2 == 1] = TSLB_in[0, lev, :][lu_ind2 == 1]
            try:
                TSLB[0, lev, :] = (TSLB[0, lev, :] +
                                   diffT * float(TSLB_factors[lev]))
            except IndexError:
                pass

        # close netcdf file
        self.wrfinput2.close()
        self.wrfinput3.close()
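
The interpolation step near the top of this example regrids the temperature increment from the parent grid to the child grid with `scipy.interpolate.griddata`, using flattened (lon, lat) pairs as scattered sample points. A self-contained sketch with made-up coordinates and values:

import numpy as np
from scipy import interpolate

# hypothetical coarse (parent) grid and increment field
lon, lat = np.meshgrid(np.linspace(4.0, 5.0, 10), np.linspace(52.0, 52.5, 10))
diffT = np.random.rand(*lon.shape)
# hypothetical fine (child) grid
lon2, lat2 = np.meshgrid(np.linspace(4.0, 5.0, 25), np.linspace(52.0, 52.5, 25))
# cubic interpolation of scattered samples onto the child grid
diffT_c = interpolate.griddata(
    (lon.reshape(-1), lat.reshape(-1)), diffT.reshape(-1),
    (lon2.reshape(-1), lat2.reshape(-1)), method='cubic').reshape(lon2.shape)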