Example #1
 def _run_metgrid(self, j_id=None):
   '''
   run metgrid.exe (locally or using slurm script defined in config.json)
   '''
   if self.config['options_slurm']['slurm_metgrid.exe']:
     if j_id:
       mid = "--dependency=afterok:%d" %j_id
       metgrid_command = ['sbatch', mid, self.config['options_slurm']['slurm_metgrid.exe']]
     else:
       metgrid_command = ['sbatch', self.config['options_slurm']['slurm_metgrid.exe']]
     utils.check_file_exists(metgrid_command[-1])
     utils.silentremove(os.path.join(self.wps_workdir, 'metgrid', 'metgrid.exe'))
     os.symlink(os.path.join(self.config['filesystem']['wps_dir'],'metgrid','metgrid.exe'),
                os.path.join(self.wps_workdir, 'metgrid', 'metgrid.exe'))
     try:
       res = subprocess.check_output(metgrid_command, cwd=self.wps_workdir,
                                     stderr=utils.devnull())
       j_id = int(res.split()[-1])  # slurm job-id
     except subprocess.CalledProcessError:
       logger.error('Metgrid failed: %s' % metgrid_command)
       raise  # re-raise exception
     return j_id  # return slurm job-id
   else:
     metgrid_command = os.path.join(self.config['filesystem']['wps_dir'],
                             'metgrid', 'metgrid.exe')
     utils.check_file_exists(metgrid_command)
     try:
       subprocess.check_call(metgrid_command, cwd=self.wps_workdir,
                             stdout=utils.devnull(), stderr=utils.devnull())
     except subprocess.CalledProcessError:
      logger.error('Metgrid failed: %s' % metgrid_command)
       raise  # re-raise exception
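
The Slurm job id returned above is what chains the workflow stages together: the next sbatch call can pass --dependency=afterok:<id> so it only starts once metgrid succeeded. A minimal sketch of that chaining, assuming a hypothetical submit helper and script names that are not part of the project:

import subprocess

def submit(script, after_id=None):
    '''Submit a Slurm batch script, optionally depending on an earlier job.'''
    command = ['sbatch']
    if after_id is not None:
        # only start after job <after_id> has finished successfully
        command.append('--dependency=afterok:%d' % after_id)
    command.append(script)
    # sbatch prints 'Submitted batch job <id>'; the id is the last token
    out = subprocess.check_output(command)
    return int(out.split()[-1])

ungrib_id = submit('slurm_ungrib.sh')               # hypothetical script
metgrid_id = submit('slurm_metgrid.sh', ungrib_id)  # waits for ungrib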
Example #2
 def _prepare_namelist(self, datestart, dateend):
   '''
   prepare wps namelist
   '''
   # read WPS namelist in WPS work_dir
   wps_nml = f90nml.read(self.config['options_wps']['namelist.wps'])
   # get number of domains
   ndoms = wps_nml['share']['max_dom']
   # check if ndoms is an integer and >0
   if not (isinstance(ndoms, int) and ndoms > 0):
     raise ValueError("'max_dom' namelist variable should be an "
                      "integer > 0")
   # check if both datestart and dateend are a datetime instance
   if not all([ isinstance(dt, datetime) for dt in [datestart, dateend] ]):
     raise TypeError("datestart and dateend must be an instance of datetime")
   # set new datestart and dateend
   wps_nml['share']['start_date'] = [datetime.strftime(
     datestart, '%Y-%m-%d_%H:%M:%S')] * ndoms
   wps_nml['share']['end_date'] = [datetime.strftime(
     dateend, '%Y-%m-%d_%H:%M:%S')] * ndoms
   # write namelist in wps work_dir
   utils.silentremove(os.path.join(
     self.config['filesystem']['work_dir'], 'wps', 'namelist.wps'))
   wps_nml.write(os.path.join(
     self.config['filesystem']['work_dir'], 'wps', 'namelist.wps'))
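
f90nml parses a Fortran namelist into nested, dict-like objects, which is why the method above can edit entries with plain subscript assignment. The silentremove before the write is presumably needed because Namelist.write refuses to overwrite an existing file unless force=True is passed. A minimal round-trip sketch with hypothetical file names:

import f90nml

nml = f90nml.read('namelist.wps')    # parse the Fortran namelist
ndoms = nml['share']['max_dom']      # groups and variables behave like dicts
nml['share']['start_date'] = ['2016-01-01_00:00:00'] * ndoms
nml.write('namelist.wps.new')        # raises if the target file already exists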
Example #3
  def _write_itag(self, wrfout, current_time):
    '''
    Create input file for unipost
      --------content itag file ---------------------------------------
      First line is location of wrfout data
      Second line is required format
      Third line is the modeltime to process
      Fourth line is the model identifier (WRF, NMM)
      -----------------------------------------------------------------
    '''
    logger.debug('Enter write_itag')
    logger.debug('Time in itag file is: %s' % current_time)
    # set itag filename and cleanup
    filename = os.path.join(config['post_dir'], 'itag')
    utils.silentremove(filename)
    # template of itag file
    template = """{wrfout}
netcdf
{current_time}:00:00
NCAR
"""
    # context variables in template
    context = {
      "wrfout":wrfout,
      "current_time":current_time
      }
    # create the itag file and write content to it based on the template
    try:
      with open(filename, 'w') as itag:
        itag.write(template.format(**context))
    except IOError:
      logger.error('Unable to write itag file: %s' % filename)
      raise  # re-raise exception
    logger.debug('Leave write_itag')
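
For reference, with wrfout set to a hypothetical '/data/wrfout_d01' and current_time set to '2016-01-01_00', the template above renders to the four-line itag file unipost expects:

/data/wrfout_d01
netcdf
2016-01-01_00:00:00
NCAR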
Example #4
 def _initialize(self):
   '''
   Check if archive dir exists, create if not.
   The archive dir is used to ...
   '''
   utils._create_directory(config['upp_archive_dir'])  # create archive dir
   # create post_dir (remove old one if needed)
   utils.silentremove(config['post_dir'])
   utils._create_directory(config['post_dir'])
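
The utils helpers used throughout these examples (silentremove, _create_directory) are not shown on this page. Given how they are called (silentremove is applied to directories such as post_dir above, so it must handle more than plain files), a plausible minimal sketch is the following; this is an assumption, not the project's actual code:

import errno
import os
import shutil

def silentremove(path):
    '''Remove a file, symlink, or directory tree; ignore a missing path.'''
    try:
        if os.path.isdir(path) and not os.path.islink(path):
            shutil.rmtree(path)  # directories need a recursive remove
        else:
            os.remove(path)
    except OSError as err:
        if err.errno != errno.ENOENT:  # only swallow 'no such file'
            raise

def _create_directory(path):
    '''Create a directory, including parents; no error if it already exists.'''
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise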
Example #5
 def _link_vtable(self):
   '''
   link the required Vtable
   '''
   utils.silentremove(os.path.join(self.wps_workdir, 'Vtable'))
   # TODO: make vtable depend on the boundary source
   vtable = 'Vtable.GFS'
   vtable_path = os.path.join(self.config['filesystem']['wps_dir'], 'ungrib',
                              'Variable_Tables', vtable)
   os.symlink(vtable_path, os.path.join(self.wps_workdir, 'Vtable'))
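
The TODO above wants the Vtable to follow the boundary data source rather than being hard-coded to GFS. A minimal sketch of such a lookup; the mapping entries are illustrative names of Vtables shipped with WPS:

# illustrative mapping from boundary source to WPS Vtable file
VTABLES = {
    'GFS': 'Vtable.GFS',
    'NAM': 'Vtable.NAM',
    'ERA-interim': 'Vtable.ERA-interim.pl',
}

def pick_vtable(boundary_source):
    try:
        return VTABLES[boundary_source]
    except KeyError:
        raise ValueError('no Vtable configured for source: %s' % boundary_source)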
Example #6
  def prepare_wrfda_namelist(self, domain):
    # set domain specific workdir
    wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
    # read WRFDA namelist
    wrfda_namelist = os.path.join(self.config['filesystem']['wrfda_dir'],
                                  'var/test/tutorial/namelist.input')
    wrfda_nml = f90nml.read(wrfda_namelist)

    # read WRF namelist in WRF work_dir
    wrf_nml = f90nml.read(os.path.join(self.config['filesystem']['wrf_run_dir'],
                                       'namelist.input'))
    # set domain specific information in namelist
    for var in ['e_we', 'e_sn', 'e_vert', 'dx', 'dy']:
      # get variable from ${RUNDIR}/namelist.input
      var_value = wrf_nml['domains'][var]
      # set domain specific variable in WRFDA_WORKDIR/namelist.input
      wrfda_nml['domains'][var] = var_value[domain - 1]
    for var in ['mp_physics', 'ra_lw_physics', 'ra_sw_physics', 'radt',
                'sf_sfclay_physics', 'sf_surface_physics', 'bl_pbl_physics',
                'cu_physics', 'cudt', 'num_soil_layers']:
      # get variable from ${RUNDIR}/namelist.input
      var_value = wrf_nml['physics'][var]
      # set domain specific variable in WRFDA_WORKDIR/namelist.input
      try:
        # f90nml gives a per-domain list or a scalar, depending on the variable
        wrfda_nml['physics'][var] = var_value[domain - 1]
      except TypeError:
        wrfda_nml['physics'][var] = var_value
    obsproc_nml = f90nml.read(os.path.join(self.obs[domain][0], 'namelist.obsproc'))
    # sync wrfda namelist with obsproc namelist
    wrfda_nml['wrfvar18']['analysis_date'] = obsproc_nml['record2']['time_analysis']
    wrfda_nml['wrfvar21']['time_window_min'] = obsproc_nml['record2']['time_window_min']
    wrfda_nml['wrfvar22']['time_window_max'] = obsproc_nml['record2']['time_window_max']
    if check_cv5():
      wrfda_nml['wrfvar7']['cv_options'] = 5
      wrfda_nml['wrfvar6']['max_ext_its'] = 2
      wrfda_nml['wrfvar5']['check_max_iv'] = True
    else:
      wrfda_nml['wrfvar7']['cv_options'] = 3
    tana = utils.return_validate(obsproc_nml['record2']['time_analysis'][:-6])
    wrfda_nml['time_control']['start_year'] = tana.year
    wrfda_nml['time_control']['start_month'] = tana.month
    wrfda_nml['time_control']['start_day'] = tana.day
    wrfda_nml['time_control']['start_hour'] = tana.hour
    wrfda_nml['time_control']['end_year'] = tana.year
    wrfda_nml['time_control']['end_month'] = tana.month
    wrfda_nml['time_control']['end_day'] = tana.day
    wrfda_nml['time_control']['end_hour'] = tana.hour
    # save changes to wrfda_nml
    utils.silentremove(os.path.join(wrfda_workdir, 'namelist.input'))
    wrfda_nml.write(os.path.join(wrfda_workdir, 'namelist.input'))
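
The try/except TypeError in the physics loop above is needed because f90nml returns a scalar when a namelist variable holds a single value and a list when it holds one value per domain. A small helper, not part of the project, makes that intent explicit:

def per_domain(value, domain):
    '''Return the entry for a 1-based domain from a namelist value that may
    be a scalar (shared by all domains) or a per-domain list.'''
    if isinstance(value, list):
        return value[domain - 1]
    return value

# e.g.: wrfda_nml['physics'][var] = per_domain(wrf_nml['physics'][var], domain)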
Example #7
 def __init__(self):
     config.__init__(self)
     rundir = self.config['filesystem']['wrf_run_dir']
     wpsdir = os.path.join(self.config['filesystem']['work_dir'], 'wps')
      ## wrf run dir
     # copy new met_em files
     # create list of files to copy
     files = [glob.glob(os.path.join(wpsdir, ext))
              for ext in ['met_em*']]
     # flatten list
     files_flat = [item for sublist in files for item in sublist]
      # copy files into the WRF run dir
      for filename in files_flat:
        shutil.copyfile(filename,
                        os.path.join(rundir, os.path.basename(filename)))
     ## wps workdir
     # create list of files to remove
     files = [glob.glob(os.path.join(wpsdir, ext))
              for ext in ['met_em*', 'FILE*', 'PFILE*', 'GRIBFILE*']]
     # flatten list
     files_flat = [item for sublist in files for item in sublist]
      # remove files silently
      for filename in files_flat:
        utils.silentremove(filename)
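
The glob-then-flatten idiom above recurs in several of these examples (see also Examples #11 and #12). itertools.chain.from_iterable expresses the same thing without the nested list comprehension; a sketch with a hypothetical directory:

import glob
import itertools
import os

wpsdir = '/tmp/workdir/wps'  # hypothetical path
patterns = ['met_em*', 'FILE*', 'PFILE*', 'GRIBFILE*']
matches = itertools.chain.from_iterable(
    glob.glob(os.path.join(wpsdir, pattern)) for pattern in patterns)
for filename in matches:
    os.remove(filename)  # or utils.silentremove, as in the examples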
Example #8
  def create_obsproc_dir(self, workdir):
    '''
    symlink all files required to run obsproc.exe into obsproc workdir
    '''
    # cleanup
    utils.silentremove(workdir)
    # create work directory
    utils._create_directory(workdir)
    # symlink error files
    files = ['DIR.txt', 'HEIGHT.txt', 'PRES.txt', 'RH.txt', 'TEMP.txt',
             'UV.txt', 'obserr.txt']
    for fl in files:
      os.symlink(os.path.join(self.obsproc_dir, fl), 
                 os.path.join(workdir, fl))
    # symlink obsproc.exe
    os.symlink(os.path.join(self.obsproc_dir, 'src', 'obsproc.exe'), 
               os.path.join(workdir, 'obsproc.exe'))
Example #9
 def wrfda_post(self):
   '''
   Move files into WRF run dir after all data assimilation steps have completed
   '''
   # prepare a WRFDA workdirectory for each domain
   for domain in range(1, self.max_dom+1):
     # set domain specific workdir
     wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
     if domain == 1:
       # copy over updated lateral boundary conditions to RUNDIR
       # only for outer domain
       utils.silentremove(os.path.join(self.rundir, 'wrfbdy_d01'))
       shutil.copyfile(os.path.join(wrfda_workdir, 'wrfbdy_d01'),
                       os.path.join(self.rundir, 'wrfbdy_d01'))
     # copy wrfvar_output_d0${domain} to ${RUNDIR}/wrfinput_d0${domain}
     utils.silentremove(os.path.join(self.rundir, 'wrfinput_d0' + str(domain)))
     shutil.copyfile(os.path.join(wrfda_workdir, 'wrfvar_output'),
                     os.path.join(self.rundir, 'wrfinput_d0' + str(domain)))
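
Here wrfvar_output is the analysis file produced by WRFDA; copying it over wrfinput_d0<domain> makes the assimilated fields the initial conditions for the next forecast. Only the outer domain has a lateral boundary file, which is why the wrfbdy_d01 copy is restricted to domain 1.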
Example #10
 def obsproc_init(self, datestart):
   '''
   Sync obsproc namelist with WRF namelist.input
   '''
   from shutil import copyfile
   from datetime import timedelta
   from datetime import datetime
   # convert to unique list
   obslist = list(set(self.obs.values()))
   # read WRF namelist in WRF work_dir
   wrf_nml = f90nml.read(os.path.join(self.config['filesystem']['wrf_run_dir'],
                                      'namelist.input'))
   for obs in obslist:
     # read obsproc namelist
     obsproc_nml = f90nml.read(os.path.join(self.obsproc_dir,
                                            'namelist.obsproc.3dvar.wrfvar-tut'))
     # create obsproc workdir
     self.create_obsproc_dir(obs[0])
     # copy observation in LITTLE_R format to obsproc_dir
     shutil.copyfile(os.path.join(
       self.config['filesystem']['obs_dir'], obs[1]),
       os.path.join(obs[0], obs[1]))
     # sync obsproc namelist variables with wrf namelist.input
     obsproc_nml['record1']['obs_gts_filename'] = obs[1]
     obsproc_nml['record8']['nesti'] = wrf_nml['domains']['i_parent_start']
     obsproc_nml['record8']['nestj'] = wrf_nml['domains']['j_parent_start']
     obsproc_nml['record8']['nestix'] = wrf_nml['domains']['e_we']
     obsproc_nml['record8']['nestjx'] = wrf_nml['domains']['e_sn']
     obsproc_nml['record8']['numc'] = wrf_nml['domains']['parent_id']
     obsproc_nml['record8']['dis'] = wrf_nml['domains']['dx']
     obsproc_nml['record8']['maxnes'] = wrf_nml['domains']['max_dom']
      # set time_analysis, time_window_min, time_window_max
      # check that datestart is a datetime instance
      if not isinstance(datestart, datetime):
        raise TypeError("datestart must be an instance of datetime")
      obsproc_nml['record2']['time_analysis'] = datetime.strftime(
        datestart, '%Y-%m-%d_%H:%M:%S')
     obsproc_nml['record2']['time_window_min'] = datetime.strftime(
       datestart - timedelta(minutes=15), '%Y-%m-%d_%H:%M:%S')
     obsproc_nml['record2']['time_window_max'] = datetime.strftime(
       datestart + timedelta(minutes=15), '%Y-%m-%d_%H:%M:%S')
     # save obsproc_nml
     utils.silentremove(os.path.join(obs[0], 'namelist.obsproc'))
     obsproc_nml.write(os.path.join(obs[0], 'namelist.obsproc'))
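
As a worked example of the time window: with datestart = datetime(2016, 1, 1, 12), the namelist receives time_analysis = '2016-01-01_12:00:00', time_window_min = '2016-01-01_11:45:00', and time_window_max = '2016-01-01_12:15:00', i.e. a 15-minute window on either side of the analysis time.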
Example #11
 def _clean_boundaries_wps(self):
   '''
   clean old leftover boundary files in WPS directory
   '''
   # create list of files to remove
   files = [glob.glob(os.path.join(self.wps_workdir, ext))
            for ext in ['GRIBFILE.*', 'FILE:', 'PFILE:', 'PRES:']]
   # flatten list
   files_flat = [item for sublist in files for item in sublist]
   # remove files silently
   for filename in files_flat:
     utils.silentremove(filename)
Example #12
 def _cleanup_output_files(self):
   '''
   Clean up old output files in post_dir
   '''
   logger.debug('Enter cleanup_output_files')
   file_ext = ['*.out', '*.tm00', 'fort.110', 'itag']
   # collect matching files in post_dir
   files_found = []
   for ext in file_ext:
     files_found.extend(glob.glob(os.path.join(config['post_dir'], ext)))
   # remove files silently
   for fl in files_found:
     utils.silentremove(fl)
   logger.debug('Leave cleanup_output_files')
Example #13
 def create_parame(self, parame_type, domain):
   # set domain specific workdir
   wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
   filename = os.path.join(wrfda_workdir, 'parame.in')
   utils.silentremove(filename)
   # add configuration to parame.in file
   with open(filename, 'w') as parame:
     if parame_type == 'lower':
       ## start config file lower boundary conditions
       parame.write("""&control_param
        da_file = './fg'
        wrf_input = './wrfinput_d01'
        wrf_input = '/home/WUR/haren009/sources/WRFV3/run/wrfinput_d01'
        domain_id = 1
        cycling = .true.
        debug = .true.
        low_bdy_only = .true.
        update_lsm = .false.
        var4d_lbc = .false.
        iswater = 16
    /
    """)
       ## end config file lower boundary conditions
     else:
       ## start config file lateral boundary conditions
       parame.write("""&control_param
        da_file = '/home/haren/model/WRFV3/run2/wrfinput_d01'
        wrf_bdy_file = './wrfbdy_d01'
        domain_id = 1
        cycling = .true.
        debug = .true.
        update_low_bdy = .false.
        update_lateral_bdy = .true.
        update_lsm = .false.
        var4d_lbc = .false.
    /
    """)
       ## end config file lateral boundary conditions
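
The parame.in file written above is itself a Fortran namelist (a single &control_param group), which is why Example #14 can read it back with f90nml, adjust entries such as domain_id and da_file, and write it out again.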
Example #14
 def prepare_updatebc_type(self, boundary_type, datestart, domain):
   # set domain specific workdir
   wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
   if boundary_type == 'lower':
     # define parame.in file
     self.create_parame(boundary_type, domain)
     # copy first guess (wrfout in wrfinput format) for WRFDA
     first_guess = os.path.join(self.rundir, 'wrfvar_input_d0' + str(domain) +
                               '_' + datetime.strftime(datestart,
                                                       '%Y-%m-%d_%H:%M:%S'))
     try:
       shutil.copyfile(first_guess, os.path.join(wrfda_workdir, 'fg'))
     except Exception:
       # fall back to the wrfinput initial conditions if no first guess exists
       shutil.copyfile(os.path.join(self.rundir, 'wrfinput_d0' + str(domain)),
                       os.path.join(wrfda_workdir, 'fg'))
     # read parame.in file
     parame = f90nml.read(os.path.join(wrfda_workdir, 'parame.in'))
     # set domain in parame.in
     parame['control_param']['domain_id'] = domain
     # set wrf_input (IC from WPS and WRF real)
     parame['control_param']['wrf_input'] = str(os.path.join(
       self.rundir, 'wrfinput_d0' + str(domain)))
     # save changes to parame.in file
     utils.silentremove(os.path.join(wrfda_workdir, 'parame.in'))
     parame.write(os.path.join(wrfda_workdir, 'parame.in'))
   elif boundary_type == 'lateral':
     # define parame.in file
     self.create_parame(boundary_type, domain)
     # read parame.in file
     parame = f90nml.read(os.path.join(wrfda_workdir, 'parame.in'))
     # set output from WRFDA
     parame['control_param']['da_file'] = os.path.join(wrfda_workdir, 'wrfvar_output')
     # save changes to parame.in file
     utils.silentremove(os.path.join(wrfda_workdir, 'parame.in'))
     parame.write(os.path.join(wrfda_workdir, 'parame.in'))
   else:
     raise ValueError('unknown boundary type: %s' % boundary_type)
Example #15
 def _save_namelists(self):
   '''
   write coarse and fine WRF namelist.input to the respective run directories
   as namelist.forecast
   '''
   # define namelist directories
   coarse_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
                                      'wrf_coarse')
   fine_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
                                      'wrf_fine')
   # create directories
   for directory in [coarse_namelist_dir, fine_namelist_dir]:
     utils._create_directory(directory)
   # remove old namelist.forecast files if needed
   for directory in [coarse_namelist_dir, fine_namelist_dir]:
     utils.silentremove(os.path.join(directory, 'namelist.forecast'))
   # write namelists
   self.nml_coarse.write(os.path.join(coarse_namelist_dir,
                                      'namelist.forecast'))
   self.nml_fine.write(os.path.join(fine_namelist_dir,
                                    'namelist.forecast'))
Example #16
File: wrf.py Project: kinow/wrfpy
 def cleanup_previous_wrf_run(self):
     '''
     cleanup initial/boundary conditions and namelist from previous WRF run
     '''
     from utils import silentremove
     # remove initial conditions (wrfinput files)
     for filename in glob.glob(
             os.path.join(self.config['filesystem']['wrf_run_dir'],
                          'wrfinput_d*')):
         silentremove(filename)
     # remove lateral boundary conditions (wrfbdy_d01)
     silentremove(
         os.path.join(self.config['filesystem']['wrf_run_dir'],
                      'wrfbdy_d01'))
     silentremove(
         os.path.join(self.config['filesystem']['wrf_run_dir'],
                      'namelist.input'))
Example #17
    def __run_job(self, job):
        success = False
        try:
            obj = self.driver.get_object(job["bucket"], job["name"])
        except ObjectDoesNotExistError:
            self.error_message("Could not get file object, skipping: %s/%s" %
                               (job["bucket"], job["name"]))
            return CONTINUE

        if job["size"] < 1024 * 10:
            try:
                self.debug_message(
                    110,
                    "%3d: Put complete file %s into queue" %
                    (self.worker_id, job["bucket"] + job["name"]),
                )
                stream = obj.as_stream()
                content = b"".join(list(stream))

                size_of_fetched_object = len(content)
                if size_of_fetched_object != job["size"]:
                    self.error_message(
                        "prefetched file %s: got %s bytes, not the real size (%s bytes)"
                        % (job["name"], size_of_fetched_object, job["size"]), )
                    return CONTINUE

                job["data"] = io.BytesIO(content)
                job["type"] = TASK_TYPE.DOWNLOADED
                success = True
            except LibcloudError:
                self.error_message("Libcloud error, could not download file")
                return CONTINUE
            except Exception:
                self.error_message("Could not download file")
                return CONTINUE
        elif job["size"] < self.options["prefetch_size"]:
            try:
                self.debug_message(
                    110,
                    "%3d: Prefetch file %s" %
                    (self.worker_id, job["bucket"] + job["name"]),
                )
                tmpfilename = self.tmp_dir_path + "/" + str(uuid.uuid4())
                obj.download(tmpfilename)
                job["data"] = None
                job["tmpfile"] = tmpfilename
                job["type"] = TASK_TYPE.TEMP_FILE
                success = True
            except OSError as e:
                self.error_message("Could not open temporary file %s" %
                                   e.filename)
                self.abort_message()
                return FINISH
            except ObjectDoesNotExistError as e:
                silentremove(tmpfilename)
                self.error_message("Could not open object, skipping: %s" %
                                   e.object_name)
                return CONTINUE
            except LibcloudError:
                silentremove(tmpfilename)
                self.error_message(
                    "Error downloading object, skipping: %s/%s" %
                    (job["bucket"], job["name"]))
                return CONTINUE
            except Exception:
                silentremove(tmpfilename)
                self.error_message(
                    "Error using temporary file, skipping: %s/%s" %
                    (job["bucket"], job["name"]))
                return CONTINUE
        else:  # large object: hand it over as a stream
            try:
                self.debug_message(
                    110,
                    "%3d: Prepare file as stream for download %s" %
                    (self.worker_id, job["bucket"] + job["name"]),
                )
                job["data"] = obj
                job["type"] = TASK_TYPE.STREAM
                success = True
            except LibcloudError:
                self.error_message(
                    "Libcloud error preparing stream object, skipping: %s/%s" %
                    (job["bucket"], job["name"]))
                return CONTINUE
            except Exception:
                self.error_message(
                    "Error preparing stream object, skipping: %s/%s" %
                    (job["bucket"], job["name"]))
                return CONTINUE

        if success:
            self.queue_try_put(self.output_queue, job)

        return CONTINUE
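
The method above picks one of three transfer strategies purely from the object size: anything under 10 KiB is buffered whole in memory, anything under the configured prefetch_size is staged in a temporary file, and everything larger is handed over as a stream. A condensed sketch of just that decision, mirroring the thresholds in the code:

def pick_strategy(size, prefetch_size):
    '''Return the transfer strategy used for an object of `size` bytes.'''
    if size < 1024 * 10:        # small: download the whole object into memory
        return 'DOWNLOADED'
    elif size < prefetch_size:  # medium: prefetch into a temporary file
        return 'TEMP_FILE'
    return 'STREAM'             # large: pass the object through as a stream

assert pick_strategy(4 * 1024, 1024 * 1024) == 'DOWNLOADED'
assert pick_strategy(512 * 1024, 1024 * 1024) == 'TEMP_FILE'
assert pick_strategy(8 * 1024 * 1024, 1024 * 1024) == 'STREAM'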