def _run_ungrib(self, j_id=None):
    '''
    Run ungrib.exe, either locally or via a slurm batch script.

    If config.json defines options_slurm/slurm_ungrib.exe (non-empty), the
    job is submitted with sbatch (optionally depending on a previous slurm
    job) and the new slurm job-id is returned. Otherwise ungrib.exe is run
    synchronously in the WPS work directory.

    Args:
        j_id: optional slurm job-id to set an "afterok" dependency on.

    Returns:
        int slurm job-id when submitted through slurm; None for a local run.

    Raises:
        subprocess.CalledProcessError: when sbatch or ungrib.exe fails.
    '''
    if len(self.config['options_slurm']['slurm_ungrib.exe']):
        # slurm-based run: build the sbatch command line
        if j_id:
            # chain this job after successful completion of job j_id
            mid = "--dependency=afterok:%d" %j_id
            ungrib_command = ['sbatch', mid, self.config['options_slurm']['slurm_ungrib.exe']]
        else:
            ungrib_command = ['sbatch', self.config['options_slurm']['slurm_ungrib.exe']]
        # verify the slurm script itself exists and is readable
        utils.check_file_exists(ungrib_command[-1])
        # refresh the ungrib.exe symlink inside the WPS work directory
        utils.silentremove(os.path.join(self.wps_workdir, 'ungrib', 'ungrib.exe'))
        if not os.path.isdir(os.path.join(self.wps_workdir, 'ungrib')):
            utils._create_directory(os.path.join(self.wps_workdir, 'ungrib'))
        os.symlink(os.path.join(self.config['filesystem']['wps_dir'],'ungrib','ungrib.exe'),
                   os.path.join(self.wps_workdir, 'ungrib', 'ungrib.exe'))
        try:
            res = subprocess.check_output(ungrib_command, cwd=self.wps_workdir,
                                          stderr=utils.devnull())
            # sbatch prints "Submitted batch job <id>"; the id is the last token
            j_id = int(res.split()[-1])  # slurm job-id
        except subprocess.CalledProcessError:
            #logger.error('Ungrib failed %s:' %ungrib_command)
            raise  # re-raise exception
        return j_id  # return slurm job-id
    else:
        # local run: execute ungrib.exe directly from the WPS install dir
        ungrib_command = os.path.join(self.config['filesystem']['wps_dir'],
                                      'ungrib', 'ungrib.exe')
        utils.check_file_exists(ungrib_command)
        try:
            subprocess.check_call(ungrib_command, cwd=self.wps_workdir,
                                  stdout=utils.devnull(), stderr=utils.devnull())
        except subprocess.CalledProcessError:
            #logger.error('Ungrib failed %s:' %ungrib_command)
            raise  # re-raise exception
def prepare_updatebc(self, datestart):
    '''
    Prepare a clean WRFDA work directory per domain for da_update_bc.exe.

    For each domain this recreates the domain work directory, writes a
    parame.in file, symlinks da_update_bc.exe, copies the lateral boundary
    file wrfbdy_d01, and configures the lower-boundary update step.

    Args:
        datestart: start date passed through to prepare_updatebc_type.
    '''
    # prepare a WRFDA workdirectory for each domain
    for domain in range(1, self.max_dom + 1):
        # TODO: add check for domain is int
        # define domain specific workdir
        wrfda_workdir = os.path.join(self.wrfda_workdir, "d0" + str(domain))
        # general functionality independent of boundary type in parame.in
        if os.path.exists(wrfda_workdir):
            shutil.rmtree(wrfda_workdir)  # remove wrfda_workdir
        utils._create_directory(os.path.join(wrfda_workdir, 'var', 'da'))
        # NOTE(review): wrf_nml is read but not used below — possibly only a
        # check that namelist.input exists/parses; confirm before removing
        wrf_nml = f90nml.read(
            os.path.join(self.config['filesystem']['wrf_run_dir'],
                         'namelist.input'))
        # define parame.in file
        self.create_parame('lower', domain)
        # symlink da_update_bc.exe
        os.symlink(
            os.path.join(self.config['filesystem']['wrfda_dir'],
                         'var/da/da_update_bc.exe'),
            os.path.join(wrfda_workdir, 'da_update_bc.exe'))
        # copy wrfbdy_d01 file (lateral boundaries) to WRFDA_WORKDIR
        shutil.copyfile(os.path.join(self.rundir, 'wrfbdy_d01'),
                        os.path.join(wrfda_workdir, 'wrfbdy_d01'))
        # set parame.in file for updating lower boundary first
        self.prepare_updatebc_type('lower', datestart, domain)
def _link_tbl_files(self):
    '''
    Symlink GEOGRID.TBL and METGRID.TBL into the WPS work directory.

    For each of geogrid and metgrid: if the table is not yet present in
    the work directory, link either the user-supplied table from
    config.json or the default ARW table shipped with WPS.
    '''
    for component in ('geogrid', 'metgrid'):
        tbl_name = component.upper() + '.TBL'
        link_path = os.path.join(self.wps_workdir, component, tbl_name)
        if os.path.isfile(link_path):
            # table already linked, nothing to do for this component
            continue
        custom_tbl = self.config['options_wps'][component + '.tbl']
        if custom_tbl:
            source_tbl = custom_tbl
        else:
            # fall back to the default ARW table in the WPS install dir
            source_tbl = os.path.join(self.config['filesystem']['wps_dir'],
                                      component, tbl_name + '.ARW')
        utils._create_directory(os.path.join(self.wps_workdir, component))
        os.symlink(source_tbl, link_path)
def __init__(self, datestart, dateend):
    '''
    Archive output of a WRF run between datestart and dateend.

    Reads the WRF namelist to find the number of domains, creates a
    per-year archive directory, defines the variable groups to store,
    archives them, and finally cleans up. Static variables are archived
    only on the very first timestep of the overall run.

    Args:
        datestart: start datetime of the interval to archive.
        dateend: end datetime of the interval to archive.
    '''
    config.__init__(self)
    self.startdate = datestart
    self.enddate = dateend
    # read WRF namelist in WRF work_dir
    wrf_nml = f90nml.read(self.config['options_wrf']['namelist.input'])
    # get number of domains
    self.ndoms = wrf_nml['domains']['max_dom']
    self.rundir = self.config['filesystem']['wrf_run_dir']
    # archive in subdir per year
    self.archivedir = os.path.join(
        self.config['filesystem']['archive_dir'], str(self.startdate.year))
    utils._create_directory(self.archivedir)
    # define static variables
    self.define_vars_static()
    # define variables that need to be stored hourly
    self.define_vars_hourly()
    # define variables that need to be stored every minute for the inner
    # domain, hourly for the other domains
    self.define_vars_minute()
    # define variables to be deaccumulated
    self.define_vars_deac()
    # archive "normal" variables
    self.archive()
    # archive wrfvar_input files
    self.archive_wrfvar_input()
    # get start_date from config.json
    start_date = utils.return_validate(
        self.config['options_general']['date_start'])
    if (start_date == datestart):
        # very first timestep
        self.archive_static()  # archive static variables
    self.cleanup()
def _initialize(self):
    '''
    Check if archive dir exists, create if not. The archive dir is used to
    ...
    '''
    # make sure the UPP archive directory is in place
    archive_dir = self.config['filesystem']['upp_archive_dir']
    utils._create_directory(archive_dir)
    # start from a clean post_dir: drop leftovers from a previous run,
    # then recreate it empty
    utils.silentremove(self.post_dir)
    utils._create_directory(self.post_dir)
def _save_namelists(self):
    '''
    Write coarse and fine WRF namelist.input to the respective run
    directories as namelist.forecast.

    Creates the wrf_coarse and wrf_fine directories under work_dir if
    needed, removes any stale namelist.forecast, then writes the current
    self.nml_coarse / self.nml_fine namelists.
    '''
    # define namelist directories
    coarse_namelist_dir = os.path.join(
        self.config['filesystem']['work_dir'], 'wrf_coarse')
    fine_namelist_dir = os.path.join(self.config['filesystem']['work_dir'],
                                     'wrf_fine')
    # plain loops instead of side-effect-only list comprehensions: the
    # calls are executed for their effect, their return values are unused
    for directory in (coarse_namelist_dir, fine_namelist_dir):
        # create directory if needed
        utils._create_directory(directory)
        # remove old namelist.forecast if present
        utils.silentremove(os.path.join(directory, 'namelist.forecast'))
    # write namelists
    self.nml_coarse.write(
        os.path.join(coarse_namelist_dir, 'namelist.forecast'))
    self.nml_fine.write(
        os.path.join(fine_namelist_dir, 'namelist.forecast'))
def check_wrf_rundir(self):
    '''
    check if rundir exists
    if rundir doesn't exist, copy over content of
    self.config['filesystem']['wrf_dir']/run
    '''
    utils._create_directory(self.wrf_rundir)
    # copy everything from the WRF run directory except the executables
    run_source = os.path.join(self.config['filesystem']['wrf_dir'], 'run')
    for source_path in glob.glob(os.path.join(run_source, '*')):
        basename = os.path.basename(source_path)
        if os.path.splitext(basename)[1] == '.exe':
            # don't copy over the executables
            continue
        shutil.copyfile(source_path, os.path.join(self.wrf_rundir, basename))
def archive_static(self):
    '''
    Archive non-changing (static) files.

    Copies every file listed in self.static_var for every domain from the
    run directory into <archivedir>/static. The filenames carry the start
    date of the run, e.g. <var>_d01_<YYYY-mm-dd_HH:MM:SS>.
    '''
    static_archivedir = os.path.join(self.archivedir, 'static')
    utils._create_directory(static_archivedir)
    # the timestamp only depends on self.startdate: hoist it out of the
    # loops instead of reformatting it once per variable per domain
    datestr_in = self.startdate.strftime('%Y-%m-%d_%H:%M:%S')
    # loop over all domains
    # NOTE(review): the '_d0' prefix assumes max_dom <= 9 — confirm
    for domain in range(1, self.ndoms + 1):
        # iterate over all variables that need to be archived
        for var in self.static_var:
            # define input and output files
            input_fn = var + '_d0' + str(domain) + '_' + datestr_in
            input_file = os.path.join(self.rundir, input_fn)
            output_file = os.path.join(static_archivedir, input_fn)
            # copy static file to archive dir
            shutil.copyfile(input_file, output_file)
def create_obsproc_dir(self, workdir):
    '''
    symlink all files required to run obsproc.exe into obsproc workdir
    '''
    # start from a clean work directory
    utils.silentremove(workdir)
    utils._create_directory(workdir)
    # symlink the observation error tables into the work directory
    error_tables = ('DIR.txt', 'HEIGHT.txt', 'PRES.txt', 'RH.txt',
                    'TEMP.txt', 'UV.txt', 'obserr.txt')
    for table in error_tables:
        os.symlink(os.path.join(self.obsproc_dir, table),
                   os.path.join(workdir, table))
    # symlink the obsproc executable itself
    os.symlink(os.path.join(self.obsproc_dir, 'src', 'obsproc.exe'),
               os.path.join(workdir, 'obsproc.exe'))
def archive_wrfvar_input(self):
    '''
    archive wrfvar_input files
    '''
    wrfvar_archivedir = os.path.join(self.archivedir, 'wrfvar')
    utils._create_directory(wrfvar_archivedir)
    start_date = utils.return_validate(
        self.config['options_general']['date_start'])
    # loop over all domains
    for domain in range(1, self.ndoms + 1):
        # every 2 hours in [startdate, enddate), skipping the overall
        # start date of the run
        for cdate in pandas.date_range(self.startdate, self.enddate,
                                       freq='2H')[:-1]:
            if cdate == start_date:
                continue
            datestr_in = cdate.strftime('%Y-%m-%d_%H:%M:%S')
            fname = 'wrfvar_input' + '_d0' + str(domain) + '_' + datestr_in
            # copy wrfvar_input to archive dir
            shutil.copyfile(os.path.join(self.rundir, fname),
                            os.path.join(wrfvar_archivedir, fname))
def __init__(self, datestring, cylc_suite_def_path):
    '''
    Archive and post-process wrfout files for a cylc cycle point.

    For every domain: converts the wrfout file to netCDF4 in the archive,
    extracts a small GIS subset (Q2/T2/U10/V10) with cdo, renders a
    surface plot with NCL, and updates the plot/latest symlink.

    Args:
        datestring: cylc cycle-point string, converted via
            utils.convert_cylc_time.
        cylc_suite_def_path: path of the cylc suite definition (used to
            locate bin/wrf_Surface3.ncl).
    '''
    config.__init__(self)
    dt = utils.convert_cylc_time(datestring)
    wrfout_time = datetime.datetime.strftime(dt, '%Y-%m-%d_%H:%M:%S')
    nml = self.config['options_wrf']['namelist.input']
    max_dom = utils.get_max_dom(nml)
    rundir = self.config['filesystem']['wrf_run_dir']
    archivedir = self.config['filesystem']['archive_dir']
    gis_archive = os.path.join(archivedir, 'gis', wrfout_time)
    utils._create_directory(gis_archive)
    for dom in range(1, max_dom + 1):
        wrfout = os.path.join(rundir,
                              'wrfout_d0' + str(dom) + '_' + wrfout_time)
        archived = os.path.join(archivedir,
                                'wrfout_d0' + str(dom) + '_' + wrfout_time)
        utils.silentremove(archived)
        # convert netCDF3 wrfout to compressed netCDF4 in the archive
        # NOTE(review): os.system builds shell strings from paths; prefer
        # subprocess.run([...]) if paths may contain spaces/shell chars
        os.system('nc3tonc4 ' + wrfout + ' ' + archived)
        try:
            gis_out = os.path.join(
                gis_archive, 'meteo_gis_d0' + str(dom) + '_' + wrfout_time)
            os.system('cdo -f nc4c -z zip_4 selvar,Q2,T2,U10,V10 ' + wrfout +
                      ' ' + gis_out)
        except Exception:
            # best-effort: a failed GIS extraction must not abort archiving
            pass
        plot_archive = os.path.join(archivedir, 'plot', wrfout_time)
        utils._create_directory(plot_archive)
        wrfncl = os.path.join(cylc_suite_def_path, 'bin', 'wrf_Surface3.ncl')
        os.system('ncl ' + wrfncl + ' inputfile=' + r'\"' + archived +
                  r'\" outputfile=\"' + plot_archive + r'/surface_d0' +
                  str(dom) + '.png' + r'\"')
        plot_latest = os.path.join(archivedir, 'plot', 'latest')
        try:
            os.symlink(plot_archive, plot_latest)
        except OSError as e:  # was Python-2-only "except OSError, e" syntax
            if e.errno == errno.EEXIST:
                # replace the stale "latest" link with the current cycle
                os.remove(plot_latest)
                os.symlink(plot_archive, plot_latest)
            else:
                # don't silently swallow unrelated symlink failures
                raise
def _create_directory_structure(self, suitename, basedir=None):
    '''
    Create directory structure for the Cylc configuration.

    Args:
        suitename: name of the cylc suite; used as subdirectory name.
        basedir: base directory for suites; defaults to ~/cylc-suites.
    '''
    # set basedir to users home directory if not supplied
    if not basedir:
        basedir = os.path.join(os.path.expanduser("~"), 'cylc-suites')
    # create subdirectories with a plain loop: the previous side-effect
    # list comprehension built and discarded a useless list of Nones
    for subdir in ('bin', 'control', 'doc', 'inc'):
        utils._create_directory(os.path.join(basedir, suitename, subdir))
    # copy over helper scripts for cylc
    # NOTE(review): copy_tree comes from the deprecated distutils; consider
    # shutil.copytree(..., dirs_exist_ok=True) on Python >= 3.8
    cylcDir = pkg_resources.resource_filename('wrfpy', 'cylc/')
    targetDir = os.path.join(basedir, suitename, 'bin')
    copy_tree(cylcDir, targetDir)
    # create empty json config file in suite directory
    # this does not overwrite an existing config file
    config.__init__(self, os.path.join(basedir, suitename, 'config.json'))
def __init__(self):
    '''
    Load the configuration and create the WPS working directory.
    '''
    config.__init__(self)  # load config
    # define and create wps working directory
    workdir = os.path.join(self.config['filesystem']['work_dir'], 'wps')
    self.wps_workdir = workdir
    utils._create_directory(workdir)
def _prepare_post_dir(self):
    '''
    Create and prepare post_dir for a UPP (unipost) run.

    Symlinks into post_dir: the CRTM2 coefficient files needed to compute
    synthetic satellite diagnostics, wrf_cntrl.parm (also as fort.14), and
    the WRF microphysics lookup tables.
    '''
    #logger.debug('Preparing postprd directory: %s' %config['post_dir'])
    # create self.post_dir if it does not exist yet
    utils._create_directory(self.post_dir)
    # Link all the relevant files need to compute various diagnostics
    relpath_to_link = [
        'EmisCoeff/Big_Endian/EmisCoeff.bin',
        'AerosolCoeff/Big_Endian/AerosolCoeff.bin',
        'CloudCoeff/Big_Endian/CloudCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g11.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_g11.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g12.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_g12.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g13.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_g13.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_g15.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_g15.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_mt1r.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_mt1r.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_mt2.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_mt2.TauCoeff.bin',
        'SpcCoeff/Big_Endian/imgr_insat3d.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/imgr_insat3d.TauCoeff.bin',
        'SpcCoeff/Big_Endian/amsre_aqua.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/amsre_aqua.TauCoeff.bin',
        'SpcCoeff/Big_Endian/tmi_trmm.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/tmi_trmm.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmi_f13.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmi_f13.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmi_f14.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmi_f14.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmi_f15.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmi_f15.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f16.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmis_f16.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f17.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmis_f17.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f18.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmis_f18.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f19.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmis_f19.TauCoeff.bin',
        'SpcCoeff/Big_Endian/ssmis_f20.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/ssmis_f20.TauCoeff.bin',
        'SpcCoeff/Big_Endian/seviri_m10.SpcCoeff.bin',
        'TauCoeff/ODPS/Big_Endian/seviri_m10.TauCoeff.bin',
        'SpcCoeff/Big_Endian/v.seviri_m10.SpcCoeff.bin'
    ]
    # abspath coefficients for crtm2 (simulated synthetic satellites)
    # NOTE(review): assumes self.crtm_dir is set elsewhere — confirm
    abspath_coeff = [
        os.path.join(self.crtm_dir, relpath) for relpath in relpath_to_link
    ]
    # abspath wrf_cntrl param file
    abspath_pf = os.path.join(self.config['filesystem']['upp_dir'], 'parm',
                              'wrf_cntrl.parm')
    # concatenate lists of paths
    abspath_to_link = abspath_coeff + [abspath_pf]
    # create a symlink for every file in abspath_to_link
    for fl in abspath_to_link:
        utils.check_file_exists(fl)  # check if file exist and is readable
        os.symlink(fl, os.path.join(self.post_dir, os.path.basename(fl)))
    # symlink wrf_cntrl.parm to config['post_dir']/fort.14
    os.symlink(abspath_pf, os.path.join(self.post_dir, 'fort.14'))
    # symlink microphysic's tables - code used is based on mp_physics option
    # used in the wrfout file
    os.symlink(
        os.path.join(self.config['filesystem']['wrf_run_dir'],
                     'ETAMPNEW_DATA'),
        os.path.join(self.post_dir, 'nam_micro_lookup.dat'))
    os.symlink(
        os.path.join(self.config['filesystem']['wrf_run_dir'],
                     'ETAMPNEW_DATA.expanded_rain'),
        os.path.join(self.post_dir, 'hires_micro_lookup.dat'))