def run_at_time(self, init_time):
    """! Runs the tool for a single initialization time. Loops over every
         forecast lead, forecast variable, accumulation, and observation
         type from the config and calls run_at_time_once for each
         valid combination.
        Args:
            @param init_time: initialization time to process
    """
    task_info = TaskInfo()
    task_info.init_time = init_time
    fcst_vars = util.getlist(self.p.getstr('config', 'FCST_VARS'))
    lead_seq = util.getlistint(self.p.getstr('config', 'LEAD_SEQ'))
    for lead in lead_seq:
        task_info.lead = lead
        for fcst_var in fcst_vars:
            task_info.fcst_var = fcst_var
            # loop over models to compare
            accums = util.getlist(
                self.p.getstr('config', fcst_var + "_ACCUM"))
            ob_types = util.getlist(
                self.p.getstr('config', fcst_var + "_OBTYPE"))
            for accum in accums:
                task_info.level = accum
                # skip accumulations longer than the forecast lead;
                # this check is independent of ob_type, so it is hoisted
                # out of the ob_type loop to skip all ob_types at once
                if lead < int(accum):
                    continue
                for ob_type in ob_types:
                    task_info.ob_type = ob_type
                    # self.run_at_time_fcst(task_info)
                    self.run_at_time_once(task_info.getValidTime(),
                                          task_info.level,
                                          task_info.ob_type,
                                          task_info.fcst_var)
def __init__(self, config, logger): super(SeriesByInitWrapper, self).__init__(config, logger) # Retrieve any necessary values (dirs, executables) # from the param file(s) self.app_name = 'SeriesByInit' self.stat_list = util.getlist(self.config.getstr( 'config', 'STAT_LIST')) self.var_list = util.getlist(self.config.getstr('config', 'VAR_LIST')) self.regrid_with_met_tool = self.config.getbool( 'config', 'REGRID_USING_MET_TOOL') self.extract_tiles_dir = self.config.getdir('EXTRACT_TILES_OUTPUT_DIR') self.series_out_dir = self.config.getdir('SERIES_BY_INIT_OUTPUT_DIR') self.series_filtered_out_dir = \ self.config.getdir('SERIES_BY_INIT_FILTERED_OUTPUT_DIR') self.filter_opts = \ self.config.getstr('config', 'SERIES_ANALYSIS_FILTER_OPTS') self.fcst_ascii_file_prefix = 'FCST_ASCII_FILES_' self.anly_ascii_file_prefix = 'ANLY_ASCII_FILES_' # Needed for generating plots self.sbi_plotting_out_dir = '' # For building the argument string via # CommandBuilder: met_install_dir = self.config.getdir('MET_INSTALL_DIR') self.app_path = os.path.join(met_install_dir, 'bin/series_analysis') self.app_name = os.path.basename(self.app_path) self.inaddons = [] self.infiles = [] self.outdir = "" self.outfile = "" self.args = [] self.logger.info("Initialized SeriesByInitWrapper")
def __init__(self, config, logger):
    """!Constructor for TCMPRPlotterWrapper
        Args:
            @param config: The configuration instance, contains the conf
                           file information.
            @param logger: A logger, can be None
    """

    # pylint:disable=too-many-instance-attributes
    # All these instance attributes are needed to support the
    # plot_tcmpr.R functionality.
    super(TCMPRPlotterWrapper, self).__init__(config, logger)
    self.app_name = 'plot_tcmpr.R'
    # locate the plot_tcmpr.R script that this wrapper drives
    self._init_tcmpr_script()

    # The only required argument for plot_tcmpr.R, the name of
    # the tcst file to plot.
    self.input_data = self.config.getdir('TCMPR_DATA_DIR')

    # Optional arguments, each maps to a plot_tcmpr.R command-line option
    self.plot_config_file = self.config.getstr('config', 'CONFIG_FILE')
    self.output_base_dir = self.config.getdir('TCMPR_PLOT_OUTPUT_DIR')
    self.prefix = self.config.getstr('config', 'PREFIX')
    self.title = self.config.getstr('config', 'TITLE')
    self.subtitle = self.config.getstr('config', 'SUBTITLE')
    # axis labels and limits
    self.xlab = self.config.getstr('config', 'XLAB')
    self.ylab = self.config.getstr('config', 'YLAB')
    self.xlim = self.config.getstr('config', 'XLIM')
    self.ylim = self.config.getstr('config', 'YLIM')
    self.filter = self.config.getstr('config', 'FILTER')
    self.filtered_tcst_data = self.config.getstr(
        'config', 'FILTERED_TCST_DATA_FILE')
    # dependent variables to plot (comma-separated in the conf file)
    self.dep_vars = util.getlist(self.config.getstr('config', 'DEP_VARS'))
    self.scatter_x = self.config.getstr('config', 'SCATTER_X')
    self.scatter_y = self.config.getstr('config', 'SCATTER_Y')
    self.skill_ref = self.config.getstr('config', 'SKILL_REF')
    self.series = self.config.getstr('config', 'SERIES')
    self.series_ci = self.config.getstr('config', 'SERIES_CI')
    self.legend = self.config.getstr('config', 'LEGEND')
    self.lead = self.config.getstr('config', 'LEAD')
    # one or more plot types to generate
    self.plot_types = util.getlist(
        self.config.getstr('config', 'PLOT_TYPES'))
    self.rp_diff = self.config.getstr('config', 'RP_DIFF')
    self.demo_year = self.config.getstr('config', 'DEMO_YR')
    self.hfip_baseline = self.config.getstr('config', 'HFIP_BASELINE')
    self.footnote_flag = self.config.getstr('config', 'FOOTNOTE_FLAG')
    self.plot_config_options = self.config.getstr('config',
                                                  'PLOT_CONFIG_OPTS')
    self.save_data = self.config.getstr('config', 'SAVE_DATA')

    # Optional flags, by default these will be set to False in the
    # produtil config files.
    self.no_ee = self.config.getbool('config', 'NO_EE')
    self.no_log = self.config.getbool('config', 'NO_LOG')
    self.save = self.config.getbool('config', 'SAVE')
def set_fcst_or_obs_dict_items(self, d_type):
    """! Reads all of the PCP_COMBINE configuration values for one data
         type and stores them in self.c_dict under keys prefixed with
         that type.
        Args:
            @param d_type: data type prefix, e.g. 'FCST' or 'OBS'
    """
    # forecast lead bounds used when searching for input files
    self.c_dict[d_type+'_MIN_FORECAST'] = self.config.getint('config', d_type+'_PCP_COMBINE_MIN_FORECAST', 0)
    self.c_dict[d_type+'_MAX_FORECAST'] = self.config.getint('config', d_type+'_PCP_COMBINE_MAX_FORECAST', 256)
    self.c_dict[d_type+'_INPUT_DATATYPE'] = self.config.getstr('config', d_type+'_PCP_COMBINE_INPUT_DATATYPE', '')
    self.c_dict[d_type+'_DATA_INTERVAL'] = self.config.getint('config', d_type+'_PCP_COMBINE_DATA_INTERVAL', 1)
    self.c_dict[d_type+'_TIMES_PER_FILE'] = self.config.getint('config', d_type+'_PCP_COMBINE_TIMES_PER_FILE', -1)
    self.c_dict[d_type+'_IS_DAILY_FILE'] = self.config.getbool('config', d_type+'_PCP_COMBINE_IS_DAILY_FILE', False)
    # input level defaults to '-1' (unset sentinel)
    self.c_dict[d_type+'_LEVEL'] = self.config.getstr('config', d_type+'_PCP_COMBINE_INPUT_LEVEL', '-1')
    # input/output directories and filename templates
    self.c_dict[d_type+'_INPUT_DIR'] = self.config.getdir(d_type+'_PCP_COMBINE_INPUT_DIR', '')
    self.c_dict[d_type+'_INPUT_TEMPLATE'] = self.config.getraw('filename_templates', d_type+'_PCP_COMBINE_INPUT_TEMPLATE')
    self.c_dict[d_type+'_OUTPUT_DIR'] = self.config.getdir(d_type+'_PCP_COMBINE_OUTPUT_DIR', '')
    self.c_dict[d_type+'_OUTPUT_TEMPLATE'] = self.config.getraw('filename_templates', d_type+'_PCP_COMBINE_OUTPUT_TEMPLATE')
    # statistics to compute when running in derive mode
    self.c_dict[d_type+'_STAT_LIST'] = \
        util.getlist(self.config.getstr('config',
                                        d_type+'_PCP_COMBINE_STAT_LIST', ''))
    self.c_dict[d_type+'_RUN_METHOD'] = \
        self.config.getstr('config', d_type+'_PCP_COMBINE_METHOD')
    # derive mode requires at least one statistic to be listed
    if self.c_dict[d_type+'_RUN_METHOD'] == 'DERIVE' and \
       len(self.c_dict[d_type+'_STAT_LIST']) == 0:
        self.logger.error('Statistic list is empty. ' + \
            'Must set ' + d_type + '_PCP_COMBINE_STAT_LIST if running ' +\
            'derive mode')
        exit(1)
    self.c_dict[d_type+'_DERIVE_LOOKBACK'] = \
        self.config.getint('config',
                           d_type+'_PCP_COMBINE_DERIVE_LOOKBACK', 0)
def test_no_value_as_list():
    """! Verify that getlist on a key with no assigned value
         yields an empty list."""
    conf_obj = get_config_obj()
    result = util.getlist(conf_obj.getstr('config', 'UNASSIGNED_VALUE'))
    assert result == []
def parse_var_fourier_decomp(self):
    """! Parse metplus_final.conf for variable information on
         the Fourier decomposition

         Args:

         Returns:
             fourier_decom_list - list of objects containing
             Fourier decomposition information for the variables
    """
    fourier_decom_list = []
    all_conf = self.config.keys('config')
    indices = []
    # raw string so \d is a regex digit class, not a (deprecated and
    # eventually invalid) string escape sequence
    regex = re.compile(r"FCST_VAR(\d+)_NAME")
    for conf in all_conf:
        result = regex.match(conf)
        if result is not None:
            indices.append(result.group(1))

    for n in indices:
        if self.config.has_option('config', "FCST_VAR" + n + "_NAME"):
            # Fourier decomposition is off by default for each variable
            run_fourier = self.config.getbool(
                'config', "VAR" + n + "_FOURIER_DECOMP", False)
            fourier_wave_num_pairs = util.getlist(
                self.config.getstr('config',
                                   "VAR" + n + "_WAVE_NUM_LIST", ""))
            # clear the wave number pairings when decomposition is
            # disabled (empty string iterates the same as an empty list)
            if not run_fourier:
                fourier_wave_num_pairs = ""
            fd_info = self.FourierDecompInfo()
            fd_info.run_fourier = run_fourier
            fd_info.wave_num_pairings = fourier_wave_num_pairs
            fourier_decom_list.append(fd_info)

    return fourier_decom_list
def run_at_time(self, init_time, valid_time):
    """! Runs the tool for a single run time. Loops over every model
         template / type / max-forecast triple, every forecast lead, and
         every variable, calling run_at_time_once for each combination.
        Args:
            @param init_time: initialization time to process
            @param valid_time: valid time to process
    """
    task_info = TaskInfo()
    task_info.init_time = init_time
    task_info.valid_time = valid_time
    var_list = util.parse_var_list(self.p)
    model_templates = util.getlist(
        self.p.getraw('filename_templates',
                      'FCST_GRID_STAT_INPUT_TEMPLATE'))
    model_types = util.getlist(self.p.getraw('config', 'MODEL_TYPE'))
    max_forecasts = util.getlist(
        self.p.getraw('config', 'FCST_MAX_FORECAST'))
    # verify the three model lists are consistent before pairing them up
    self.check_model(model_templates, model_types, max_forecasts)
    lead_seq = util.getlistint(self.p.getstr('config', 'LEAD_SEQ'))
    # iterate the model lists in lockstep instead of indexing by position
    for template, model_type, max_fcst in zip(model_templates,
                                              model_types,
                                              max_forecasts):
        cur_model = [template, model_type, max_fcst]
        for lead in lead_seq:
            task_info.lead = lead
            for var_info in var_list:
                self.run_at_time_once(task_info, var_info, cur_model)
def grid2grid_anom_plot_format(self):
    """! Runs stat_analysis filter jobs to reformat grid2grid anomaly
         output for plotting. Loops over cycle hour, model, variable,
         region, lead, and interpolation method, exporting the MET
         environment variables for each combination and building a
         -dump_row filter job for each.
    """
    self.logger.info("Formatting for plotting for grid2grid-anom")
    #read config
    use_init = self.p.getbool('config', 'LOOP_BY_INIT', True)
    # seed the valid/init env vars depending on the loop-by mode;
    # the unused pair is exported empty
    if use_init:
        start_t = self.p.getstr('config', 'INIT_BEG')
        end_t = self.p.getstr('config', 'INIT_END')
        self.add_env_var("FCST_VALID_BEG", "")
        self.add_env_var("FCST_VALID_END", "")
        self.add_env_var("FCST_INIT_BEG", start_t)
        self.add_env_var("FCST_INIT_END", end_t)
    else:
        start_t = self.p.getstr('config', 'VALID_BEG')
        end_t = self.p.getstr('config', 'VALID_END')
        self.add_env_var("FCST_VALID_BEG", start_t)
        self.add_env_var("FCST_VALID_END", end_t)
        self.add_env_var("FCST_INIT_BEG", "")
        self.add_env_var("FCST_INIT_END", "")
    stat_analysis_lookin_dir = self.p.getdir('STAT_ANALYSIS_LOOKIN_DIR')
    stat_analysis_out_dir = self.p.getdir('STAT_ANALYSIS_OUT_DIR')
    var_list = util.parse_var_list(self.p)
    region_list = util.getlist(self.p.getstr('config', 'REGION_LIST'))
    lead_list = util.getlistint(self.p.getstr('config', 'LEAD_LIST'))
    model_list = util.getlist(self.p.getstr('config', 'MODEL_LIST'))
    # cycle-hour loop bounds come from the matching loop-by mode
    if use_init:
        loop_beg_hour = self.p.getint('config', 'INIT_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'INIT_END_HOUR')
        loop_inc = self.p.getint('config', 'INIT_INC')
    else:
        loop_beg_hour = self.p.getint('config', 'VALID_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'VALID_END_HOUR')
        loop_inc = self.p.getint('config', 'VALID_INC')
    loop_hour = loop_beg_hour
    while loop_hour <= loop_end_hour:
        loop_hour_str = str(loop_hour).zfill(2)
        #filtering times based on if files made based on init_time
        #or valid_time
        if use_init:
            start_t = self.p.getstr('config', 'INIT_BEG')
            end_t = self.p.getstr('config', 'INIT_END')
            self.add_env_var("FCST_VALID_BEG", "")
            self.add_env_var("FCST_VALID_END", "")
            self.add_env_var("FCST_VALID_HOUR", "")
            self.add_env_var("FCST_INIT_BEG",
                             start_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_INIT_END",
                             end_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_INIT_HOUR",
                             '"' + loop_hour_str + '"')
        else:
            start_t = self.p.getstr('config', 'VALID_BEG')
            end_t = self.p.getstr('config', 'VALID_END')
            self.add_env_var("FCST_VALID_BEG",
                             start_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_VALID_END",
                             end_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_VALID_HOUR",
                             '"' + loop_hour_str + '"')
            self.add_env_var("FCST_INIT_BEG", "")
            self.add_env_var("FCST_INIT_END", "")
            self.add_env_var("FCST_INIT_HOUR", "")
        for model in model_list:
            self.add_env_var('MODEL', model)
            #build -lookin directory
            self.set_lookin_dir(
                os.path.join(stat_analysis_lookin_dir,
                             loop_hour_str + 'Z', model))
            for var_info in var_list:
                fcst_var_name = var_info.fcst_name
                fcst_var_level = var_info.fcst_level
                #fcst_var_extra = var_info.fcst_extra.replace(" = ", "").rstrip(";")
                obs_var_name = var_info.obs_name
                obs_var_level = var_info.obs_level
                #obs_var_extra = var_info.obs_extra.replace(" = ", "").rstrip(";")
                self.add_env_var('FCST_VAR_NAME', fcst_var_name)
                self.add_env_var('FCST_VAR_LEVEL', fcst_var_level)
                self.add_env_var('OBS_VAR_NAME', obs_var_name)
                self.add_env_var('OBS_VAR_LEVEL', obs_var_level)
                # interpolation methods to process; for HGT with Fourier
                # decomposition enabled this also includes one WV1_b-e
                # entry per wave-number pairing
                interp_mthd = []
                if var_info.fcst_name == 'HGT' or \
                        var_info.obs_name == 'HGT':
                    fourier_decomp_height = self.p.getbool(
                        'config', 'FOURIER_HEIGHT_DECOMP')
                    if fourier_decomp_height:
                        wave_num_beg_list = util.getlist(
                            self.p.getstr('config', 'WAVE_NUM_BEG_LIST'))
                        wave_num_end_list = util.getlist(
                            self.p.getstr('config', 'WAVE_NUM_END_LIST'))
                        # beg/end lists must pair one-to-one
                        if len(wave_num_beg_list) != len(
                                wave_num_end_list):
                            print(
                                "ERROR: WAVE_NUM_BEG_LIST and WAVE_NUM_END_LIST do not have the same number of elements"
                            )
                            exit(1)
                        else:
                            interp_mthd.append("NEAREST")
                            for wn in range(len(wave_num_beg_list)):
                                wb = wave_num_beg_list[wn]
                                we = wave_num_end_list[wn]
                                wave_num_pairing = "WV1_" + wb + "-" + we
                                interp_mthd.append(wave_num_pairing)
                    else:
                        interp_mthd.append("NEAREST")
                else:
                    interp_mthd.append("NEAREST")
                for region in region_list:
                    self.add_env_var('REGION', region)
                    for lead in lead_list:
                        # zero-pad single-digit leads
                        if lead < 10:
                            lead_string = '0' + str(lead)
                        else:
                            lead_string = str(lead)
                        self.add_env_var('LEAD', lead_string)
                        # make sure the per-hour/model/region output
                        # directory exists before dumping rows into it
                        if not os.path.exists(
                                os.path.join(stat_analysis_out_dir,
                                             loop_hour_str + "Z",
                                             model, region)):
                            os.makedirs(
                                os.path.join(stat_analysis_out_dir,
                                             loop_hour_str + "Z",
                                             model, region))
                        for im in interp_mthd:
                            self.add_env_var('INTERP', im)
                            # NEAREST output omits the interp suffix;
                            # Fourier pairings append it
                            if im == "NEAREST":
                                ##dump_row_file = os.path.join(stat_analysis_out_dir,
                                ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_"+fcst_var_name+fcst_var_level+".stat")
                                dump_row_file = os.path.join(
                                    stat_analysis_out_dir,
                                    loop_hour_str + "Z", model, region,
                                    model + "_f" + lead_string + "_fcst" +
                                    fcst_var_name + fcst_var_level +
                                    "_obs" + obs_var_name +
                                    obs_var_level + ".stat")
                                ##dump_row_file = os.path.join(stat_analysis_out_dir,
                                ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_fcst"+fcst_var_name+fcst_var_level+fcst_var_extra+"_obs"+obs_var_name+obs_var_level+obs_var_extra+".stat")
                            else:
                                ##dump_row_file = os.path.join(stat_analysis_out_dir,
                                ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_"+fcst_var_name+fcst_var_level+".stat")
                                dump_row_file = os.path.join(
                                    stat_analysis_out_dir,
                                    loop_hour_str + "Z", model, region,
                                    model + "_f" + lead_string + "_fcst" +
                                    fcst_var_name + fcst_var_level +
                                    "_obs" + obs_var_name +
                                    obs_var_level + "_" + im + ".stat")
                                ##dump_row_file = os.path.join(stat_analysis_out_dir,
                                ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_fcst"+fcst_var_name+fcst_var_level+fcst_var_extra+"_obs"+obs_var_name+obs_var_level+obs_var_extra+"_"+im+".stat")
                            job = "-job filter -dump_row " + dump_row_file
                            self.add_env_var("JOB", job)
                            #get stat_analysis config file
                            self.set_param_file(
                                self.p.getstr('config',
                                              'STAT_ANALYSIS_CONFIG'))
                            #environment
                            self.logger.debug("")
                            self.logger.debug(
                                "ENVIRONMENT FOR NEXT COMMAND: ")
                            self.logger.debug("")
                            self.logger.debug(
                                "COPYABLE ENVIRONMENT FOR NEXT COMMAND: ")
                            self.logger.debug("")
                            #build command
                            cmd = self.get_command()
                            if cmd is None:
                                print(
                                    "ERROR: stat_analysis could not generate command"
                                )
                                return
                            self.logger.info("")
                            self.build()
                            # reset command state for the next job
                            self.clear()
        loop_hour += loop_inc
def create_c_dict(self):
    """! Read in and store all the values from the config file.  This
         will make it easier to reassign values while unit testing and
         make it easier when retrieving these values, especially when
         they are needed multiple times by different methods.

         Args:

         Returns:
             c_dict - a dictionary of the key-value representation of
                      options set in the config file.
    """
    self.logger.info('Creating tc-stat dictionary...')

    c_dict = super(TcStatWrapper, self).create_c_dict()

    # Useful for logging
    # Logging output: TIME UTC |TYPE (DEBUG, INFO, WARNING, etc.) |
    # [File : function]| Message
    cur_filename = sys._getframe().f_code.co_filename
    cur_function = sys._getframe().f_code.co_name

    # Check for the MET_INSTALL_DIR, if it is missing, then
    # we cannot invoke the MET tool.
    if not self.config.getdir('MET_INSTALL_DIR'):
        self.logger.error(cur_filename + '|' + cur_function +
                          ': MET install ' +
                          'directory not found in config file. Exiting.')
        sys.exit(1)
    c_dict['APP_PATH'] = os.path.join(
        self.config.getdir('MET_INSTALL_DIR'), 'bin/tc_stat')

    c_dict['APP_NAME'] = os.path.basename(c_dict['APP_PATH'])

    # when configured via conf file (rather than command line), read the
    # full set of tc_stat filtering options
    if self.by_config:
        c_dict['AMODEL'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_AMODEL'))
        c_dict['BMODEL'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_BMODEL'))
        c_dict['DESC'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_DESC'))
        c_dict['STORM_ID'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_STORM_ID'))
        c_dict['BASIN'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_BASIN'))
        c_dict['CYCLONE'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_CYCLONE'))
        c_dict['STORM_NAME'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_STORM_NAME'))

        c_dict['INIT_BEG'] = self.config.getstr('config',
                                                'TC_STAT_INIT_BEG')
        c_dict['INIT_END'] = self.config.getstr('config',
                                                'TC_STAT_INIT_END')
        c_dict['INIT_INCLUDE'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_INIT_INCLUDE'))
        c_dict['INIT_EXCLUDE'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_INIT_EXCLUDE'))
        c_dict['INIT_HOUR'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_INIT_HOUR'))
        # BUGFIX: VALID_BEG/VALID_END previously read TC_STAT_INIT_BEG and
        # TC_STAT_INIT_END (copy-paste error), silently ignoring any
        # configured valid-time window
        c_dict['VALID_BEG'] = self.config.getstr('config',
                                                 'TC_STAT_VALID_BEG')
        c_dict['VALID_END'] = self.config.getstr('config',
                                                 'TC_STAT_VALID_END')
        c_dict['VALID_INCLUDE'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_VALID_INCLUDE'))
        c_dict['VALID_EXCLUDE'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_VALID_EXCLUDE'))
        c_dict['LEAD_REQ'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_LEAD_REQ'))
        c_dict['INIT_MASK'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_INIT_MASK'))
        c_dict['VALID_MASK'] = \
            util.getlist(self.config.getstr('config',
                                            'TC_STAT_VALID_MASK'))
        c_dict['VALID_HOUR'] = \
            util.getlist(self.config.getstr('config',
                                            'TC_STAT_VALID_HOUR'))
        c_dict['LEAD'] = \
            util.getlist(self.config.getstr('config', 'TC_STAT_LEAD'))
        c_dict['TRACK_WATCH_WARN'] = \
            util.getlist(
                self.config.getstr('config', 'TC_STAT_TRACK_WATCH_WARN'))
        c_dict['COLUMN_THRESH_NAME'] = \
            util.getlist(
                self.config.getstr('config',
                                   'TC_STAT_COLUMN_THRESH_NAME'))
        c_dict['COLUMN_THRESH_VAL'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_COLUMN_THRESH_VAL'))
        c_dict['COLUMN_STR_NAME'] = \
            util.getlist(
                self.config.getstr('config', 'TC_STAT_COLUMN_STR_NAME'))
        c_dict['COLUMN_STR_VAL'] = \
            util.getlist(
                self.config.getstr('config', 'TC_STAT_COLUMN_STR_VAL'))
        c_dict['INIT_THRESH_NAME'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_INIT_THRESH_NAME'))
        c_dict['INIT_THRESH_VAL'] = util.getlist(
            self.config.getstr('config', 'TC_STAT_INIT_THRESH_VAL'))
        c_dict['INIT_STR_NAME'] = \
            util.getlist(
                self.config.getstr('config', 'TC_STAT_INIT_STR_NAME'))
        c_dict['INIT_STR_VAL'] = \
            util.getlist(
                self.config.getstr('config', 'TC_STAT_INIT_STR_VAL'))

        try:
            c_dict['WATER_ONLY'] = \
                self.config.getbool('config', 'TC_STAT_WATER_ONLY')
        except ValueError:
            # WATER_ONLY not defined in any configuration files,
            # set to False and proceed.
            self.logger.warn(
                cur_filename + '|' + cur_function +
                ': WATER_ONLY undefined in config file. '
                'Setting to False.')
            c_dict['WATER_ONLY'] = False

        try:
            c_dict['LANDFALL'] = \
                self.config.getbool('config', 'TC_STAT_LANDFALL')
        except ValueError:
            # Not set by user in MET tc_stat config file or METplus config
            # file. Set to False and continue ingesting config file values.
            self.logger.warn(
                cur_filename + '|' + cur_function + ': LANDFALL' +
                ' undefined in config file. Setting to False...')
            c_dict['LANDFALL'] = False

        c_dict['LANDFALL_BEG'] = \
            self.config.getstr('config', 'TC_STAT_LANDFALL_BEG')
        c_dict['LANDFALL_END'] = \
            self.config.getstr('config', 'TC_STAT_LANDFALL_END')
        c_dict['JOBS_LIST'] = \
            self.config.getstr('config', 'TC_STAT_JOBS_LIST')
    else:
        # via command line, only one job requested
        c_dict['CMD_LINE_JOB'] = self.config.getstr(
            'config', 'TC_STAT_CMD_LINE_JOB')

    c_dict['MATCH_POINTS'] = \
        self.config.getstr('config', 'TC_STAT_MATCH_POINTS').upper()

    # common directories used by both run modes
    c_dict['OUTPUT_BASE'] = self.config.getdir('OUTPUT_BASE')
    c_dict['TMP_DIR'] = self.config.getdir('TMP_DIR')
    c_dict['METPLUS_BASE'] = self.config.getdir('METPLUS_BASE')
    c_dict['MET_INSTALL_DIR'] = self.config.getdir('MET_INSTALL_DIR')
    c_dict['INPUT_DIR'] = self.config.getdir('TC_STAT_INPUT_DIR')
    c_dict['OUTPUT_DIR'] = self.config.getdir('TC_STAT_OUTPUT_DIR')
    c_dict['PARM_BASE'] = self.config.getdir('PARM_BASE')
    c_dict['CONFIG_FILE'] = self.config.getstr('config',
                                               'TC_STAT_CONFIG_FILE')

    return c_dict
def create_pb_dict(self):
    """! Create a data structure (dictionary) that contains all the
         values set in the configuration files

         Args:

         Returns:
             pb_dict - a dictionary containing the settings in the
             configuration files (that aren't in the
             metplus_data, metplus_system, and metplus_runtime
             config files.
    """
    pb_dict = dict()

    # Directories
    pb_dict['APP_PATH'] = os.path.join(self.p.getdir('MET_INSTALL_DIR'),
                                       'bin/pb2nc')
    pb_dict['APP_NAME'] = os.path.basename(pb_dict['APP_PATH'])
    pb_dict['PROJ_DIR'] = self.p.getdir('dir', 'PROJ_DIR')
    pb_dict['TMP_DIR'] = self.p.getdir('dir', 'TMP_DIR')
    pb_dict['METPLUS_BASE'] = self.p.getdir('dir', 'METPLUS_BASE')
    pb_dict['MET_BUILD_BASE'] = self.p.getdir('dir', 'MET_BUILD_BASE')
    pb_dict['MET_INSTALL_DIR'] = self.p.getdir('dir', 'MET_INSTALL_DIR')
    pb_dict['PREPBUFR_DATA_DIR'] = self.p.getstr('dir',
                                                 'PREPBUFR_DATA_DIR')
    pb_dict['PREPBUFR_MODEL_DIR_NAME'] = \
        self.p.getstr('dir', 'PREPBUFR_MODEL_DIR_NAME')
    pb_dict['PB2NC_OUTPUT_DIR'] = self.p.getstr('dir', 'PB2NC_OUTPUT_DIR')
    pb_dict['PARM_BASE'] = self.p.getdir('dir', 'PARM_BASE')
    pb_dict['OUTPUT_BASE'] = self.p.getstr('dir', 'OUTPUT_BASE')

    # Configuration
    pb_dict['TIME_METHOD'] = self.p.getstr('config', 'TIME_METHOD')
    pb_dict['PB2NC_CONFIG_FILE'] = self.p.getstr('config',
                                                 'PB2NC_CONFIG_FILE')
    pb_dict['PB2NC_MESSAGE_TYPE'] = util.getlist(
        self.p.getstr('config', 'PB2NC_MESSAGE_TYPE'))
    pb_dict['VERTICAL_LOCATION'] = self.p.getstr('config',
                                                 'VERTICAL_LOCATION')

    grid_id = self.p.getstr('config', 'PB2NC_GRID')
    if grid_id.startswith('G'):
        # Reformat grid ids that begin with 'G' ( G10, G1, etc.) to
        # format Gnnn
        pb_dict['PB2NC_GRID'] = self.reformat_grid_id(grid_id)
    else:
        pb_dict['PB2NC_GRID'] = grid_id

    pb_dict['PB2NC_POLY'] = self.p.getstr('config', 'PB2NC_POLY')
    pb_dict['PB2NC_STATION_ID'] = util.getlist(
        self.p.getstr('config', 'PB2NC_STATION_ID'))

    # Retrieve YYYYMMDD begin and end time
    pb_dict['BEG_TIME'] = self.p.getstr('config', 'BEG_TIME')[0:8]
    pb_dict['END_TIME'] = self.p.getstr('config', 'END_TIME')[0:8]
    pb_dict['INTERVAL_TIME'] = \
        self.p.getstr('config', 'INTERVAL_TIME')[0:2]
    pb_dict['OBS_BUFR_VAR_LIST'] = util.getlist(
        self.p.getstr('config', 'OBS_BUFR_VAR_LIST'))
    pb_dict['START_HOUR'] = self.p.getstr('config', 'START_HOUR')
    pb_dict['END_HOUR'] = self.p.getstr('config', 'END_HOUR')
    pb_dict['START_DATE'] = self.p.getstr('config', 'START_DATE')
    pb_dict['END_DATE'] = self.p.getstr('config', 'END_DATE')
    pb_dict['TIME_SUMMARY_FLAG'] = self.p.getbool('config',
                                                  'TIME_SUMMARY_FLAG')
    pb_dict['TIME_SUMMARY_BEG'] = self.p.getstr('config',
                                                'TIME_SUMMARY_BEG')
    pb_dict['TIME_SUMMARY_END'] = self.p.getstr('config',
                                                'TIME_SUMMARY_END')
    # BUGFIX: the section name was misspelled 'conf', so the value was
    # looked up in a nonexistent section instead of 'config'
    pb_dict['TIME_SUMMARY_VAR_NAMES'] = util.getlist(
        self.p.getstr('config', 'TIME_SUMMARY_VAR_NAMES'))
    pb_dict['TIME_SUMMARY_TYPES'] = util.getlist(
        self.p.getstr('config', 'TIME_SUMMARY_TYPES'))
    pb_dict['OBS_WINDOW_BEGIN'] = self.p.getstr('config',
                                                'OBS_WINDOW_BEGIN')
    pb_dict['OBS_WINDOW_END'] = self.p.getstr('config', 'OBS_WINDOW_END')
    pb_dict['OVERWRITE_NC_OUTPUT'] = \
        self.p.getstr('config', 'OVERWRITE_NC_OUTPUT').lower()

    # Filename templates and regex patterns for input dirs and filenames
    pb_dict['NC_FILE_TMPL'] = self.p.getraw('filename_templates',
                                            'NC_FILE_TMPL')
    pb_dict['PREPBUFR_FILE_REGEX'] = self.p.getraw('regex_pattern',
                                                   'PREPBUFR_FILE_REGEX')
    pb_dict['PREPBUFR_DIR_REGEX'] = self.p.getraw('regex_pattern',
                                                  'PREPBUFR_DIR_REGEX')

    # non-MET executables
    pb_dict['WGRIB2'] = self.p.getdir('exe', 'WGRIB2')
    pb_dict['RM_EXE'] = self.p.getdir('exe', 'RM_EXE')
    pb_dict['CUT_EXE'] = self.p.getdir('exe', 'CUT_EXE')
    pb_dict['TR_EXE'] = self.p.getdir('exe', 'TR_EXE')
    pb_dict['NCAP2_EXE'] = self.p.getdir('exe', 'NCAP2_EXE')
    pb_dict['CONVERT_EXE'] = self.p.getdir('exe', 'CONVERT_EXE')
    pb_dict['NCDUMP_EXE'] = self.p.getdir('exe', 'NCDUMP_EXE')
    pb_dict['EGREP_EXE'] = self.p.getdir('exe', 'EGREP_EXE')

    return pb_dict
def create_c_dict(self):
    """! Create a dictionary containing all the values set in the
         config file. This will make it easier for unit testing.

         Args:

         Returns:
             c_dict - A dictionary of the values from the config file
    """
    c_dict = super(TcPairsWrapper, self).create_c_dict()

    # sentinel values to replace/use for missing data in the track files
    c_dict['MISSING_VAL_TO_REPLACE'] =\
        self.config.getstr('config',
                           'TC_PAIRS_MISSING_VAL_TO_REPLACE', '-99')
    c_dict['MISSING_VAL'] =\
        self.config.getstr('config', 'TC_PAIRS_MISSING_VAL', '-9999')
    c_dict['TC_PAIRS_CONFIG_FILE'] = self.config.getstr(
        'config', 'TC_PAIRS_CONFIG_FILE')
    # init time window; getraw keeps any embedded template syntax intact
    c_dict['INIT_BEG'] = self.config.getraw('config', 'INIT_BEG')
    c_dict['INIT_END'] = self.config.getraw('config', 'INIT_END')
    c_dict['INIT_TIME_FMT'] = self.config.getstr('config',
                                                 'INIT_TIME_FMT')
    c_dict['INIT_INCREMENT'] = self.config.getint('config',
                                                  'INIT_INCREMENT')
    c_dict['INIT_INCLUDE'] = util.getlist(
        self.config.getstr('config', 'INIT_INCLUDE'))
    c_dict['INIT_EXCLUDE'] = util.getlist(
        self.config.getstr('config', 'INIT_EXCLUDE'))
    c_dict['VALID_BEG'] = self.config.getstr('config', 'VALID_BEG')
    c_dict['VALID_END'] = self.config.getstr('config', 'VALID_END')
    # ADECK/EDECK dirs are optional (default ''); BDECK is required
    c_dict['ADECK_DIR'] = \
        self.config.getdir('TC_PAIRS_ADECK_INPUT_DIR', '')
    c_dict['BDECK_DIR'] = \
        self.config.getdir('TC_PAIRS_BDECK_INPUT_DIR')
    c_dict['EDECK_DIR'] = \
        self.config.getdir('TC_PAIRS_EDECK_INPUT_DIR', '')
    c_dict['OUTPUT_DIR'] = self.config.getdir('TC_PAIRS_OUTPUT_DIR')
    c_dict['READ_ALL_FILES'] = self.config.getbool(
        'config', 'TC_PAIRS_READ_ALL_FILES')
    c_dict['OUTPUT_BASE'] = self.config.getstr('dir', 'OUTPUT_BASE')
    # storm filtering criteria (all optional lists)
    c_dict['CYCLONE'] = util.getlist(
        self.config.getstr('config', 'TC_PAIRS_CYCLONE', ''))
    c_dict['MODEL'] = util.getlist(
        self.config.getstr('config', 'MODEL', ''))
    c_dict['STORM_ID'] = util.getlist(
        self.config.getstr('config', 'TC_PAIRS_STORM_ID', ''))
    c_dict['BASIN'] = util.getlist(
        self.config.getstr('config', 'TC_PAIRS_BASIN', ''))
    c_dict['STORM_NAME'] = util.getlist(
        self.config.getstr('config', 'TC_PAIRS_STORM_NAME'))
    c_dict['DLAND_FILE'] = self.config.getstr('config',
                                              'TC_PAIRS_DLAND_FILE')

    c_dict['ADECK_TEMPLATE'] = self.config.getraw(
        'filename_templates', 'TC_PAIRS_ADECK_TEMPLATE', '')
    c_dict['BDECK_TEMPLATE'] = self.config.getraw(
        'filename_templates', 'TC_PAIRS_BDECK_TEMPLATE')
    c_dict['EDECK_TEMPLATE'] = self.config.getraw(
        'filename_templates', 'TC_PAIRS_EDECK_TEMPLATE', '')
    c_dict['OUTPUT_TEMPLATE'] = self.config.getraw(
        'filename_templates', 'TC_PAIRS_OUTPUT_TEMPLATE')
    # skip-if-exists flags avoid redoing reformatting/tc_pairs runs
    c_dict['SKIP_REFORMAT'] = self.config.getbool(
        'config', 'TC_PAIRS_SKIP_IF_REFORMAT_EXISTS', False)
    c_dict['SKIP_OUTPUT'] = self.config.getbool(
        'config', 'TC_PAIRS_SKIP_IF_OUTPUT_EXISTS', False)
    c_dict['REFORMAT_DECK'] = self.config.getbool(
        'config', 'TC_PAIRS_REFORMAT_DECK', False)
    c_dict['REFORMAT_DECK_TYPE'] = \
        self.config.getstr('config', 'TC_PAIRS_REFORMAT_TYPE', 'SBU')
    c_dict['REFORMAT_DIR'] = \
        self.config.getdir('TC_PAIRS_REFORMAT_DIR',
                           os.path.join(c_dict['OUTPUT_BASE'],
                                        'track_data_atcf'))
    # only process ADECK/EDECK data when a template was provided
    c_dict['GET_ADECK'] = True if c_dict['ADECK_TEMPLATE'] else False
    c_dict['GET_EDECK'] = True if c_dict['EDECK_TEMPLATE'] else False

    return c_dict
def main():
    """!Main program.

    Master METplus script that invokes the necessary Python scripts
    to perform various activities, such as series analysis."""

    # Setup Task logger, Until Conf object is created, Task logger is
    # only logging to tty, not a file.
    logger = logging.getLogger('master_metplus')
    logger.info('Starting METplus v%s', util.get_version_number())

    # Parse arguments, options and return a config instance.
    conf = config_metplus.setup(filename='master_metplus.py')

    # NOW we have a conf object p, we can now get the logger
    # and set the handler to write to the LOG_METPLUS
    # TODO: Frimel setting up logger file handler.
    # Setting up handler i.e util.get_logger should be moved to
    # the setup wrapper and encapsulated in the config object.
    # than you would get it this way logger=p.log(). The config
    # object has-a logger we want.
    logger = util.get_logger(conf)

    logger.info('Running METplus v%s called with command: %s',
                util.get_version_number(), ' '.join(sys.argv))

    # check for deprecated config items and warn user to remove/replace them
    util.check_for_deprecated_config(conf, logger)

    config = ConfigWrapper(conf, logger)

    # set staging dir to OUTPUT_BASE/stage if not set
    if not config.has_option('dir', 'STAGING_DIR'):
        config.set('dir', 'STAGING_DIR',
                   os.path.join(config.getdir('OUTPUT_BASE'), "stage"))

    # handle dir to write temporary files
    util.handle_tmp_dir(config)

    # This is available in each subprocess from os.system BUT
    # we also set it in each process since they may be called stand alone.
    os.environ['MET_BASE'] = config.getdir('MET_BASE')

    config.env = os.environ.copy()

    # Use config object to get the list of processes to call
    process_list = util.getlist(config.getstr('config', 'PROCESS_LIST'))

    # Keep this comment.
    # When running commands in the process_list, reprocess the
    # original command line using (item))[sys.argv[1:]].
    #
    # You could call each task (ie. run_tc_pairs.py) without any args since
    # the final METPLUS_CONF file was just created from config_metplus.setup,
    # and each task, also calls setup, which use an existing final conf
    # file over command line args.
    #
    # both work ...
    # Note: Using (item))sys.argv[1:], is preferable since
    # it doesn't depend on the conf file existing.
    processes = []
    for item in process_list:
        try:
            # each process gets its own logger
            logger = config.log(item)
            # instantiate <item>Wrapper by looking up the class in this
            # module's namespace; unknown names raise AttributeError
            command_builder = \
                getattr(sys.modules[__name__],
                        item + "Wrapper")(config, logger)
            # if Usage specified in PROCESS_LIST, print usage and exit
            if item == 'Usage':
                command_builder.run_all_times()
                exit(1)
        except AttributeError:
            raise NameError("Process %s doesn't exist" % item)

        processes.append(command_builder)

    # LOOP_ORDER is preferred; LOOP_METHOD is the legacy fallback name
    loop_order = config.getstr('config', 'LOOP_ORDER', '')
    if loop_order == '':
        loop_order = config.getstr('config', 'LOOP_METHOD')

    if loop_order == "processes":
        for process in processes:
            # referencing using repr(process.app_name) in
            # log since it may be None,
            # if not set in the command builder subclass' contsructor,
            # and no need to generate an exception because of that.
            produtil.log.postmsg('master_metplus Calling run_all_times '
                                 'in: %s wrapper.' % repr(process.app_name))
            process.run_all_times()
    elif loop_order == "times":
        util.loop_over_times_and_call(config, processes)
    else:
        logger.error("Invalid LOOP_METHOD defined. " + \
                     "Options are processes, times")
        exit()

    # scrub staging directory if requested
    if config.getbool('config', 'SCRUB_STAGING_DIR', False) and\
       os.path.exists(config.getdir('STAGING_DIR')):
        staging_dir = config.getdir('STAGING_DIR')
        logger.info("Scrubbing staging dir: %s", staging_dir)
        shutil.rmtree(staging_dir)

    # rewrite final conf so it contains all of the default values used
    util.write_final_conf(conf, logger)

    logger.info('METplus has successfully finished running.')

    exit()
def create_c_dict(self):
    """!Create a dictionary containing the values set in the config file
       that are required for running ensemble stat.
       This will make it easier for unit testing.

       Returns:
           @returns A dictionary of the ensemble stat values
                    from the config file.
    """
    c_dict = super(EnsembleStatWrapper, self).create_c_dict()

    c_dict['ONCE_PER_FIELD'] = self.config.getbool(
        'config', 'ENSEMBLE_STAT_ONCE_PER_FIELD', False)

    # input data types; empty string means let MET auto-detect
    c_dict['FCST_INPUT_DATATYPE'] = \
        self.config.getstr('config',
                           'FCST_ENSEMBLE_STAT_INPUT_DATATYPE', '')

    c_dict['OBS_POINT_INPUT_DATATYPE'] = \
        self.config.getstr('config',
                           'OBS_ENSEMBLE_STAT_INPUT_POINT_DATATYPE', '')

    c_dict['OBS_GRID_INPUT_DATATYPE'] = \
        self.config.getstr('config',
                           'OBS_ENSEMBLE_STAT_INPUT_GRID_DATATYPE', '')

    # which grid to regrid to for verification, defaults to forecast grid
    c_dict['GRID_VX'] = self.config.getstr('config',
                                           'ENSEMBLE_STAT_GRID_VX',
                                           'FCST')

    c_dict['CONFIG_FILE'] = \
        self.config.getstr('config', 'ENSEMBLE_STAT_CONFIG_FILE',
                           c_dict['CONFIG_DIR']+'/EnsembleStatConfig_SFC')

    # fraction of members required for a valid ensemble value
    c_dict['ENS_THRESH'] = \
        self.config.getstr('config', 'ENSEMBLE_STAT_ENS_THRESH', '1.0')

    # met_obs_error_table is not required, if it is not defined
    # set it to the empty string '', that way the MET default is used.
    c_dict['MET_OBS_ERROR_TABLE'] = \
        self.config.getstr('config',
                           'ENSEMBLE_STAT_MET_OBS_ERROR_TABLE', '')

    # No Default being set this is REQUIRED TO BE DEFINED in conf file.
    c_dict['N_MEMBERS'] = \
        self.config.getint('config', 'ENSEMBLE_STAT_N_MEMBERS')

    c_dict['OBS_POINT_INPUT_DIR'] = \
        self.config.getdir('OBS_ENSEMBLE_STAT_POINT_INPUT_DIR', '')

    c_dict['OBS_POINT_INPUT_TEMPLATE'] = \
        self.config.getraw('filename_templates',
                           'OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE')

    c_dict['OBS_GRID_INPUT_DIR'] = \
        self.config.getdir('OBS_ENSEMBLE_STAT_GRID_INPUT_DIR', '')

    c_dict['OBS_GRID_INPUT_TEMPLATE'] = \
        self.config.getraw('filename_templates',
                           'OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE')

    # The ensemble forecast files input directory and filename templates
    c_dict['FCST_INPUT_DIR'] = \
        self.config.getdir('FCST_ENSEMBLE_STAT_INPUT_DIR', '')

    # This is a raw string and will be interpreted to generate the
    # ensemble member filenames. This may be a list of 1 or n members.
    c_dict['FCST_INPUT_TEMPLATE'] = \
        util.getlist(self.config.getraw('filename_templates',
                                        'FCST_ENSEMBLE_STAT_INPUT_TEMPLATE'))

    c_dict['OUTPUT_DIR'] = self.config.getdir('ENSEMBLE_STAT_OUTPUT_DIR')

    # handle window variables [FCST/OBS]_[FILE_]_WINDOW_[BEGIN/END]
    self.handle_window_variables(c_dict, 'ensemble_stat')

    # need to set these so that find_data will succeed
    # (point and grid obs share the same window settings)
    c_dict['OBS_POINT_WINDOW_BEGIN'] = c_dict['OBS_WINDOW_BEGIN']
    c_dict['OBS_POINT_WINDOW_END'] = c_dict['OBS_WINDOW_END']
    c_dict['OBS_GRID_WINDOW_BEGIN'] = c_dict['OBS_WINDOW_BEGIN']
    c_dict['OBS_GRID_WINDOW_END'] = c_dict['OBS_WINDOW_END']

    c_dict['OBS_POINT_FILE_WINDOW_BEGIN'] = c_dict['OBS_FILE_WINDOW_BEGIN']
    c_dict['OBS_POINT_FILE_WINDOW_END'] = c_dict['OBS_FILE_WINDOW_END']
    c_dict['OBS_GRID_FILE_WINDOW_BEGIN'] = c_dict['OBS_FILE_WINDOW_BEGIN']
    c_dict['OBS_GRID_FILE_WINDOW_END'] = c_dict['OBS_FILE_WINDOW_END']

    return c_dict
def create_c_dict(self):
    """! Create a data structure (dictionary) that contains all the
    values set in the configuration files for the PB2NC wrapper.

    Args:

    Returns:
        c_dict - a dictionary containing the settings in the
                 configuration files (that aren't in the metplus_data,
                 metplus_system, and metplus_runtime config files).
    """
    c_dict = super(PB2NCWrapper, self).create_c_dict()

    c_dict['SKIP_IF_OUTPUT_EXISTS'] = \
        self.config.getbool('config', 'PB2NC_SKIP_IF_OUTPUT_EXISTS', False)

    # list of hour offsets to try when looking for input data
    c_dict['OFFSETS'] = util.getlistint(self.config.getstr('config',
                                                           'PB2NC_OFFSETS',
                                                           '0'))

    # Directories
    c_dict['OBS_INPUT_DIR'] = self.config.getdir('PB2NC_INPUT_DIR')
    c_dict['OUTPUT_DIR'] = self.config.getdir('PB2NC_OUTPUT_DIR')

    c_dict['OBS_INPUT_TEMPLATE'] = self.config.getraw('filename_templates',
                                                      'PB2NC_INPUT_TEMPLATE')
    c_dict['OUTPUT_TEMPLATE'] = self.config.getraw('filename_templates',
                                                   'PB2NC_OUTPUT_TEMPLATE')

    c_dict['OBS_INPUT_DATATYPE'] = self.config.getstr('config',
                                                      'PB2NC_INPUT_DATATYPE',
                                                      '')

    # Configuration
    c_dict['CONFIG_FILE'] = self.config.getstr('config',
                                               'PB2NC_CONFIG_FILE')

    c_dict['MESSAGE_TYPE'] = util.getlist(
        self.config.getstr('config', 'PB2NC_MESSAGE_TYPE', '[]'))
    # re-serialize the list using double quotes so the value can be
    # passed through to the MET config environment
    tmp_message_type = str(c_dict['MESSAGE_TYPE']).replace("\'", "\"")
    c_dict['MESSAGE_TYPE'] = ''.join(tmp_message_type)

    c_dict['STATION_ID'] = util.getlist(
        self.config.getstr('config', 'PB2NC_STATION_ID', '[]'))
    # NOTE(review): unlike MESSAGE_TYPE above, whitespace is stripped
    # here (.split() before joining) — confirm the asymmetry is intended
    tmp_message_type = str(c_dict['STATION_ID']).replace("\'", "\"")
    c_dict['STATION_ID'] = ''.join(tmp_message_type.split())

    grid_id = self.config.getstr('config', 'PB2NC_GRID')
    if grid_id.startswith('G'):
        # Reformat grid ids that begin with 'G' ( G10, G1, etc.) to format
        # Gnnn
        c_dict['GRID'] = self.reformat_grid_id(grid_id)
    else:
        c_dict['GRID'] = grid_id

    c_dict['POLY'] = self.config.getstr('config', 'PB2NC_POLY')
    c_dict['BUFR_VAR_LIST'] = util.getlist(
        self.config.getstr('config', 'PB2NC_OBS_BUFR_VAR_LIST', '[]'))

    c_dict['TIME_SUMMARY_FLAG'] = self.config.getbool('config',
                                                      'PB2NC_TIME_SUMMARY_FLAG')
    c_dict['TIME_SUMMARY_BEG'] = self.config.getstr('config',
                                                    'PB2NC_TIME_SUMMARY_BEG')
    c_dict['TIME_SUMMARY_END'] = self.config.getstr('config',
                                                    'PB2NC_TIME_SUMMARY_END')
    c_dict['TIME_SUMMARY_VAR_NAMES'] = util.getlist(
        self.config.getstr('config', 'PB2NC_TIME_SUMMARY_VAR_NAMES'))
    c_dict['TIME_SUMMARY_TYPES'] = util.getlist(
        self.config.getstr('config', 'PB2NC_TIME_SUMMARY_TYPES'))

    # PB2NC-specific windows fall back to the generic OBS_* windows,
    # which in turn default to 0 seconds
    c_dict['OBS_WINDOW_BEGIN'] = \
        self.config.getseconds('config', 'PB2NC_WINDOW_BEGIN',
                               self.config.getseconds('config',
                                                      'OBS_WINDOW_BEGIN', 0))
    c_dict['OBS_WINDOW_END'] = \
        self.config.getseconds('config', 'PB2NC_WINDOW_END',
                               self.config.getseconds('config',
                                                      'OBS_WINDOW_END', 0))

    c_dict['OBS_FILE_WINDOW_BEGIN'] = \
        self.config.getseconds('config', 'PB2NC_FILE_WINDOW_BEGIN',
                               self.config.getseconds('config',
                                                      'OBS_FILE_WINDOW_BEGIN',
                                                      0))
    c_dict['OBS_FILE_WINDOW_END'] = \
        self.config.getseconds('config', 'PB2NC_FILE_WINDOW_END',
                               self.config.getseconds('config',
                                                      'OBS_FILE_WINDOW_END',
                                                      0))

    # pb2nc accepts multiple input files in one call
    c_dict['ALLOW_MULTIPLE_FILES'] = True

    return c_dict
def compare_results(p, p_b):
    """!Compare output files produced by two runs (configs p and p_b)
    for every process in PROCESS_LIST over every run time in the
    configured loop window.

    Args:
        p   - config object for run A
        p_b - config object for run B
    Returns:
        True if all compared files matched, False otherwise.
    """
    a_dir = p.getstr('config', 'OUTPUT_BASE')
    b_dir = p_b.getstr('config', 'OUTPUT_BASE')
    print("****************************")
    print("* TEST RESULTS *")
    print("****************************")
    good = True

    processes = util.getlist(p.getstr('config', 'PROCESS_LIST'))
    # loop either by initialization time or by valid time
    use_init = p.getbool('config', 'LOOP_BY_INIT')
    if use_init:
        time_format = p.getstr('config', 'INIT_TIME_FMT')
        start_t = p.getstr('config', 'INIT_BEG')
        end_t = p.getstr('config', 'INIT_END')
        time_interval = p.getint('config', 'INIT_INC')
    else:
        time_format = p.getstr('config', 'VALID_TIME_FMT')
        start_t = p.getstr('config', 'VALID_BEG')
        end_t = p.getstr('config', 'VALID_END')
        time_interval = p.getint('config', 'VALID_INC')

    # convert the time bounds to epoch seconds for the loop
    loop_time = calendar.timegm(time.strptime(start_t, time_format))
    end_time = calendar.timegm(time.strptime(end_t, time_format))

    while loop_time <= end_time:
        run_time = time.strftime("%Y%m%d%H%M", time.gmtime(loop_time))
        print("Checking "+run_time)
        for process in processes:
            print("Checking output from "+process)
            # each branch collects the comparable output files for one tool;
            # most use the YYYYMMDD portion of run_time as the subdirectory
            if process == "GridStat":
                # out_subdir = "uswrp/met_out/QPF/200508070000/grid_stat"
                out_a = p.getstr('config', "GRID_STAT_OUT_DIR")
                out_b = p_b.getstr('config', "GRID_STAT_OUT_DIR")
                glob_string = "{:s}/{:s}/grid_stat/*"
                files_a = glob.glob(glob_string.format(out_a, run_time))
                files_b = glob.glob(glob_string.format(out_b, run_time))
            elif process == "PcpCombineObs":
                out_a = p.getstr('config', "OBS_PCP_COMBINE_OUTPUT_DIR")
                out_b = p_b.getstr('config', "OBS_PCP_COMBINE_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "PcpCombineModel":
                out_a = p.getstr('config', "FCST_PCP_COMBINE_OUTPUT_DIR")
                out_b = p_b.getstr('config', "FCST_PCP_COMBINE_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "RegridDataPlane":
                out_a = p.getstr('config', "OBS_REGRID_DATA_PLANE_OUTPUT_DIR")
                out_b = p_b.getstr('config', "OBS_REGRID_DATA_PLANE_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "TcPairs":
                out_a = p.getstr('config', "TC_PAIRS_DIR")
                out_b = p_b.getstr('config', "TC_PAIRS_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "ExtractTiles":
                # TODO FIX DIR
                out_a = p.getstr('config', "EXTRACT_OUT_DIR")
                out_b = p_b.getstr('config', "EXTRACT_OUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                # tiles are stored under YYYYMMDD_HH subdirectories
                date_dir = run_time[0:8]+"_"+run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            elif process == "SeriesByInit":
                # TODO FIX DIR
                out_a = p.getstr('config', "SERIES_INIT_FILTERED_OUT_DIR")
                out_b = p_b.getstr('config', "SERIES_INIT_FILTERED_OUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                date_dir = run_time[0:8]+"_"+run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            elif process == "SeriesByLead":
                # TODO FIX DIR
                out_a = p.getstr('config', "SERIES_LEAD_FILTERED_OUT_DIR")
                out_b = p_b.getstr('config', "SERIES_LEAD_FILTERED_OUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                date_dir = run_time[0:8]+"_"+run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            else:
                print("PROCESS:"+process+" is not valid")
                continue

            if not compare_output_files(files_a, files_b, a_dir, b_dir):
                good = False

        loop_time += time_interval

    if good:
        print("Success")
    else:
        print("ERROR: Some differences")
    return good
def main():
    """!Entry point for the example driver script: parse command-line
    options, load the METplus config, then loop over init times, leads,
    forecast variables, accumulations and ob types, running each tool
    listed in PROCESS_LIST."""
    logger = logging.getLogger('run_example')
    init_time = 0
    start_time = 0
    end_time = 0
    time_interval = 1
    short_opts = "c:r:h"
    long_opts = ["config=", "help", "runtime="]
    # All command line input, get options and arguments
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], short_opts, long_opts)
    except getopt.GetoptError as err:
        print(str(err))
        usage('SCRIPT IS EXITING DUE TO UNRECOGNIZED COMMAND LINE OPTION')
    for k, v in opts:
        if k in ('-c', '--config'):
            # adds the conf file to the list of arguments.
            args.append(config_launcher.set_conf_file_path(v))
        elif k in ('-h', '--help'):
            usage()
            exit()
        elif k in ('-r', '--runtime'):
            # run a single time: start and end collapse to the same value
            start_time = v
            end_time = v
        else:
            assert False, "UNHANDLED OPTION"
    if not args:
        args = None
    (parm, infiles, moreopt) = config_launcher.parse_launch_args(args,
                                                                 usage,
                                                                 None,
                                                                 logger)
    p = config_launcher.launch(infiles, moreopt)
    # replace the tty-only logger with the file logger from the config
    logger = util.get_logger(p)
    logger.setLevel(logging.DEBUG)

    # no -r on the command line: take the time window from the config
    if start_time == 0:
        start_time = p.getstr('config', 'START_TIME')
        end_time = p.getstr('config', 'END_TIME')
        time_interval = p.getstr('config', 'TIME_INTERVAL')

    # Get the list of processes to call
    process_list = util.getlist(p.getstr('config', 'PROCESS_LIST'))
    model_type = p.getstr('config', 'MODEL_TYPE')
    fcst_vars = util.getlist(p.getstr('config', 'FCST_VARS'))
    lead_seq = util.getlistint(p.getstr('config', 'LEAD_SEQ'))

    init_time = start_time
    # NOTE(review): this compares time strings lexicographically — it is
    # only correct if START/END_TIME are fixed-width, zero-padded
    # timestamps; confirm the configured format guarantees that
    while init_time <= end_time:
        print("")
        print("****************************************")
        print("* RUNNING MET+")
        print("* EVALUATING " + model_type + " at init time: " + init_time)
        print("****************************************")
        logger.info("****************************************")
        logger.info("* RUNNING MET+")
        logger.info("* EVALUATING " + model_type + " at init time: " +
                    init_time)
        logger.info("****************************************")
        for lead in lead_seq:
            for fcst_var in fcst_vars:
                # loop over models to compare
                accums = util.getlist(p.getstr('config',
                                               fcst_var + "_ACCUM"))
                ob_types = util.getlist(p.getstr('config',
                                                 fcst_var + "_OBTYPE"))
                for accum in accums:
                    for ob_type in ob_types:
                        # cannot accumulate more hours than the lead time
                        if lead < int(accum):
                            continue
                        obs_var = p.getstr('config', ob_type + "_VAR")
                        logger.info("")
                        logger.info("")
                        logger.info("For " + init_time + " F" + str(lead) +
                                    ", processing " + model_type + "_" +
                                    fcst_var + "_" + accum + " vs " +
                                    ob_type + " " + obs_var + "_" + accum)
                        valid_time = util.shift_time(init_time, lead)
                        data_interval = p.getint('config',
                                                 ob_type + '_DATA_INTERVAL')
                        # skip valid hours where this ob type has no data
                        if int(valid_time[8:10]) % data_interval != 0:
                            logger.warning("No observation for valid time: " +
                                           valid_time + ". Skipping...")
                            continue
                        for process in process_list:
                            if process == "pcp_combine":
                                run_pcp = CG_pcp_combine(p, logger)
                                run_pcp.run_at_time(valid_time, accum,
                                                    ob_type, fcst_var)
                            elif process == "regrid_data_plane":
                                run_regrid = CG_regrid_data_plane(p, logger)
                                run_regrid.run_at_time(valid_time, accum,
                                                       ob_type)
                            elif process == "grid_stat":
                                run_grid_stat = CG_grid_stat(p, logger)
                                run_grid_stat.run_at_time(init_time, lead,
                                                          accum, ob_type,
                                                          fcst_var)
                            else:
                                print("ERROR: Invalid process in process list")
                                exit(1)
        init_time = util.shift_time(init_time, int(time_interval))
    (logger).info("END OF EXECUTION")
def create_point_stat_dict(self):
    """! Create a dictionary that holds all the values set in the
         METplus config file for the point-stat wrapper.

         Args:
             None

         Returns:
             ps_dict - A dictionary containing the key-value pairs set
                       in the METplus configuration file.
    """
    # pylint:disable=protected-access
    # Need to call sys._getframe() to get the filename and method/func
    # for logging information.

    # Used for logging.
    cur_filename = sys._getframe().f_code.co_filename
    cur_function = sys._getframe().f_code.co_name
    self.logger.info("INFO|:" + cur_function + '|' + cur_filename + '| ' +
                     "Creating point-stat dictionary ...")

    ps_dict = dict()

    # directories
    ps_dict['APP_PATH'] = os.path.join(self.p.getdir('MET_INSTALL_DIR'),
                                       'bin/point_stat')
    ps_dict['APP_NAME'] = os.path.basename(ps_dict['APP_PATH'])
    # BUGFIX: getdir() takes the directory key name (plus an optional
    # default), not a section name.  The previous calls passed 'dir' or
    # 'exe' as the key, which matches no other getdir() usage in this
    # file (e.g. self.p.getdir('MET_INSTALL_DIR') above).
    ps_dict['PROJ_DIR'] = self.p.getdir('PROJ_DIR')
    ps_dict['TMP_DIR'] = self.p.getdir('TMP_DIR')
    ps_dict['METPLUS_BASE'] = self.p.getdir('METPLUS_BASE')
    ps_dict['MET_BUILD_BASE'] = self.p.getdir('MET_BUILD_BASE')
    ps_dict['MET_INSTALL_DIR'] = self.p.getdir('MET_INSTALL_DIR')
    ps_dict['PARM_BASE'] = self.p.getdir('PARM_BASE')
    ps_dict['OUTPUT_BASE'] = self.p.getstr('dir', 'OUTPUT_BASE')
    ps_dict['FCST_INPUT_DIR'] = self.p.getstr('dir', 'FCST_INPUT_DIR')
    ps_dict['OBS_INPUT_DIR'] = self.p.getstr('dir', 'OBS_INPUT_DIR')
    ps_dict['POINT_STAT_OUTPUT_DIR'] = \
        self.p.getstr('dir', 'POINT_STAT_OUTPUT_DIR')

    # Configuration
    ps_dict['TIME_METHOD'] = self.p.getstr('config', 'TIME_METHOD')
    ps_dict['LOOP_METHOD'] = self.p.getstr('config', 'LOOP_METHOD')
    ps_dict['MODEL_NAME'] = self.p.getstr('config', 'MODEL_NAME')
    ps_dict['OBS_NAME'] = self.p.getstr('config', 'OBS_NAME')
    ps_dict['POINT_STAT_CONFIG_FILE'] = \
        self.p.getstr('config', 'POINT_STAT_CONFIG_FILE')
    ps_dict['REGRID_TO_GRID'] = self.p.getstr('config', 'REGRID_TO_GRID')
    ps_dict['POINT_STAT_GRID'] = self.p.getstr('config', 'POINT_STAT_GRID')
    # list-valued settings are split into Python lists
    ps_dict['POINT_STAT_POLY'] = util.getlist(
        self.p.getstr('config', 'POINT_STAT_POLY'))
    ps_dict['POINT_STAT_STATION_ID'] = util.getlist(
        self.p.getstr('config', 'POINT_STAT_STATION_ID'))
    ps_dict['POINT_STAT_MESSAGE_TYPE'] = util.getlist(
        self.p.getstr('config', 'POINT_STAT_MESSAGE_TYPE'))

    # Retrieve YYYYMMDD begin and end time
    ps_dict['BEG_TIME'] = self.p.getstr('config', 'BEG_TIME')[0:8]
    ps_dict['END_TIME'] = self.p.getstr('config', 'END_TIME')[0:8]
    ps_dict['START_HOUR'] = self.p.getstr('config', 'START_HOUR')
    ps_dict['END_HOUR'] = self.p.getstr('config', 'END_HOUR')
    ps_dict['START_DATE'] = self.p.getstr('config', 'START_DATE')
    ps_dict['END_DATE'] = self.p.getstr('config', 'END_DATE')
    ps_dict['FCST_HR_START'] = self.p.getstr('config', 'FCST_HR_START')
    ps_dict['FCST_HR_END'] = self.p.getstr('config', 'FCST_HR_END')
    ps_dict['FCST_HR_INTERVAL'] = self.p.getstr('config',
                                                'FCST_HR_INTERVAL')
    ps_dict['OBS_WINDOW_BEGIN'] = self.p.getstr('config',
                                                'OBS_WINDOW_BEGIN')
    ps_dict['OBS_WINDOW_END'] = self.p.getstr('config', 'OBS_WINDOW_END')

    # Filename templates and regex patterns for input dirs and filenames
    ps_dict['FCST_INPUT_FILE_REGEX'] = \
        self.p.getraw('regex_pattern', 'FCST_INPUT_FILE_REGEX')
    ps_dict['OBS_INPUT_FILE_REGEX'] = \
        self.p.getraw('regex_pattern', 'OBS_INPUT_FILE_REGEX')

    # non-MET executables (see BUGFIX note above re: getdir arguments)
    ps_dict['WGRIB2'] = self.p.getdir('WGRIB2')
    ps_dict['RM_EXE'] = self.p.getdir('RM_EXE')
    ps_dict['CUT_EXE'] = self.p.getdir('CUT_EXE')
    ps_dict['TR_EXE'] = self.p.getdir('TR_EXE')
    ps_dict['NCAP2_EXE'] = self.p.getdir('NCAP2_EXE')
    ps_dict['CONVERT_EXE'] = self.p.getdir('CONVERT_EXE')
    ps_dict['NCDUMP_EXE'] = self.p.getdir('NCDUMP_EXE')
    ps_dict['EGREP_EXE'] = self.p.getdir('EGREP_EXE')

    return ps_dict
def test_getlist_has_commas():
    """!Double-quoted items keep their embedded commas when split."""
    conf_value = 'gt2.7, >3.6, eq42, "has,commas,in,it"'
    expected = ['gt2.7', '>3.6', 'eq42', 'has,commas,in,it']
    assert util.getlist(conf_value) == expected
def test_getlist():
    """!A comma-separated config string splits into trimmed items."""
    conf_value = 'gt2.7, >3.6, eq42'
    expected = ['gt2.7', '>3.6', 'eq42']
    assert util.getlist(conf_value) == expected
def grid2grid_sfc_plot_format(self):
    """!Run stat_analysis filter jobs to reformat grid2grid surface
    statistics for plotting: loops over hour, model, variable, region
    and lead, exporting the selection criteria as environment variables
    and writing one dump_row .stat file per combination."""
    self.logger.info("Formatting for plotting for grid2grid-sfc")
    #read config
    use_init = self.p.getbool('config', 'LOOP_BY_INIT', True)
    # pre-set the valid/init env var pair that stat_analysis reads;
    # whichever mode is unused is exported as an empty string
    if use_init:
        start_t = self.p.getstr('config', 'INIT_BEG')
        end_t = self.p.getstr('config', 'INIT_END')
        self.add_env_var("FCST_VALID_BEG", "")
        self.add_env_var("FCST_VALID_END", "")
        self.add_env_var("FCST_INIT_BEG", start_t)
        self.add_env_var("FCST_INIT_END", end_t)
    else:
        start_t = self.p.getstr('config', 'VALID_BEG')
        end_t = self.p.getstr('config', 'VALID_END')
        self.add_env_var("FCST_VALID_BEG", start_t)
        self.add_env_var("FCST_VALID_END", end_t)
        self.add_env_var("FCST_INIT_BEG", "")
        self.add_env_var("FCST_INIT_END", "")
    stat_analysis_lookin_dir = self.p.getdir('STAT_ANALYSIS_LOOKIN_DIR')
    stat_analysis_out_dir = self.p.getdir('STAT_ANALYSIS_OUT_DIR')
    var_list = util.parse_var_list(self.p)
    region_list = util.getlist(self.p.getstr('config', 'REGION_LIST'))
    lead_list = util.getlistint(self.p.getstr('config', 'LEAD_LIST'))
    model_list = util.getlist(self.p.getstr('config', 'MODEL_LIST'))
    self.add_env_var('INTERP', 'NEAREST')
    if use_init:
        loop_beg_hour = self.p.getint('config', 'INIT_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'INIT_END_HOUR')
        loop_inc = self.p.getint('config', 'INIT_INC')
    else:
        loop_beg_hour = self.p.getint('config', 'VALID_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'VALID_END_HOUR')
        loop_inc = self.p.getint('config', 'VALID_INC')
    loop_hour = loop_beg_hour
    while loop_hour <= loop_end_hour:
        loop_hour_str = str(loop_hour).zfill(2)
        #filtering times based on if files made based on init_time or valid_time
        if use_init:
            start_t = self.p.getstr('config', 'INIT_BEG')
            end_t = self.p.getstr('config', 'INIT_END')
            self.add_env_var("FCST_VALID_BEG", "")
            self.add_env_var("FCST_VALID_END", "")
            self.add_env_var("FCST_VALID_HOUR", "")
            self.add_env_var("FCST_INIT_BEG",
                             start_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_INIT_END",
                             end_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_INIT_HOUR", '"' + loop_hour_str + '"')
        else:
            start_t = self.p.getstr('config', 'VALID_BEG')
            end_t = self.p.getstr('config', 'VALID_END')
            self.add_env_var("FCST_VALID_BEG",
                             start_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_VALID_END",
                             end_t + "_" + loop_hour_str + "0000")
            self.add_env_var("FCST_VALID_HOUR", '"' + loop_hour_str + '"')
            self.add_env_var("FCST_INIT_BEG", "")
            self.add_env_var("FCST_INIT_END", "")
            self.add_env_var("FCST_INIT_HOUR", "")
        for model in model_list:
            self.add_env_var('MODEL', model)
            #build -lookin directory
            self.set_lookin_dir(
                os.path.join(stat_analysis_lookin_dir,
                             loop_hour_str + 'Z', model))
            for var_info in var_list:
                fcst_var_name = var_info.fcst_name
                fcst_var_level = var_info.fcst_level
                #fcst_var_extra = var_info.fcst_extra.replace(" = ", "").rstrip(";")
                obs_var_name = var_info.obs_name
                obs_var_level = var_info.obs_level
                #obs_var_extra = var_info.obs_extra.replace(" = ", "").rstrip(";")
                self.add_env_var('FCST_VAR_NAME', fcst_var_name)
                self.add_env_var('FCST_VAR_LEVEL', fcst_var_level)
                self.add_env_var('OBS_VAR_NAME', obs_var_name)
                self.add_env_var('OBS_VAR_LEVEL', obs_var_level)
                for region in region_list:
                    self.add_env_var('REGION', region)
                    for lead in lead_list:
                        # zero-pad single-digit leads (e.g. 6 -> "06")
                        if lead < 10:
                            lead_string = '0' + str(lead)
                        else:
                            lead_string = str(lead)
                        self.add_env_var('LEAD', lead_string)
                        # make sure the per-model/region output dir exists
                        if not os.path.exists(
                                os.path.join(stat_analysis_out_dir,
                                             loop_hour_str + "Z",
                                             model, region)):
                            os.makedirs(
                                os.path.join(stat_analysis_out_dir,
                                             loop_hour_str + "Z",
                                             model, region))
                        ##dump_row_file = os.path.join(stat_analysis_out_dir,
                        ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_"+fcst_var_name+fcst_var_level+".stat")
                        dump_row_file = os.path.join(
                            stat_analysis_out_dir, loop_hour_str + "Z",
                            model, region,
                            model + "_f" + lead_string + "_fcst" +
                            fcst_var_name + fcst_var_level + "_obs" +
                            obs_var_name + obs_var_level + ".stat")
                        ##dump_row_file = os.path.join(stat_analysis_out_dir,
                        ##    loop_hour_str+"Z", model, region, model+"_f"+lead_string+"_fcst"+fcst_var_name+fcst_var_level+fcst_var_extra+"_obs"+obs_var_name+obs_var_level+obs_var_extra+".stat")
                        job = "-job filter -dump_row " + dump_row_file
                        self.add_env_var("JOB", job)
                        #get stat_analysis config file
                        self.set_param_file(
                            self.p.getstr('config', 'STAT_ANALYSIS_CONFIG'))
                        #environment
                        self.logger.debug("")
                        self.logger.debug("ENVIRONMENT FOR NEXT COMMAND: ")
                        self.logger.debug("")
                        self.logger.debug(
                            "COPYABLE ENVIRONMENT FOR NEXT COMMAND: ")
                        self.logger.debug("")
                        #build command
                        cmd = self.get_command()
                        if cmd is None:
                            # NOTE(review): consider self.logger.error here
                            # instead of print, to match the rest of the file
                            print(
                                "ERROR: stat_analysis could not generate command"
                            )
                            return
                        self.logger.info("")
                        self.build()
                        # reset builder state for the next combination
                        self.clear()
    loop_hour += loop_inc
def compare_results(param_a, param_b):
    """!Compare the output of two METplus runs configured by param_a and
    param_b, process by process and run time by run time.

    Args:
        param_a - config file (or arg set) for run A
        param_b - config file (or arg set) for run B
    Returns:
        True if all compared files matched, False otherwise.
    """
    p, p_b = get_params(param_a, param_b)
    a_dir = p.getdir('OUTPUT_BASE')
    b_dir = p_b.getdir('OUTPUT_BASE')
    print("****************************")
    print("* TEST RESULTS *")
    print("****************************")
    print(param_a + " vs")
    print(param_b)
    good = True

    processes = util.getlist(p.getstr('config', 'PROCESS_LIST'))
    # TODO: Not all apps that use_init will write dirs on init, could be valid
    use_init = util.is_loop_by_init(p)
    if use_init:
        time_format = p.getstr('config', 'INIT_TIME_FMT')
        start_t = p.getstr('config', 'INIT_BEG')
        end_t = p.getstr('config', 'INIT_END')
        time_interval = p.getint('config', 'INIT_INCREMENT')
    else:
        time_format = p.getstr('config', 'VALID_TIME_FMT')
        start_t = p.getstr('config', 'VALID_BEG')
        end_t = p.getstr('config', 'VALID_END')
        time_interval = p.getint('config', 'VALID_INCREMENT')

    loop_time = calendar.timegm(time.strptime(start_t, time_format))
    end_time = calendar.timegm(time.strptime(end_t, time_format))

    while loop_time <= end_time:
        run_time = time.strftime("%Y%m%d%H%M", time.gmtime(loop_time))
        print("Checking " + run_time)
        for process in processes:
            print("Checking output from " + process)
            # BUGFIX: reset per process.  Previously files_a/files_b kept
            # their values from the prior process (or were unbound on the
            # first iteration) when a branch collected nothing — e.g.
            # PcpCombine with neither RUN flag set — causing a NameError
            # or a bogus re-comparison of stale file lists.
            files_a = None
            files_b = None
            if process == "GridStat":
                # out_subdir = "uswrp/met_out/QPF/200508070000/grid_stat"
                out_a = p.getdir("GRID_STAT_OUTPUT_DIR")
                out_b = p_b.getdir("GRID_STAT_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/grid_stat/*"
                files_a = glob.glob(glob_string.format(out_a, run_time))
                files_b = glob.glob(glob_string.format(out_b, run_time))
            elif process == "Mode":
                # out_subdir = "uswrp/met_out/QPF/200508070000/grid_stat"
                out_a = p.getdir("MODE_OUTPUT_DIR")
                out_b = p_b.getdir("MODE_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/mode/*"
                files_a = glob.glob(glob_string.format(out_a, run_time))
                files_b = glob.glob(glob_string.format(out_b, run_time))
            elif process == "PcpCombine":
                out_o_a = ""
                out_a = ""
                if p.getbool('config', 'OBS_PCP_COMBINE_RUN', False):
                    out_o_a = p.getdir("OBS_PCP_COMBINE_OUTPUT_DIR")
                    out_o_b = p_b.getdir("OBS_PCP_COMBINE_OUTPUT_DIR")
                    glob_string = "{:s}/{:s}/*"
                    files_o_a = glob.glob(
                        glob_string.format(out_o_a, run_time[0:8]))
                    files_o_b = glob.glob(
                        glob_string.format(out_o_b, run_time[0:8]))
                if p.getbool('config', 'FCST_PCP_COMBINE_RUN', False):
                    out_a = p.getdir("FCST_PCP_COMBINE_OUTPUT_DIR")
                    out_b = p_b.getdir("FCST_PCP_COMBINE_OUTPUT_DIR")
                    glob_string = "{:s}/{:s}/*"
                    files_a = glob.glob(
                        glob_string.format(out_a, run_time[0:8]))
                    files_b = glob.glob(
                        glob_string.format(out_b, run_time[0:8]))
                # if both fcst and obs are set, run obs here then fcst will
                # run at the end of the if blocks
                if out_o_a != "" and out_a != "" and not compare_output_files(
                        files_o_a, files_o_b, a_dir, b_dir):
                    good = False
                # if only obs ran, set variables so that it runs at end of
                # if blocks
                elif out_o_a != "":
                    files_a = files_o_a
                    files_b = files_o_b
            elif process == "RegridDataPlane":
                out_a = p.getdir("OBS_REGRID_DATA_PLANE_OUTPUT_DIR")
                out_b = p_b.getdir("OBS_REGRID_DATA_PLANE_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "TcPairs":
                out_a = p.getdir("TC_PAIRS_OUTPUT_DIR")
                out_b = p_b.getdir("TC_PAIRS_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*"
                files_a = glob.glob(glob_string.format(out_a, run_time[0:8]))
                files_b = glob.glob(glob_string.format(out_b, run_time[0:8]))
            elif process == "ExtractTiles":
                # TODO FIX DIR
                out_a = p.getdir("EXTRACT_TILES_OUTPUT_DIR")
                out_b = p_b.getdir("EXTRACT_TILES_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                date_dir = run_time[0:8] + "_" + run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            elif process == "SeriesByInit":
                # TODO FIX DIR
                out_a = p.getdir("SERIES_BY_INIT_FILTERED_OUTPUT_DIR")
                out_b = p_b.getdir("SERIES_BY_INIT_FILTERED_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                date_dir = run_time[0:8] + "_" + run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            elif process == "SeriesByLead":
                # TODO FIX DIR
                out_a = p.getdir("SERIES_BY_LEAD_FILTERED_OUTPUT_DIR")
                out_b = p_b.getdir("SERIES_BY_LEAD_FILTERED_OUTPUT_DIR")
                glob_string = "{:s}/{:s}/*/*"
                date_dir = run_time[0:8] + "_" + run_time[8:10]
                files_a = glob.glob(glob_string.format(out_a, date_dir))
                files_b = glob.glob(glob_string.format(out_b, date_dir))
            else:
                print("PROCESS:" + process + " is not valid")
                continue

            # nothing gathered for this process (see BUGFIX note above)
            if files_a is None or files_b is None:
                continue

            if not compare_output_files(files_a, files_b, a_dir, b_dir):
                good = False

        loop_time += time_interval

    if good:
        print("Success")
    else:
        print("ERROR: Some differences")
    return good
def parse_vars_with_level_thresh_list(self):
    """! Parse metplus_final.conf for variable information, collecting
    the variable level information as a list

    OBS_VAR<n>_* settings fall back to their FCST_VAR<n>_* counterparts
    when not defined; mismatched FCST/OBS list lengths are fatal.

    Args:

    Returns:
        var_info - list of objects containing variable information
    """
    var_info = []
    all_conf = self.config.keys('config')
    fcst_indices = []
    # BUGFIX: raw string literal — "\d" in a plain string is an invalid
    # escape sequence (DeprecationWarning since Python 3.6, an error in
    # future versions)
    regex = re.compile(r"FCST_VAR(\d+)_NAME")
    # collect every index n for which FCST_VAR<n>_NAME is defined
    for conf in all_conf:
        result = regex.match(conf)
        if result is not None:
            fcst_indices.append(result.group(1))
    for n in fcst_indices:
        if self.config.has_option('config', "FCST_VAR" + n + "_NAME"):
            fcst_name = self.config.getstr('config',
                                           "FCST_VAR" + n + "_NAME")
            # levels are required for each forecast variable
            if self.config.has_option('config',
                                      "FCST_VAR" + n + "_LEVELS"):
                fcst_levels = util.getlist(
                    self.config.getstr('config',
                                       "FCST_VAR" + n + "_LEVELS"))
            else:
                self.logger.error("FCST_VAR" + n + "_LEVELS not defined")
                exit(1)
            if self.config.has_option('config',
                                      "FCST_VAR" + n + "_OPTIONS"):
                fcst_extra = self.config.getraw(
                    'config', "FCST_VAR" + n + "_OPTIONS")
            else:
                fcst_extra = ""
            if self.config.has_option('config',
                                      "FCST_VAR" + n + "_THRESH"):
                fcst_thresh = util.getlist(
                    self.config.getstr('config',
                                       "FCST_VAR" + n + "_THRESH"))
            else:
                fcst_thresh = ""
            # obs settings default to the corresponding fcst settings
            if self.config.has_option('config', "OBS_VAR" + n + "_NAME"):
                obs_name = self.config.getstr('config',
                                              "OBS_VAR" + n + "_NAME")
            else:
                obs_name = fcst_name
            if self.config.has_option('config',
                                      "OBS_VAR" + n + "_LEVELS"):
                obs_levels = util.getlist(
                    self.config.getstr('config',
                                       "OBS_VAR" + n + "_LEVELS"))
                if len(fcst_levels) != len(obs_levels):
                    self.logger.error(
                        "FCST_VAR" + n + "_LEVELS and OBS_VAR" + n +
                        "_LEVELS do not have the same number of elements")
                    exit(1)
            else:
                obs_levels = fcst_levels
            if self.config.has_option('config',
                                      "OBS_VAR" + n + "_OPTIONS"):
                obs_extra = self.config.getraw('config',
                                               "OBS_VAR" + n + "_OPTIONS")
            else:
                obs_extra = ""
            if self.config.has_option('config',
                                      "OBS_VAR" + n + "_THRESH"):
                obs_thresh = util.getlist(
                    self.config.getstr('config',
                                       "OBS_VAR" + n + "_THRESH"))
                if len(fcst_thresh) != len(obs_thresh):
                    self.logger.error(
                        "FCST_VAR" + n + "_THRESH and OBS_VAR" + n +
                        "_THRESH do not have the same number of elements")
                    exit(1)
            else:
                obs_thresh = fcst_thresh
        else:
            # defensive: fcst_indices was built from _NAME keys, so this
            # branch should be unreachable
            self.logger.error("FCST_VAR" + n + "_NAME not defined")
            exit(1)
        fo = util.FieldObj()
        fo.fcst_name = fcst_name
        fo.obs_name = obs_name
        fo.fcst_extra = fcst_extra
        fo.obs_extra = obs_extra
        fo.fcst_thresh = fcst_thresh
        fo.obs_thresh = obs_thresh
        fo.fcst_level = fcst_levels
        fo.obs_level = obs_levels
        fo.index = n
        var_info.append(fo)
    return var_info
def create_plots(self, verif_case, verif_type):
    """! Read in metplus_final.conf variables and call function
    for the specific verification plots to run

    Args:
        verif_case - string of the verification case to make
                     plots for
        verif_type - string of the verification type to make
                     plots for

    Returns:
    """
    self.logger.info("Running plots for VERIF_CASE = " + verif_case +
                     ", VERIF_TYPE = " + verif_type)
    #read config
    plot_time = self.config.getstr('config', 'PLOT_TIME')
    valid_beg_YYYYmmdd = self.config.getstr('config', 'VALID_BEG', "")
    valid_end_YYYYmmdd = self.config.getstr('config', 'VALID_END', "")
    valid_hour_method = self.config.getstr('config', 'VALID_HOUR_METHOD')
    valid_hour_beg = self.config.getstr('config', 'VALID_HOUR_BEG')
    valid_hour_end = self.config.getstr('config', 'VALID_HOUR_END')
    valid_hour_increment = self.config.getstr('config',
                                              'VALID_HOUR_INCREMENT')
    init_beg_YYYYmmdd = self.config.getstr('config', 'INIT_BEG', "")
    init_end_YYYYmmdd = self.config.getstr('config', 'INIT_END', "")
    init_hour_method = self.config.getstr('config', 'INIT_HOUR_METHOD')
    init_hour_beg = self.config.getstr('config', 'INIT_HOUR_BEG')
    init_hour_end = self.config.getstr('config', 'INIT_HOUR_END')
    init_hour_increment = self.config.getstr('config',
                                             'INIT_HOUR_INCREMENT')
    stat_files_input_dir = self.config.getdir('STAT_FILES_INPUT_DIR')
    plotting_out_dir = self.config.getdir('PLOTTING_OUTPUT_DIR')
    plotting_scripts_dir = self.config.getdir('PLOTTING_SCRIPTS_DIR')
    plot_stats_list = self.config.getstr('config', 'PLOT_STATS_LIST')
    ci_method = self.config.getstr('config', 'CI_METHOD')
    verif_grid = self.config.getstr('config', 'VERIF_GRID')
    event_equalization = self.config.getstr('config',
                                            'EVENT_EQUALIZATION', "True")
    var_list = self.parse_vars_with_level_thresh_list()
    fourier_decom_list = self.parse_var_fourier_decomp()
    region_list = util.getlist(self.config.getstr('config', 'REGION_LIST'))
    lead_list = util.getlist(self.config.getstr('config', 'LEAD_LIST'))
    model_name_str_list, model_plot_name_str_list = self.parse_model_list()
    logging_filename = self.config.getstr('config', 'LOG_METPLUS')
    logging_level = self.config.getstr('config', 'LOG_LEVEL')
    met_base = self.config.getstr('dir', 'MET_BASE')
    #set envir vars based on config
    self.add_env_var('PLOT_TIME', plot_time)
    if plot_time == 'valid':
        self.add_env_var('START_DATE_YYYYmmdd', valid_beg_YYYYmmdd)
        self.add_env_var('END_DATE_YYYYmmdd', valid_end_YYYYmmdd)
    elif plot_time == 'init':
        self.add_env_var('START_DATE_YYYYmmdd', init_beg_YYYYmmdd)
        self.add_env_var('END_DATE_YYYYmmdd', init_end_YYYYmmdd)
    else:
        self.logger.error(
            "Invalid entry for PLOT_TIME, use 'valid' or 'init'")
        exit(1)
    self.add_env_var('STAT_FILES_INPUT_DIR', stat_files_input_dir)
    self.add_env_var('PLOTTING_OUT_DIR', plotting_out_dir)
    self.add_env_var('PLOT_STATS_LIST', plot_stats_list)
    self.add_env_var('MODEL_NAME_LIST', model_name_str_list)
    self.add_env_var('MODEL_PLOT_NAME_LIST', model_plot_name_str_list)
    self.add_env_var('CI_METHOD', ci_method)
    self.add_env_var('VERIF_GRID', verif_grid)
    self.add_env_var('EVENT_EQUALIZATION', event_equalization)
    self.add_env_var('LOGGING_FILENAME', logging_filename)
    self.add_env_var('LOGGING_LEVEL', logging_level)
    # start with a clean output directory for this case/type
    plotting_out_dir_full = os.path.join(plotting_out_dir,
                                         verif_case, verif_type)
    if os.path.exists(plotting_out_dir_full):
        self.logger.info(plotting_out_dir_full + " exists, removing")
        util.rmtree(plotting_out_dir_full)
    util.mkdir_p(os.path.join(plotting_out_dir_full, "imgs"))
    util.mkdir_p(os.path.join(plotting_out_dir_full, "data"))
    self.add_env_var('PLOTTING_OUT_DIR_FULL', plotting_out_dir_full)
    # extract the numeric MET version, e.g. 8.1 from ".../met-8.1_..."
    with open(met_base + '/version.txt') as met_version_txt:
        met_version_line = met_version_txt.readline()
        met_version = float(
            met_version_line.strip('\n').partition('/met-')[2].partition(
                '_')[0])
    self.add_env_var('MET_VERSION', str(met_version))
    if met_version < 6.0:
        # BUGFIX: logging.Logger has no exit() method — the previous
        # self.logger.exit(...) raised AttributeError instead of logging
        self.logger.error("Please run with MET version >= 6.0")
        exit(1)
    #build valid and init hour information
    valid_beg_HHMMSS = calendar.timegm(
        time.strptime(valid_hour_beg, "%H%M"))
    valid_end_HHMMSS = calendar.timegm(
        time.strptime(valid_hour_end, "%H%M"))
    init_beg_HHMMSS = calendar.timegm(time.strptime(init_hour_beg, "%H%M"))
    init_end_HHMMSS = calendar.timegm(time.strptime(init_hour_end, "%H%M"))
    valid_hour_list = self.create_hour_group_list(
        valid_beg_HHMMSS, valid_end_HHMMSS, int(valid_hour_increment))
    init_hour_list = self.create_hour_group_list(init_beg_HHMMSS,
                                                 init_end_HHMMSS,
                                                 int(init_hour_increment))
    valid_init_time_pairs = self.pair_valid_init_times(valid_hour_list,
                                                       valid_hour_method,
                                                       init_hour_list,
                                                       init_hour_method)
    #loop through time information
    for valid_init_time_pair in valid_init_time_pairs:
        self.add_env_var('VALID_TIME_INFO', valid_init_time_pair.valid)
        self.add_env_var('INIT_TIME_INFO', valid_init_time_pair.init)
        #loop through variable information
        for var_info in var_list:
            self.add_env_var('FCST_VAR_NAME', var_info.fcst_name)
            self.add_env_var('OBS_VAR_NAME', var_info.obs_name)
            fcst_var_level_list = var_info.fcst_level
            obs_var_level_list = var_info.obs_level
            # empty extras/thresholds are exported as the string "None"
            if len(var_info.fcst_extra) == 0:
                self.add_env_var('FCST_VAR_EXTRA', "None")
            else:
                self.add_env_var('FCST_VAR_EXTRA', var_info.fcst_extra)
            if len(var_info.obs_extra) == 0:
                self.add_env_var('OBS_VAR_EXTRA', "None")
            else:
                self.add_env_var('OBS_VAR_EXTRA', var_info.obs_extra)
            if len(var_info.fcst_thresh) == 0 or len(
                    var_info.obs_thresh) == 0:
                fcst_var_thresh_list = ["None"]
                obs_var_thresh_list = ["None"]
            else:
                fcst_var_thresh_list = var_info.fcst_thresh
                obs_var_thresh_list = var_info.obs_thresh
            #check for fourier decompositon for variable, add to interp list
            interp_list = util.getlist(
                self.config.getstr('config', 'INTERP', ""))
            var_fourier_decomp_info = fourier_decom_list[var_list.index(
                var_info)]
            if var_fourier_decomp_info.run_fourier:
                for pair in var_fourier_decomp_info.wave_num_pairings:
                    interp_list.append("WV1_" + pair)
            #loop through interpolation information
            for interp in interp_list:
                self.add_env_var('INTERP', interp)
                #loop through region information
                for region in region_list:
                    self.add_env_var('REGION', region)
                    #call specific plot definitions to make plots
                    if verif_case == "precip":
                        self.create_plots_precip(fcst_var_level_list,
                                                 obs_var_level_list,
                                                 fcst_var_thresh_list,
                                                 obs_var_thresh_list,
                                                 lead_list,
                                                 plotting_scripts_dir)
def main():
    """!Main program. Master MET+ script that invokes the necessary Python
    scripts to perform various activities, such as series analysis.

    Parses the command line (-c/--config, -r/--runtime, -h/--help), builds
    the METplus configuration object, instantiates one wrapper per entry in
    PROCESS_LIST, then runs each wrapper either over all times
    (LOOP_METHOD=processes) or once per init time (LOOP_METHOD=times).
    Always terminates via exit(); never returns normally.
    """
    # Job Logger
    produtil.log.jlogger.info('Top of master_metplus')

    # Setup Task logger. Until the Conf object is created, the Task logger
    # is only logging to tty, not a file.
    logger = logging.getLogger('master_metplus')
    logger.info('logger Top of master_metplus.')

    short_opts = "c:r:h"
    long_opts = ["config=", "help", "runtime="]
    # All command line input, get options and arguments
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], short_opts, long_opts)
    except getopt.GetoptError as err:
        print(str(err))
        usage('SCRIPT IS EXITING DUE TO UNRECOGNIZED COMMAND LINE OPTION')
    for k, v in opts:
        if k in ('-c', '--config'):
            # adds the conf file to the list of arguments.
            print("ADDED CONF FILE: " + v)
            args.append(config_launcher.set_conf_file_path(v))
        elif k in ('-h', '--help'):
            usage()
            exit()
        elif k in ('-r', '--runtime'):
            # NOTE(review): these are set but never read afterwards
            # (end_time is reassigned below) — the runtime option looks
            # like an unfinished feature; confirm intent.
            start_time = v
            end_time = v
        else:
            assert False, "UNHANDLED OPTION"
    if not args:
        args = None
    (parm, infiles, moreopt) = config_launcher.parse_launch_args(
        args, usage, None, logger)
    p = config_launcher.launch(infiles, moreopt)
    # NOW I have a conf object p, I can now setup the handler
    # to write to the LOG_FILENAME.
    logger = util.get_logger(p)

    # This is available in each subprocess from os.system BUT
    # we also set it in each process since they may be called stand alone.
    os.environ['MET_BASE'] = p.getdir('MET_BASE')

    # Use config object to get the list of processes to call
    process_list = util.getlist(p.getstr('config', 'PROCESS_LIST'))

    # Keep this comment.
    # When running commands in the process_list, reprocess the
    # original command line using (item))[sys.argv[1:]].
    #
    # You could call each task (ie. run_tc_pairs.py) without any args since
    # the final METPLUS_CONF file was just created from config_metplus.setup,
    # and each task, also calls setup, which use an existing final conf
    # file over command line args.
    #
    # both work ...
    # Note: Using (item))sys.argv[1:], is preferable since
    # it doesn't depend on the conf file existing.
    processes = []
    for item in process_list:
        try:
            command_builder = getattr(
                sys.modules[__name__], item + "Wrapper")(p, logger)
        except AttributeError:
            # BUG FIX: an exit() call used to follow this raise; it was
            # unreachable and has been removed.
            raise NameError("Process %s doesn't exist" % item)
        processes.append(command_builder)

    if p.getstr('config', 'LOOP_METHOD') == "processes":
        # each wrapper loops over its own times internally
        for process in processes:
            process.run_all_times()
    elif p.getstr('config', 'LOOP_METHOD') == "times":
        time_format = p.getstr('config', 'INIT_TIME_FMT')
        start_t = p.getstr('config', 'INIT_BEG')
        end_t = p.getstr('config', 'INIT_END')
        time_interval = p.getint('config', 'INIT_INC')
        if time_interval < 60:
            # BUG FIX: message previously said "greater than 60 seconds"
            # although exactly 60 is accepted by the check above.
            print(
                "ERROR: time_interval parameter must be 60 seconds or greater"
            )
            exit(1)
        init_time = calendar.timegm(time.strptime(start_t, time_format))
        end_time = calendar.timegm(time.strptime(end_t, time_format))
        # step through init times, running every wrapper at each one
        while init_time <= end_time:
            run_time = time.strftime("%Y%m%d%H%M", time.gmtime(init_time))
            print("")
            print("****************************************")
            print("* RUNNING MET+")
            print("* at init time: " + run_time)
            print("****************************************")
            logger.info("****************************************")
            logger.info("* RUNNING MET+")
            logger.info("* at init time: " + run_time)
            logger.info("****************************************")
            for process in processes:
                process.run_at_time(run_time)
                process.clear()
            init_time += time_interval
    else:
        print("ERROR: Invalid LOOP_METHOD defined. " +
              "Options are processes, times")
        exit()
    # BUG FIX: dead code that followed an unconditional exit() here has
    # been removed; it referenced an undefined variable `cmd` and could
    # never execute.
    exit(0)
def run_at_time_once(self, ti, v, cur_model):
    """!Build and run a single grid_stat command for one task time, one
    variable pairing and one model.

    Args:
        @param ti task info object; getValidTime(), getInitTime() and
               .lead are read here
        @param v variable info object; fcst_name, fcst_level, obs_name,
               obs_level, fcst_extra and obs_extra are read
        @param cur_model model entry; only index [1] is used here as the
               model type string — assumes a sequence, TODO confirm shape
    """
    valid_time = ti.getValidTime()
    init_time = ti.getInitTime()
    grid_stat_base_dir = self.p.getstr('config', 'GRID_STAT_OUT_DIR')
    # output is grouped by init time or valid time depending on loop mode
    if self.p.getbool('config', 'LOOP_BY_INIT'):
        grid_stat_out_dir = os.path.join(grid_stat_base_dir,
                                         init_time, "grid_stat")
    else:
        grid_stat_out_dir = os.path.join(grid_stat_base_dir,
                                         valid_time, "grid_stat")
    # split a leading alphabetic character off the level string,
    # e.g. "P500" -> type "P", level "500"
    fcst_level = v.fcst_level
    fcst_level_type = ""
    if (fcst_level[0].isalpha()):
        fcst_level_type = fcst_level[0]
        fcst_level = fcst_level[1:]
    obs_level = v.obs_level
    obs_level_type = ""
    if (obs_level[0].isalpha()):
        obs_level_type = obs_level[0]
        obs_level = obs_level[1:]
    #model_type = self.p.getstr('config', 'MODEL_TYPE')
    model_type = cur_model[1]
    obs_dir = self.p.getstr('config', 'OBS_GRID_STAT_INPUT_DIR')
    obs_template = self.p.getraw('filename_templates',
                                 'OBS_GRID_STAT_INPUT_TEMPLATE')
    model_dir = self.p.getstr('config', 'FCST_GRID_STAT_INPUT_DIR')
    config_dir = self.p.getstr('config', 'CONFIG_DIR')
    # NOTE(review): ymd_v is computed but never used below
    ymd_v = valid_time[0:8]
    if not os.path.exists(grid_stat_out_dir):
        os.makedirs(grid_stat_out_dir)
    # get model to compare
    model_path = self.find_model(ti.lead, init_time, fcst_level, cur_model)
    if model_path == "":
        print("ERROR: COULD NOT FIND FILE IN " + model_dir)
        return
    self.add_input_file(model_path)
    # TODO: Handle range of levels
    # fill the obs filename template; only the low end of a level range
    # (text before '-') is substituted, zero-padded to 2 digits
    obsSts = sts.StringSub(self.logger, obs_template,
                           valid=valid_time, init=init_time,
                           level=str(obs_level.split('-')[0]).zfill(2))
    obs_file = obsSts.doStringSub()
    obs_path = os.path.join(obs_dir, obs_file)
    self.add_input_file(obs_path)
    self.set_param_file(self.p.getstr('config', 'GRID_STAT_CONFIG'))
    self.set_output_dir(grid_stat_out_dir)

    # set up environment variables for each grid_stat run
    # get fcst and obs thresh parameters
    # verify they are the same size
    fcst_str = "FCST_" + v.fcst_name + "_" + fcst_level + "_THRESH"
    obs_str = "OBS_" + v.obs_name + "_" + obs_level + "_THRESH"
    fcst_cat_thresh = ""
    obs_cat_thresh = ""
    fcst_threshs = []
    obs_threshs = []
    # build MET cat_thresh strings like "cat_thresh=[ gt0.1, gt0.5 ];"
    if self.p.has_option('config', fcst_str):
        fcst_threshs = util.getlistfloat(self.p.getstr('config', fcst_str))
        fcst_cat_thresh = "cat_thresh=[ "
        for fcst_thresh in fcst_threshs:
            fcst_cat_thresh += "gt" + str(fcst_thresh) + ", "
        # drop the trailing ", " before closing the list
        fcst_cat_thresh = fcst_cat_thresh[0:-2] + " ];"
    if self.p.has_option('config', obs_str):
        obs_threshs = util.getlistfloat(self.p.getstr('config', obs_str))
        obs_cat_thresh = "cat_thresh=[ "
        for obs_thresh in obs_threshs:
            obs_cat_thresh += "gt" + str(obs_thresh) + ", "
        obs_cat_thresh = obs_cat_thresh[0:-2] + " ];"
    if len(fcst_threshs) != len(obs_threshs):
        self.logger.error("run_example: Number of forecast and "\
                          "observation thresholds must be the same")
        exit(1)

    # TODO: Allow NetCDF level with more than 2 dimensions i.e. (1,*,*)
    # TODO: Need to check data type for PROB fcst? non PROB obs?
    fcst_field = ""
    obs_field = ""
    # TODO: change PROB mode to put all cat thresh values in 1 item
    if self.p.getbool('config', 'FCST_IS_PROB'):
        # probabilistic forecast: one field entry per threshold
        for fcst_thresh in fcst_threshs:
            fcst_field += "{ name=\"PROB\"; level=\""+fcst_level_type + \
                          fcst_level.zfill(2) + "\"; prob={ name=\"" + \
                          v.fcst_name + \
                          "\"; thresh_lo="+str(fcst_thresh)+"; } },"
        for obs_thresh in obs_threshs:
            obs_field += "{ name=\""+v.obs_name+"_"+obs_level.zfill(2) + \
                         "\"; level=\"(*,*)\"; cat_thresh=[ gt" + \
                         str(obs_thresh)+" ]; },"
    else:
        # data_type = self.p.getstr('config', 'OBS_NATIVE_DATA_TYPE')
        # non-probabilistic: field syntax depends on the file type
        obs_data_type = util.get_filetype(self.p, obs_path)
        model_data_type = util.get_filetype(self.p, model_path)
        if obs_data_type == "NETCDF":
            obs_field += "{ name=\"" + v.obs_name+"_" + obs_level.zfill(2) + \
                         "\"; level=\"(*,*)\"; "
        else:
            obs_field += "{ name=\""+v.obs_name + \
                         "\"; level=\"["+obs_level_type + \
                         obs_level.zfill(2)+"]\"; "
        if model_data_type == "NETCDF":
            fcst_field += "{ name=\""+v.fcst_name+"_"+fcst_level.zfill(2) + \
                          "\"; level=\"(*,*)\"; "
        else:
            fcst_field += "{ name=\""+v.fcst_name + \
                          "\"; level=\"["+fcst_level_type + \
                          fcst_level.zfill(2)+"]\"; "
        fcst_field += fcst_cat_thresh + " },"
        # obs_field += "{ name=\"" + v.obs_name+"_" + obs_level.zfill(2) + \
        #              "\"; level=\"(*,*)\"; "
        obs_field += obs_cat_thresh + " },"

    # remove last comma and } to be added back after extra options
    fcst_field = fcst_field[0:-2]
    obs_field = obs_field[0:-2]
    fcst_field += v.fcst_extra + "}"
    obs_field += v.obs_extra + "}"

    ob_type = self.p.getstr('config', "OB_TYPE")
    # build a MET list of quoted polygon paths, e.g. ["a", "b"]
    verif_polys = util.getlist(self.p.getstr('config', "VERIFICATION_POLY"))
    verif_poly = "["
    for vp in verif_polys:
        verif_poly += "\"" + vp + "\", "
    verif_poly = os.path.expandvars(verif_poly[0:-2] + "]")

    # export values consumed by the grid_stat config file
    self.add_env_var("MODEL", model_type)
    self.add_env_var("FCST_VAR", v.fcst_name)
    self.add_env_var("OBS_VAR", v.obs_name)
    # TODO: Change ACCUM to LEVEL in GridStatConfig_MEAN/PROB and here
    self.add_env_var("ACCUM", v.fcst_level)
    self.add_env_var("OBTYPE", ob_type)
    self.add_env_var("CONFIG_DIR", config_dir)
    self.add_env_var("FCST_FIELD", fcst_field)
    self.add_env_var("OBS_FIELD", obs_field)
    self.add_env_var("MET_VALID_HHMM", valid_time[4:8])
    self.add_env_var("VERIF_POLY", verif_poly)
    # NOTE(review): get_command() is called again below; this first
    # result is unused
    cmd = self.get_command()
    self.logger.debug("")
    self.logger.debug("ENVIRONMENT FOR NEXT COMMAND: ")
    self.print_env_item("MODEL")
    self.print_env_item("FCST_VAR")
    self.print_env_item("OBS_VAR")
    self.print_env_item("ACCUM")
    self.print_env_item("OBTYPE")
    self.print_env_item("CONFIG_DIR")
    self.print_env_item("FCST_FIELD")
    self.print_env_item("OBS_FIELD")
    self.print_env_item("MET_VALID_HHMM")
    self.print_env_item("VERIF_POLY")
    self.logger.debug("")
    self.logger.debug("COPYABLE ENVIRONMENT FOR NEXT COMMAND: ")
    self.print_env_copy([
        "MODEL", "FCST_VAR", "OBS_VAR", "ACCUM", "OBTYPE", "CONFIG_DIR",
        "FCST_FIELD", "OBS_FIELD", "MET_VALID_HHMM"
    ])
    self.logger.debug("")
    cmd = self.get_command()
    if cmd is None:
        print("ERROR: grid_stat could not generate command")
        return
    self.logger.info("")
    self.build()
    self.clear()
def create_c_dict(self):
    """!Read the MTD_* settings from the METplus configuration into a
    dictionary consumed by the run methods.

    Returns:
        c_dict - dictionary of configuration values; FCST_* entries are
                 only populated when forecast data is processed, OBS_*
                 entries only when observation data is processed.
    """
    # NOTE(review): the super() call names ModeWrapper although every key
    # read below is MTD_* — looks like a copy-paste from the MODE wrapper;
    # confirm this matches the enclosing class name.
    c_dict = super(ModeWrapper, self).create_c_dict()
    # set to prevent find_obs from getting multiple files within
    # a time window. Does not refer to time series of files
    c_dict['ALLOW_MULTIPLE_FILES'] = False
    c_dict['OUTPUT_DIR'] = self.config.getdir('MTD_OUTPUT_DIR',
                                              self.config.getdir('OUTPUT_BASE'))
    c_dict['CONFIG_FILE'] = self.config.getstr('config', 'MTD_CONFIG', '')
    c_dict['MIN_VOLUME'] = self.config.getstr('config', 'MTD_MIN_VOLUME',
                                              '2000')
    c_dict['SINGLE_RUN'] = self.config.getbool('config', 'MTD_SINGLE_RUN',
                                               False)
    c_dict['SINGLE_DATA_SRC'] = self.config.getstr('config',
                                                   'MTD_SINGLE_DATA_SRC',
                                                   'FCST')
    # only read FCST conf if processing forecast data
    if not c_dict['SINGLE_RUN'] or c_dict['SINGLE_DATA_SRC'] == 'FCST':
        c_dict['FCST_IS_PROB'] = self.config.getbool('config',
                                                     'FCST_IS_PROB', False)
        c_dict['FCST_INPUT_DIR'] = \
            self.config.getdir('FCST_MTD_INPUT_DIR', c_dict['INPUT_BASE'])
        c_dict['FCST_INPUT_TEMPLATE'] = \
            self.config.getraw('filename_templates',
                               'FCST_MTD_INPUT_TEMPLATE')
        c_dict['FCST_INPUT_DATATYPE'] = \
            self.config.getstr('config', 'FCST_MTD_INPUT_DATATYPE', '')
        # FCST-specific setting wins; generic MTD_ setting is the fallback
        if self.config.has_option('config', 'FCST_MTD_CONV_RADIUS'):
            c_dict['FCST_CONV_RADIUS'] = self.config.getstr(
                'config', 'FCST_MTD_CONV_RADIUS')
        elif self.config.has_option('config', 'MTD_CONV_RADIUS'):
            c_dict['FCST_CONV_RADIUS'] = self.config.getstr(
                'config', 'MTD_CONV_RADIUS')
        else:
            self.logger.error('[config] FCST_MTD_CONV_RADIUS not set in config')
            exit(1)

        if self.config.has_option('config', 'FCST_MTD_CONV_THRESH'):
            c_dict['FCST_CONV_THRESH'] = self.config.getstr(
                'config', 'FCST_MTD_CONV_THRESH')
        elif self.config.has_option('config', 'MTD_CONV_THRESH'):
            c_dict['FCST_CONV_THRESH'] = self.config.getstr(
                'config', 'MTD_CONV_THRESH')
        else:
            self.logger.error('[config] FCST_MTD_CONV_THRESH not set in config')
            exit(1)

        # check that values are valid
        if not util.validate_thresholds(
                util.getlist(c_dict['FCST_CONV_THRESH'])):
            self.logger.error('FCST_MTD_CONV_THRESH items must start with a comparison operator (>,>=,==,!=,<,<=,gt,ge,eq,ne,lt,le)')
            exit(1)

    # only read OBS conf if processing observation data
    if not c_dict['SINGLE_RUN'] or c_dict['SINGLE_DATA_SRC'] == 'OBS':
        c_dict['OBS_IS_PROB'] = self.config.getbool('config',
                                                    'OBS_IS_PROB', False)
        c_dict['OBS_INPUT_DIR'] = \
            self.config.getdir('OBS_MTD_INPUT_DIR', c_dict['INPUT_BASE'])
        c_dict['OBS_INPUT_TEMPLATE'] = \
            self.config.getraw('filename_templates',
                               'OBS_MTD_INPUT_TEMPLATE')
        c_dict['OBS_INPUT_DATATYPE'] = \
            self.config.getstr('config', 'OBS_MTD_INPUT_DATATYPE', '')
        # OBS-specific setting wins; generic MTD_ setting is the fallback
        if self.config.has_option('config', 'OBS_MTD_CONV_RADIUS'):
            c_dict['OBS_CONV_RADIUS'] = self.config.getstr(
                'config', 'OBS_MTD_CONV_RADIUS')
        elif self.config.has_option('config', 'MTD_CONV_RADIUS'):
            c_dict['OBS_CONV_RADIUS'] = self.config.getstr(
                'config', 'MTD_CONV_RADIUS')
        else:
            self.logger.error('[config] OBS_MTD_CONV_RADIUS not set in config')
            exit(1)

        if self.config.has_option('config', 'OBS_MTD_CONV_THRESH'):
            c_dict['OBS_CONV_THRESH'] = self.config.getstr(
                'config', 'OBS_MTD_CONV_THRESH')
        elif self.config.has_option('config', 'MTD_CONV_THRESH'):
            c_dict['OBS_CONV_THRESH'] = self.config.getstr(
                'config', 'MTD_CONV_THRESH')
        else:
            self.logger.error('[config] OBS_MTD_CONV_THRESH not set in config')
            exit(1)

        # check that values are valid
        if not util.validate_thresholds(
                util.getlist(c_dict['OBS_CONV_THRESH'])):
            self.logger.error('OBS_MTD_CONV_THRESH items must start with a comparison operator (>,>=,==,!=,<,<=,gt,ge,eq,ne,lt,le)')
            exit(1)

    # handle window variables [FCST/OBS]_[FILE_]_WINDOW_[BEGIN/END]
    self.handle_window_variables(c_dict, 'mtd')

    return c_dict
def grid2grid_sfc_plots(self):
    """!Generate grid-to-grid surface verification plots by exporting
    configuration values as environment variables and invoking the
    external plot_grid2grid_sfc_* scripts per cycle/variable/region/lead.
    """
    logging_filename = self.logger.handlers[0].baseFilename
    self.add_env_var("LOGGING_FILENAME", logging_filename)
    plotting_scripts_dir = self.p.getdir('PLOTTING_SCRIPTS_DIR')
    #read config
    use_init = self.p.getbool('config', 'LOOP_BY_INIT', True)
    # choose init- or valid-time looping; the filter method label is
    # passed through to the plotting scripts
    if use_init:
        start_t = self.p.getstr('config', 'INIT_BEG')
        end_t = self.p.getstr('config', 'INIT_END')
        loop_beg_hour = self.p.getint('config', 'INIT_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'INIT_END_HOUR')
        loop_inc = self.p.getint('config', 'INIT_INC')
        date_filter_method = "Initialization"
        self.add_env_var("START_T", start_t)
        self.add_env_var("END_T", end_t)
        self.add_env_var("DATE_FILTER_METHOD", date_filter_method)
    else:
        start_t = self.p.getstr('config', 'VALID_BEG')
        end_t = self.p.getstr('config', 'VALID_END')
        loop_beg_hour = self.p.getint('config', 'VALID_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'VALID_END_HOUR')
        loop_inc = self.p.getint('config', 'VALID_INC')
        date_filter_method = "Valid"
        self.add_env_var("START_T", start_t)
        self.add_env_var("END_T", end_t)
        self.add_env_var("DATE_FILTER_METHOD", date_filter_method)
    stat_files_input_dir = self.p.getdir('STAT_FILES_INPUT_DIR')
    plotting_out_dir = self.p.getdir('PLOTTING_OUT_DIR')
    # start from a clean output directory
    if os.path.exists(plotting_out_dir):
        self.logger.info(plotting_out_dir + " exist, removing")
        util.rmtree(plotting_out_dir)
    region_list = util.getlist(self.p.getstr('config', 'REGION_LIST'))
    lead_list = util.getlistint(self.p.getstr('config', 'LEAD_LIST'))
    model_list = self.p.getstr('config', 'MODEL_LIST')
    plot_stats_list = self.p.getstr('config', 'PLOT_STATS_LIST')
    self.add_env_var("STAT_FILES_INPUT_DIR", stat_files_input_dir)
    self.add_env_var("PLOTTING_OUT_DIR", plotting_out_dir)
    self.add_env_var("MODEL_LIST", model_list)
    self.add_env_var("PLOT_STATS_LIST", plot_stats_list)
    var_list = util.parse_var_list(self.p)
    loop_hour = loop_beg_hour
    while loop_hour <= loop_end_hour:
        loop_hour_str = str(loop_hour).zfill(2)
        self.add_env_var('CYCLE', loop_hour_str)
        for var_info in var_list:
            fcst_var_name = var_info.fcst_name
            fcst_var_level = var_info.fcst_level
            #fcst_var_extra = var_info.fcst_extra.replace(" = ", "").rstrip(";")
            obs_var_name = var_info.obs_name
            obs_var_level = var_info.obs_level
            #obs_var_extra = var_info.obs_extra.replace(" = ", "").rstrip(";")
            self.add_env_var('FCST_VAR_NAME', fcst_var_name)
            self.add_env_var('FCST_VAR_LEVEL', fcst_var_level)
            self.add_env_var('OBS_VAR_NAME', obs_var_name)
            self.add_env_var('OBS_VAR_LEVEL', obs_var_level)
            for region in region_list:
                self.add_env_var('REGION', region)
                # per-lead time series plot
                for lead in lead_list:
                    if lead < 10:
                        lead_string = '0' + str(lead)
                    else:
                        lead_string = str(lead)
                    self.add_env_var('LEAD', lead_string)
                    py_cmd = os.path.join("python") + " " + os.path.join(
                        plotting_scripts_dir, "plot_grid2grid_sfc_ts.py")
                    process = subprocess.Popen(py_cmd, env=self.env,
                                               shell=True)
                    process.wait()
                    print("")
                # mean-over-leads plot after the lead loop
                # NOTE(review): nesting reconstructed from collapsed
                # source — confirm tsmean runs once per region, not per
                # lead
                self.add_env_var("LEAD_LIST",
                                 self.p.getstr('config', 'LEAD_LIST'))
                py_cmd = os.path.join("python") + " " + os.path.join(
                    plotting_scripts_dir, "plot_grid2grid_sfc_tsmean.py")
                process = subprocess.Popen(py_cmd, env=self.env, shell=True)
                process.wait()
                print("")
        loop_hour += loop_inc
def grid2grid_pres_plots(self):
    """!Generate grid-to-grid pressure-level verification plots.

    Exports configuration values as environment variables and invokes the
    external plot_grid2grid_pres_* scripts for each cycle, variable,
    region and lead. Variable info is parsed here directly (rather than
    via util.parse_var_list) because each variable needs its full list of
    levels.
    """
    logging_filename = self.logger.handlers[0].baseFilename
    self.add_env_var("LOGGING_FILENAME", logging_filename)
    plotting_scripts_dir = self.p.getdir('PLOTTING_SCRIPTS_DIR')
    # read config: loop by init or by valid time
    use_init = self.p.getbool('config', 'LOOP_BY_INIT', True)
    if use_init:
        start_t = self.p.getstr('config', 'INIT_BEG')
        end_t = self.p.getstr('config', 'INIT_END')
        loop_beg_hour = self.p.getint('config', 'INIT_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'INIT_END_HOUR')
        loop_inc = self.p.getint('config', 'INIT_INC')
        date_filter_method = "Initialization"
    else:
        start_t = self.p.getstr('config', 'VALID_BEG')
        end_t = self.p.getstr('config', 'VALID_END')
        loop_beg_hour = self.p.getint('config', 'VALID_BEG_HOUR')
        loop_end_hour = self.p.getint('config', 'VALID_END_HOUR')
        loop_inc = self.p.getint('config', 'VALID_INC')
        date_filter_method = "Valid"
    # common exports hoisted out of the branches (identical in both)
    self.add_env_var("START_T", start_t)
    self.add_env_var("END_T", end_t)
    self.add_env_var("DATE_FILTER_METHOD", date_filter_method)

    stat_files_input_dir = self.p.getdir('STAT_FILES_INPUT_DIR')
    plotting_out_dir = self.p.getdir('PLOTTING_OUT_DIR')
    # start from a clean output directory
    if os.path.exists(plotting_out_dir):
        self.logger.info(plotting_out_dir + " exist, removing")
        util.rmtree(plotting_out_dir)

    region_list = util.getlist(self.p.getstr('config', 'REGION_LIST'))
    lead_list = util.getlistint(self.p.getstr('config', 'LEAD_LIST'))
    model_list = self.p.getstr('config', 'MODEL_LIST')
    plot_stats_list = self.p.getstr('config', 'PLOT_STATS_LIST')
    self.add_env_var("STAT_FILES_INPUT_DIR", stat_files_input_dir)
    self.add_env_var("PLOTTING_OUT_DIR", plotting_out_dir)
    self.add_env_var("MODEL_LIST", model_list)
    self.add_env_var("PLOT_STATS_LIST", plot_stats_list)

    # need to grab var info in special way that differs from
    # util.parse_var_list: need variables with corresponding list of
    # levels; logic derived from util.parse_var_list
    var_info_list = []
    # find all FCST_VARn_NAME keys in the conf files
    all_conf = self.p.keys('config')
    fcst_indices = []
    regex = re.compile(r"FCST_VAR(\d+)_NAME")
    for conf in all_conf:
        result = regex.match(conf)
        if result is not None:
            fcst_indices.append(result.group(1))
    # loop over all possible variables and add them to list
    for n in fcst_indices:
        # get fcst var info if available
        if self.p.has_option('config', "FCST_VAR" + n + "_NAME"):
            fcst_name = self.p.getstr('config', "FCST_VAR" + n + "_NAME")
            fcst_extra = ""
            if self.p.has_option('config', "FCST_VAR" + n + "_OPTIONS"):
                fcst_extra = self.p.getraw('config',
                                           "FCST_VAR" + n + "_OPTIONS")
            fcst_levels = util.getlist(
                self.p.getstr('config', "FCST_VAR" + n + "_LEVELS"))
            # if OBS_VARn_X does not exist, use FCST_VARn_X
            if self.p.has_option('config', "OBS_VAR" + n + "_NAME"):
                obs_name = self.p.getstr('config', "OBS_VAR" + n + "_NAME")
            else:
                obs_name = fcst_name
            obs_extra = ""
            if self.p.has_option('config', "OBS_VAR" + n + "_OPTIONS"):
                obs_extra = self.p.getraw('config',
                                          "OBS_VAR" + n + "_OPTIONS")
            if self.p.has_option('config', "OBS_VAR" + n + "_LEVELS"):
                # BUG FIX: this previously re-read FCST_VARn_LEVELS, so an
                # explicitly configured OBS_VARn_LEVELS was silently
                # ignored
                obs_levels = util.getlist(
                    self.p.getstr('config', "OBS_VAR" + n + "_LEVELS"))
            else:
                obs_levels = fcst_levels
            # levels are paired index-by-index below, so counts must match
            if len(fcst_levels) != len(obs_levels):
                print("ERROR: FCST_VAR"+n+"_LEVELS and OBS_VAR"+n+\
                      "_LEVELS do not have the same number of elements")
                exit(1)
            fo = util.FieldObj()
            fo.fcst_name = fcst_name
            fo.obs_name = obs_name
            fo.fcst_extra = fcst_extra
            fo.obs_extra = obs_extra
            fo.fcst_level = fcst_levels
            fo.obs_level = obs_levels
            var_info_list.append(fo)

    loop_hour = loop_beg_hour
    while loop_hour <= loop_end_hour:
        loop_hour_str = str(loop_hour).zfill(2)
        self.add_env_var('CYCLE', loop_hour_str)
        for v in var_info_list:
            fcst_var_levels_list = v.fcst_level
            self.add_env_var('FCST_VAR_NAME', v.fcst_name)
            # e.g. ["P500","P850"] -> "P500, P850"
            self.add_env_var(
                'FCST_VAR_LEVELS_LIST',
                ''.join(fcst_var_levels_list).replace(
                    "P", " P").lstrip().replace(" P", ", P"))
            obs_var_levels_list = v.obs_level
            self.add_env_var('OBS_VAR_NAME', v.obs_name)
            self.add_env_var(
                'OBS_VAR_LEVELS_LIST',
                ''.join(obs_var_levels_list).replace(
                    "P", " P").lstrip().replace(" P", ", P"))
            for region in region_list:
                self.add_env_var('REGION', region)
                for lead in lead_list:
                    if lead < 10:
                        lead_string = '0' + str(lead)
                    else:
                        lead_string = str(lead)
                    self.add_env_var('LEAD', lead_string)
                    # per-level time series plot
                    for vl in range(len(fcst_var_levels_list)):
                        self.add_env_var('FCST_VAR_LEVEL',
                                         fcst_var_levels_list[vl])
                        self.add_env_var('OBS_VAR_LEVEL',
                                         obs_var_levels_list[vl])
                        py_cmd = os.path.join("python") + " " + os.path.join(
                            plotting_scripts_dir,
                            "plot_grid2grid_pres_ts.py")
                        process = subprocess.Popen(py_cmd, env=self.env,
                                                   shell=True)
                        process.wait()
                        print("")
                    # vertical profile plot uses all levels at once
                    ####py_cmd = os.path.join("python3")+" "+os.path.join(plotting_scripts_dir, "plot_grid2grid_pres_tp.py") #add python3 at top of script
                    py_cmd = os.path.join("python") + " " + os.path.join(
                        plotting_scripts_dir, "plot_grid2grid_pres_tp.py")
                    process = subprocess.Popen(py_cmd, env=self.env,
                                               shell=True)
                    process.wait()
                    print("")
                # mean-over-leads plots after the lead loop
                self.add_env_var("LEAD_LIST",
                                 self.p.getstr('config', 'LEAD_LIST'))
                py_cmd = os.path.join("python") + " " + os.path.join(
                    plotting_scripts_dir, "plot_grid2grid_pres_tsmean.py")
                process = subprocess.Popen(py_cmd, env=self.env, shell=True)
                process.wait()
                print("")
                ####py_cmd = os.path.join("python3")+" "+os.path.join(plotting_scripts_dir, "plot_grid2grid_pres_tpmean.py") #add python3 at top of script
                py_cmd = os.path.join("python") + " " + os.path.join(
                    plotting_scripts_dir, "plot_grid2grid_pres_tpmean.py")
                process = subprocess.Popen(py_cmd, env=self.env, shell=True)
                process.wait()
                print("")
        loop_hour += loop_inc
def create_c_dict(self):
    """!Assemble the MODE wrapper's configuration dictionary.

    Reads MODE_* settings from the METplus config (input dirs/templates,
    quilting, convolution/merge settings, window variables, verification
    mask) and validates all threshold lists before returning.

    Returns:
        c_dict - dictionary of configuration values used by the wrapper.
    """
    c_dict = super(ModeWrapper, self).create_c_dict()
    conf = self.config

    c_dict['CONFIG_FILE'] = conf.getstr('config', 'MODE_CONFIG')

    # input locations and datatypes for both data sources
    c_dict['OBS_INPUT_DIR'] = conf.getdir('OBS_MODE_INPUT_DIR')
    c_dict['OBS_INPUT_TEMPLATE'] = conf.getraw('filename_templates',
                                               'OBS_MODE_INPUT_TEMPLATE')
    c_dict['OBS_INPUT_DATATYPE'] = conf.getstr('config',
                                               'OBS_MODE_INPUT_DATATYPE', '')
    c_dict['FCST_INPUT_DIR'] = conf.getdir('FCST_MODE_INPUT_DIR')
    c_dict['FCST_INPUT_TEMPLATE'] = conf.getraw('filename_templates',
                                                'FCST_MODE_INPUT_TEMPLATE')
    c_dict['FCST_INPUT_DATATYPE'] = conf.getstr('config',
                                                'FCST_MODE_INPUT_DATATYPE',
                                                '')

    c_dict['OUTPUT_DIR'] = conf.getdir('MODE_OUTPUT_DIR')
    c_dict['ONCE_PER_FIELD'] = True
    c_dict['QUILT'] = conf.getbool('config', 'MODE_QUILT', False)

    # resolve the four paired FCST/OBS field settings; each may come from
    # a generic MODE_<suffix> key or side-specific overrides
    for suffix in ('CONV_RADIUS', 'CONV_THRESH',
                   'MERGE_THRESH', 'MERGE_FLAG'):
        fcst_value, obs_value = self.handle_fcst_and_obs_field(
            'MODE_' + suffix,
            'FCST_MODE_' + suffix,
            'OBS_MODE_' + suffix)
        c_dict['FCST_' + suffix] = fcst_value
        c_dict['OBS_' + suffix] = obs_value

    c_dict['ALLOW_MULTIPLE_FILES'] = False
    c_dict['MERGE_CONFIG_FILE'] = conf.getstr('config',
                                              'MODE_MERGE_CONFIG_FILE', '')

    # handle window variables [FCST/OBS]_[FILE_]_WINDOW_[BEGIN/END]
    self.handle_window_variables(c_dict, 'mode')

    c_dict['VERIFICATION_MASK_TEMPLATE'] = \
        conf.getraw('filename_templates', 'MODE_VERIFICATION_MASK_TEMPLATE')
    c_dict['VERIFICATION_MASK'] = ''

    # check that values are valid; bail on the first bad threshold list
    error_message = 'items must start with a comparison operator ' + \
                    '(>,>=,==,!=,<,<=,gt,ge,eq,ne,lt,le)'
    for label, key in (('MODE_FCST_CONV_THRESH', 'FCST_CONV_THRESH'),
                       ('MODE_OBS_CONV_THRESH', 'OBS_CONV_THRESH'),
                       ('MODE_FCST_MERGE_THRESH', 'FCST_MERGE_THRESH'),
                       ('MODE_OBS_MERGE_THRESH', 'OBS_MERGE_THRESH')):
        if not util.validate_thresholds(util.getlist(c_dict[key])):
            self.logger.error('{} {}'.format(label, error_message))
            exit(1)

    return c_dict
def create_c_dict(self):
    """! Create a dictionary that holds all the values set in the
    METplus config file for the point-stat wrapper.

    Returns:
        c_dict - A dictionary containing the key-value pairs set
                 in the METplus configuration file.
    """
    c_dict = super(PointStatWrapper, self).create_c_dict()
    c_dict['ALLOW_MULTIPLE_FILES'] = True

    c_dict['OFFSETS'] = util.getlistint(
        self.config.getstr('config', 'POINT_STAT_OFFSETS', '0'))
    c_dict['FCST_INPUT_TEMPLATE'] = \
        self.config.getraw('filename_templates',
                           'FCST_POINT_STAT_INPUT_TEMPLATE')
    c_dict['OBS_INPUT_TEMPLATE'] = \
        self.config.getraw('filename_templates',
                           'OBS_POINT_STAT_INPUT_TEMPLATE')
    c_dict['FCST_INPUT_DATATYPE'] = \
        self.config.getstr('config', 'FCST_POINT_STAT_INPUT_DATATYPE', '')
    c_dict['OBS_INPUT_DATATYPE'] = \
        self.config.getstr('config', 'OBS_POINT_STAT_INPUT_DATATYPE', '')
    c_dict['FCST_INPUT_DIR'] = self.config.getdir(
        'FCST_POINT_STAT_INPUT_DIR')
    c_dict['OBS_INPUT_DIR'] = self.config.getdir(
        'OBS_POINT_STAT_INPUT_DIR')
    c_dict['OUTPUT_DIR'] = \
        self.config.getdir('POINT_STAT_OUTPUT_DIR')

    # Configuration
    # NOTE(review): CONFIG_FILE and POINT_STAT_CONFIG_FILE hold the same
    # value; kept for backward compatibility with existing consumers
    c_dict['CONFIG_FILE'] = \
        self.config.getstr('config', 'POINT_STAT_CONFIG_FILE')
    c_dict['MODEL'] = self.config.getstr('config', 'MODEL')
    c_dict['POINT_STAT_CONFIG_FILE'] = \
        self.config.getstr('config', 'POINT_STAT_CONFIG_FILE')

    regrid = self.config.getstr('config', 'POINT_STAT_REGRID_TO_GRID')
    # if not empty, not already surrounded by quotes, and not NONE,
    # add quotes.
    # BUG FIX: guard against an empty value — regrid[0] raised IndexError
    # when POINT_STAT_REGRID_TO_GRID was set to an empty string.
    if regrid and regrid[0] != '"' and regrid != 'NONE':
        regrid = '"' + regrid + '"'
    c_dict['REGRID_TO_GRID'] = regrid

    c_dict['POINT_STAT_GRID'] = self.config.getstr('config',
                                                   'POINT_STAT_GRID')
    c_dict['POINT_STAT_POLY'] = util.getlist(
        self.config.getstr('config', 'POINT_STAT_POLY', ''))
    c_dict['POINT_STAT_STATION_ID'] = util.getlist(
        self.config.getstr('config', 'POINT_STAT_STATION_ID', ''))
    c_dict['POINT_STAT_MESSAGE_TYPE'] = util.getlist(
        self.config.getstr('config', 'POINT_STAT_MESSAGE_TYPE', ''))

    # handle window variables [FCST/OBS]_[FILE_]_WINDOW_[BEGIN/END]
    self.handle_window_variables(c_dict, 'point_stat')

    c_dict['NEIGHBORHOOD_WIDTH'] = self.config.getstr(
        'config', 'POINT_STAT_NEIGHBORHOOD_WIDTH', '')
    c_dict['NEIGHBORHOOD_SHAPE'] = self.config.getstr(
        'config', 'POINT_STAT_NEIGHBORHOOD_SHAPE', '')
    c_dict['VERIFICATION_MASK_TEMPLATE'] = \
        self.config.getraw('filename_templates',
                           'POINT_STAT_VERIFICATION_MASK_TEMPLATE')
    c_dict['VERIFICATION_MASK'] = ''

    return c_dict