def test_crow_variable_hour():
    """Verify do_string_sub() formats the lead hour without zero-padding.

    Template: pgbf{lead?fmt=%1H}.gfs.{valid?fmt=%Y%m%d%H}
    Must reproduce crow input file names with 1-, 2-, and 3-digit leads,
    e.g. pgbf3.gfs.2017060418, pgbf18.gfs.2017062000, pgbf219.gfs.2017060418.
    """
    templ = 'pgbf{lead?fmt=%1H}.gfs.{valid?fmt=%Y%m%d%H}'
    # (lead hours, valid time YYYYMMDDHH, expected crow input file name)
    cases = (
        (3, '2017060418', 'pgbf3.gfs.2017060418'),
        (18, '2017062000', 'pgbf18.gfs.2017062000'),
        (219, '2017060418', 'pgbf219.gfs.2017060418'),
    )
    for lead_hours, valid_str, expected in cases:
        valid_dt = datetime.datetime.strptime(valid_str, '%Y%m%d%H')
        actual = do_string_sub(templ, valid=valid_dt, lead=lead_hours * 3600)
        assert actual == expected
def test_shift_time_lead_negative():
    """A negative shift on a lead tag subtracts one day from that lead only."""
    init_dt = datetime.datetime.strptime("2019020700", '%Y%m%d%H')
    lead_seconds = 60 * 3600
    templ = "dwd_{init?fmt=%Y%m%d%H}_{lead?fmt=%.3H?shift=-86400}_{lead?fmt=%.3H}"
    # 60h lead shifted back 86400s (24h) -> 036; unshifted lead -> 060
    result = do_string_sub(templ, init=init_dt, lead=lead_seconds)
    assert result == "dwd_2019020700_036_060"
def test_multiple_valid_substitution_init_complex():
    """Init time with separators (ISO-like) and a 2-digit lead substitute correctly."""
    init_dt = datetime.datetime.strptime("2016061018", '%Y%m%d%H')
    templ = "ncar.ral.CoSPA.HRRR.{init?fmt=%Y-%m-%dT%H:%M:%S}.PT{lead?fmt=%.2H}:00.nc"
    result = do_string_sub(templ, init=init_dt, lead=6 * 3600)
    assert result == "ncar.ral.CoSPA.HRRR.2016-06-10T18:00:00.PT06:00.nc"
def test_multiple_valid_substitution_valid():
    """The valid tag may appear twice; %HHH formats a 3-digit lead."""
    valid_dt = datetime.datetime.strptime("2018020112", '%Y%m%d%H')
    templ = "{valid?fmt=%Y%m%d%H}/gfs.t{valid?fmt=%H}.pgrb2.0p25.{lead?fmt=%HHH}"
    result = do_string_sub(templ, valid=valid_dt, lead=123 * 3600)
    assert result == "2018020112/gfs.t12.pgrb2.0p25.123"
def test_find_input_files(metplus_config, offsets, offset_to_find):
    """Check that find_input_files() locates the file for the right offset.

    Creates input files for offsets 3 and 5 (valid 20190201_12) under a
    fake input directory, then searches using the parametrized offset list.
    """
    pb = pb2nc_wrapper(metplus_config)

    input_dict = {'valid': datetime.datetime(2019, 2, 1, 12)}

    # build a fake input directory under OUTPUT_BASE and point the wrapper at it
    fake_input_dir = os.path.join(pb.config.getdir('OUTPUT_BASE'), 'pbin')
    if not os.path.exists(fake_input_dir):
        os.makedirs(fake_input_dir)
    pb.c_dict['OBS_INPUT_DIR'] = fake_input_dir

    # create empty files matching the input template for offsets 3 and 5
    for offset_hours in (3, 5):
        input_dict['offset'] = int(offset_hours * 3600)
        time_info = time_util.ti_calculate(input_dict)
        relative_name = do_string_sub(pb.c_dict['OBS_INPUT_TEMPLATE'],
                                      **time_info)
        open(os.path.join(fake_input_dir, relative_name), 'a').close()

    # remove offset so find_input_files must compute it from OFFSETS
    del input_dict['offset']
    pb.c_dict['OFFSETS'] = offsets

    result = pb.find_input_files(input_dict)

    # None expected means no file should have been found
    if offset_to_find is None:
        assert result is None
    else:
        assert result['offset_hours'] == offset_to_find
def test_cycle_hour():
    """A cycle value of 0 renders as a zero-padded two-digit hour (tm00)."""
    valid_dt = datetime.datetime.strptime("20180103", '%Y%m%d')
    result = do_string_sub("prefix.{valid?fmt=%Y%m%d}.tm{cycle?fmt=%2H}",
                           valid=valid_dt, cycle=0)
    assert result == "prefix.20180103.tm00"
def test_multiple_valid_substitution_init():
    """The init tag may appear twice in one template; lead 0 pads to f00."""
    init_dt = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    templ = "{init?fmt=%Y%m%d%H}/gfs.t{init?fmt=%H}z.pgrb2.0p25.f{lead?fmt=%.2H}"
    result = do_string_sub(templ, init=init_dt, lead=0)
    assert result == "2017060400/gfs.t00z.pgrb2.0p25.f00"
def test_h_lead_no_pad_3_digit_sub():
    """%1H leaves a 3-digit lead hour unpadded (A102h, not A102h padded)."""
    init_dt = datetime.datetime.strptime("1987020103", '%Y%m%d%H')
    result = do_string_sub("{init?fmt=%Y%m%d%H}_A{lead?fmt=%1H}h",
                           init=init_dt, lead=102 * 3600)
    assert result == "1987020103_A102h"
def test_do_string_sub_recurse(templ, expected_filename):
    """Recursive substitution resolves nested tags; missing tags are skipped."""
    init_dt = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    result = do_string_sub(templ,
                           init=init_dt,
                           lead=6 * 3600,
                           skip_missing_tags=True,
                           recurse=True)
    assert result == expected_filename
def test_do_string_sub_no_recurse_no_missing(templ, expected_filename):
    """Substitute date plus regex strings for basin/cyclone, no recursion."""
    date_dt = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    result = do_string_sub(templ,
                           date=date_dt,
                           basin="([a-zA-Z]{2})",
                           cyclone="([0-9]{2,4})")
    assert result == expected_filename
def test_ccpa_template():
    """Substitute valid time and lead into the CCPA filename template.

    Fixes two defects in the original:
    - the second comparison was inverted (``if filename == expected_filename:
      passed = False`` marked the test failed on a *correct* result);
    - the function returned a bool instead of asserting, and pytest ignores
      return values, so the test could never actually fail.
    Both checks are now plain asserts.
    """
    templ = "ccpa.{valid?fmt=%Y%m%d}/06/ccpa.t{valid?fmt=%H}z.{lead?fmt=%.2H}h.hrap.conus.gb2"

    # 3-hour lead given in seconds
    valid_dt = datetime.datetime.strptime("2019022403", '%Y%m%d%H')
    filename = do_string_sub(templ, valid=valid_dt, lead=10800)
    assert filename == "ccpa.20190224/06/ccpa.t03z.03h.hrap.conus.gb2"

    # 6-hour lead computed from hours
    valid_dt = datetime.datetime.strptime("2019022406", '%Y%m%d%H')
    filename = do_string_sub(templ, valid=valid_dt, lead=6 * 3600)
    assert filename == "ccpa.20190224/06/ccpa.t06z.06h.hrap.conus.gb2"
def test_do_string_sub_skip_missing_tags():
    """Unknown tags remain verbatim in the output when skip_missing_tags=True."""
    init_dt = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    templ = "{init?fmt=%Y%m%d%H}_{missing_tag?fmt=%H}_f{lead?fmt=%2H}"
    result = do_string_sub(templ,
                           init=init_dt,
                           lead=6 * 3600,
                           skip_missing_tags=True)
    assert result == "2017060400_{missing_tag?fmt=%H}_f06"
def test_gdas_substitution():
    """GDAS prepbufr file names use only the valid time — no cycle or offset."""
    valid_str = "2018010411"
    valid_dt = datetime.datetime.strptime(valid_str, '%Y%m%d%H')
    result = do_string_sub("prepbufr.gdas.{valid?fmt=%Y%m%d%H}.nc",
                           valid=valid_dt)
    assert result == 'prepbufr.gdas.' + valid_str + '.nc'
def test_ym_date_dir():
    """A YYYYMM date string substitutes into a directory path template.

    e.g. /d1/METplus_TC/adeck_orig/201708/atcfunix.gfs.2017080100.dat
    """
    templ = ('/d1/METplus_TC/adeck_orig/{date?fmt=%s}/'
             'atcfunix.gfs.2017080100.dat')
    result = do_string_sub(templ, date='201708')
    assert result == ('/d1/METplus_TC/adeck_orig/201708/'
                      'atcfunix.gfs.2017080100.dat')
def test_ymd_date_dir():
    """A YYYYMMDD date directory combines with an init-time filename.

    e.g. /d1/METplus_TC/adeck_orig/20170811/atcfunix.gfs.2017081118.dat
    """
    init_dt = datetime.datetime.strptime('2017081118', '%Y%m%d%H')
    templ = ('/d1/METplus_TC/adeck_orig/{date?fmt=%s}/atcfunix.gfs.'
             '{init?fmt=%Y%m%d%H}.dat')
    result = do_string_sub(templ, date='20170811', init=init_dt)
    assert result == ('/d1/METplus_TC/adeck_orig/20170811/'
                      'atcfunix.gfs.2017081118.dat')
def test_ymd_region_cyclone():
    """Rebuild a full bdeck path from date, region, cyclone, and year.

    e.g. /d1/METplus_TC/bdeck/201708/bal052017.dat
    """
    templ = ('/d1/METplus_TC/bdeck/{date?fmt=%s}/b{region?fmt=%s}'
             '{cyclone?fmt=%s}{misc?fmt=%s}.dat')
    result = do_string_sub(templ,
                           date='201708',
                           region='al',
                           cyclone='05',
                           misc='2017')
    assert result == '/d1/METplus_TC/bdeck/201708/bal052017.dat'
# +'_dump_row.stat' # ) # model_stat_file = os.path.join(input_base_dir, # model_stat_filename) model_stat_template = dump_row_filename_template string_sub_dict = { 'model': model_name, 'model_reference': model_plot_name, 'obtype': model_obtype, 'fcst_lead': fcst_lead, 'fcst_level': fcst_var_level, 'obs_level': obs_var_level, 'fcst_thresh': fcst_var_thresh, 'obs_thresh': obs_var_thresh, } model_stat_file = do_string_sub(model_stat_template, **string_sub_dict) if os.path.exists(model_stat_file): nrow = sum(1 for line in open(model_stat_file)) if nrow == 0: logger.warning("Model "+str(model_num)+" "+model_name+" " +"with plot name "+model_plot_name+" " +"file: "+model_stat_file+" empty") model_level_now_data = ( pd.DataFrame(np.nan, index=model_level_now_data_index, columns=[ 'TOTAL' ]) ) else: logger.debug("Model "+str(model_num)+" "+model_name+" " +"with plot name "+model_plot_name+" " +"file: "+model_stat_file+" exists")
def test_shift_time_negative():
    """shift=-86400 moves the formatted init time back exactly one day."""
    init_dt = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    result = do_string_sub("{init?fmt=%Y%m%d%H?shift=-86400}", init=init_dt)
    assert result == "2017060300"
def test_offset_hour():
    """An offset given in seconds (10800) formats as a zero-padded hour (03)."""
    assert do_string_sub("{offset?fmt=%2H}", offset=10800) == "03"
def test_underscore_in_time_fmt():
    """Literal characters (an underscore) inside fmt pass through unchanged."""
    valid_dt = datetime.datetime.strptime("20170604010203", '%Y%m%d%H%M%S')
    result = do_string_sub("{valid?fmt=%Y%m%d_%H%M%S}", valid=valid_dt)
    assert result == "20170604_010203"