def test_create_grid2obs_regex_all():
    pytest.skip('deprecated function')
    # Test that the regex created from the template that has valid
    # cycle, lead and offset is correct (expected).
    logger = logging.getLogger("test")
    templ = \
        '/path/to/nam.{init?fmt=%Y%m%d}/rap.t{cycle?fmt=%HH}z.awphys{lead?fmt=%H}.' \
        'tm{offset?fmt=%H}.grib2'

    # variables to set for StringSub
    init_str = "20180815"
    cycle_str = '00'
    lead_str = '60'
    offset_str = '00'

    ss = StringSub(logger,
                   templ,
                   init=init_str,
                   cycle=cycle_str,
                   lead=lead_str,
                   offset=offset_str)
    actual_regex = ss.create_grid2obs_regex()
    expected_regex = '/path/to/nam.([0-9]{8})/rap.t([0-9]{2,3})z.' \
                     'awphys([0-9]{1,3}).tm([0-9]{2,3}).grib2$'
    assert actual_regex == expected_regex
def test_offset_hour():
    logger = logging.getLogger("dummy")
    expected_hour = "03"
    offset = 10800
    templ = "{offset?fmt=%2H}"
    ss = StringSub(logger, templ, offset=offset)
    offset_hour = ss.doStringSub()
    assert (offset_hour == expected_hour)
def test_h_lead_pad_2_digit_sub():
    logger = logging.getLogger("test")
    file_template = "{init?fmt=%Y%m%d%H}_A{lead?fmt=%.3H}h"
    init_time = datetime.datetime.strptime("1987020103", '%Y%m%d%H')
    lead_time = int("3") * 3600
    fSts = StringSub(logger, file_template, init=init_time, lead=lead_time)
    out_string = fSts.doStringSub()
    assert (out_string == "1987020103_A003h")
def test_shift_time_negative():
    init_string = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    logger = logging.getLogger("testing")
    templ = "{init?fmt=%Y%m%d%H?shift=-86400}"
    expected_filename = "2017060300"
    ss = StringSub(logger, templ, init=init_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
def test_shift_time_lead_negative():
    init_string = datetime.datetime.strptime("2019020700", '%Y%m%d%H')
    lead_string = int("60") * 3600
    logger = logging.getLogger("testing")
    templ = "dwd_{init?fmt=%Y%m%d%H}_{lead?fmt=%.3H?shift=-86400}_{lead?fmt=%.3H}"
    expected_filename = "dwd_2019020700_036_060"
    ss = StringSub(logger, templ, init=init_string, lead=lead_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
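# A hedged arithmetic check (assumption: the shift value is given in seconds and is
# applied to the lead before formatting) showing where the "036" above comes from:
lead_seconds = int("60") * 3600         # 216000 seconds
shifted_seconds = lead_seconds - 86400  # shift=-86400 -> 129600 seconds
print(shifted_seconds // 3600)          # 36, rendered as "036" by fmt=%.3H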
def test_multiple_valid_substitution_valid():
    valid_string = datetime.datetime.strptime("2018020112", '%Y%m%d%H')
    lead_string = int("123") * 3600
    logger = logging.getLogger("testing")
    templ = "{valid?fmt=%Y%m%d%H}/gfs.t{valid?fmt=%H}.pgrb2.0p25.{lead?fmt=%HHH}"
    expected_filename = "2018020112/gfs.t12.pgrb2.0p25.123"
    ss = StringSub(logger, templ, valid=valid_string, lead=lead_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
def test_multiple_valid_substitution_init():
    init_string = datetime.datetime.strptime("2017060400", '%Y%m%d%H')
    lead_string = 0
    logger = logging.getLogger("testing")
    templ = "{init?fmt=%Y%m%d%H}/gfs.t{init?fmt=%H}z.pgrb2.0p25.f{lead?fmt=%.2H}"
    expected_filename = "2017060400/gfs.t00z.pgrb2.0p25.f00"
    ss = StringSub(logger, templ, init=init_string, lead=lead_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
def test_multiple_valid_substitution_init_complex():
    init_string = datetime.datetime.strptime("2016061018", '%Y%m%d%H')
    lead_string = int("6") * 3600
    logger = logging.getLogger("testing")
    templ = "ncar.ral.CoSPA.HRRR.{init?fmt=%Y-%m-%dT%H:%M:%S}.PT{lead?fmt=%.2H}:00.nc"
    expected_filename = "ncar.ral.CoSPA.HRRR.2016-06-10T18:00:00.PT06:00.nc"
    ss = StringSub(logger, templ, init=init_string, lead=lead_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
def test_cycle_hour():
    cycle_string = 0
    valid_string = datetime.datetime.strptime("20180103", '%Y%m%d')
    logger = logging.getLogger("dummy")
    templ = "prefix.{valid?fmt=%Y%m%d}.tm{cycle?fmt=%2H}"
    expected_filename = "prefix.20180103.tm00"
    ss = StringSub(logger, templ, valid=valid_string, cycle=cycle_string)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
def test_crow_variable_hour():
    # Test that StringSub's doStringSub() correctly creates the valid hour
    # without any zero-padding when given the following as input:
    # pgbf{lead?fmt=%H}.gfs.{valid?fmt=%Y%m%d%H}
    # pgbf([0-9]{1,3}).gfs.(2[0-9]{9})
    logger = logging.getLogger("crow_data")

    # crow input files with 3, 2, and 1-digit lead times:
    crow_input_file_3 = 'pgbf219.gfs.2017060418'
    crow_input_file_2 = 'pgbf18.gfs.2017062000'
    crow_input_file_1 = 'pgbf3.gfs.2017060418'
    lead_1 = int('3') * 3600
    lead_2 = int('18') * 3600
    lead_3 = int('219') * 3600
    valid_2 = datetime.datetime.strptime('2017062000', '%Y%m%d%H')
    valid_1 = valid_3 = datetime.datetime.strptime('2017060418', '%Y%m%d%H')
    templ = 'pgbf{lead?fmt=%H}.gfs.{valid?fmt=%Y%m%d%H}'
    ss_1 = StringSub(logger, templ, valid=valid_1, lead=lead_1)
    ss_2 = StringSub(logger, templ, valid=valid_2, lead=lead_2)
    ss_3 = StringSub(logger, templ, valid=valid_3, lead=lead_3)
    crow_1_output = ss_1.doStringSub()
    crow_2_output = ss_2.doStringSub()
    crow_3_output = ss_3.doStringSub()
    # print("crow_1 output: ", crow_1_output)
    # print("crow_2 output: ", crow_2_output)
    # print("crow_3 output: ", crow_3_output)
    assert (crow_1_output == crow_input_file_1
            and crow_2_output == crow_input_file_2
            and crow_3_output == crow_input_file_3)
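# Hedged cross-check of the regex quoted in the comment above: it should match all
# three crow filenames and capture the unpadded lead hour and the valid time.
import re
crow_regex = re.compile(r'pgbf([0-9]{1,3})\.gfs\.(2[0-9]{9})')
for name in ('pgbf3.gfs.2017060418', 'pgbf18.gfs.2017062000', 'pgbf219.gfs.2017060418'):
    lead_hours, valid_ymdh = crow_regex.match(name).groups()
    print(lead_hours, valid_ymdh)  # e.g. 3 2017060418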
def test_ym_date_dir():
    # Test that the ym directory can be read in and does substitution correctly
    logger = logging.getLogger("test")
    # e.g. /d1/METplus_TC/adeck_orig/201708/atcfunix.gfs.2017080100
    date_str = '201708'
    templ = '/d1/METplus_TC/adeck_orig/{date?fmt=%s}/' \
            'atcfunix.gfs.2017080100.dat'
    ss = StringSub(logger, templ, date=date_str)
    filename = ss.doStringSub()
    expected_filename = '/d1/METplus_TC/adeck_orig/201708/' \
                        'atcfunix.gfs.2017080100.dat'
    assert filename == expected_filename
def test_gdas_substitution():
    # Test that the string template substitution works correctly for GDAS
    # prepbufr files, which do not make use of the cycle hour or the offset
    # to generate the valid time.
    valid_string = "2018010411"
    valid_obj = datetime.datetime.strptime(valid_string, '%Y%m%d%H')
    logger = logging.getLogger("testing")
    templ = "prepbufr.gdas.{valid?fmt=%Y%m%d%H}.nc"
    expected_filename = 'prepbufr.gdas.' + valid_string + '.nc'
    ss = StringSub(logger, templ, valid=valid_obj)
    filename = ss.doStringSub()
    assert (filename == expected_filename)
Example #13
    def generate_output_nc_filename(self, prepbufr_file_info):
        """! Create the output netCDF filename as specified in the use
        case/custom configuration file.
             Args:
                 @param prepbufr_file_info - a named tuple describing the
                                             prepbufr file of interest (its
                                             full filepath, date, cycle, and
                                             offset)
             Returns:
                 nc_output_filepath - the full filepath of the output netCDF
                                      file; the filename follows the format
                                      specified in the configuration file
        """
        # pylint:disable=protected-access
        # Need to call sys._getframe() to get the filename and method/func
        # for logging information.
        cur_filename = sys._getframe().f_code.co_filename
        cur_function = sys._getframe().f_code.co_name

        self.logger.debug('DEBUG:|' + cur_function + '|' + cur_filename +
                          ' Generating output NetCDF file name...')

        # Get the output directory
        pb2nc_output_dir = self.pb_dict['PB2NC_OUTPUT_DIR']

        # Get the cycle hour and offset hour from the prepbufr file info named
        # tuple
        if prepbufr_file_info.cycle:
            # Get the cycle hour, offset hour and add the appropriate
            # prefix, validation ymd and .nc extension
            cycle = prepbufr_file_info.cycle
            offset = prepbufr_file_info.offset
            date = prepbufr_file_info.date

            string_sub = StringSub(self.logger,
                                   self.pb_dict['NC_FILE_TMPL'],
                                   init=str(date),
                                   cycle=cycle,
                                   offset=offset)
            nc_output_filename = string_sub.doStringSub()
            nc_output_filepath = os.path.join(pb2nc_output_dir,
                                              nc_output_filename)

        else:
            # Typically for files that aren't separated into dated
            # subdirectories, the date is incorporated in the filename.
            # Append the input file name with .nc extension
            # extract the filename portion of the full_filepath
            filename = os.path.basename(prepbufr_file_info.full_filepath)
            nc_output_filename = filename + ".nc"
            nc_output_filepath = os.path.join(pb2nc_output_dir,
                                              nc_output_filename)
        return nc_output_filepath
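# Hedged usage sketch (the namedtuple name and values below are hypothetical): the
# method above expects an object exposing full_filepath, date, cycle and offset.
from collections import namedtuple
PrepbufrFileInfo = namedtuple('PrepbufrFileInfo',
                              ['full_filepath', 'date', 'cycle', 'offset'])
info = PrepbufrFileInfo('/path/to/prepbufr.gdas.2018010412', '20180104', '12', '00')
print(info.date, info.cycle, info.offset)  # attributes read by generate_output_nc_filename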
def test_ymd_date_dir():
    # Test that the ymd directory can be read in and does substitution correctly
    logger = logging.getLogger("test")
    # e.g. /d1/METplus_TC/adeck_orig/20170811/atcfunix.gfs.2017080100
    init_str = datetime.datetime.strptime('2017081118', '%Y%m%d%H')
    date_str = '20170811'
    templ = '/d1/METplus_TC/adeck_orig/{date?fmt=%s}/atcfunix.gfs.' \
            '{init?fmt=%Y%m%d%H}.dat'
    ss = StringSub(logger, templ, date=date_str, init=init_str)
    filename = ss.doStringSub()
    expected_filename = '/d1/METplus_TC/adeck_orig/20170811/' \
                        'atcfunix.gfs.2017081118.dat'
    assert filename == expected_filename
Example #15
def test_calc_valid_for_prepbufr(key, value):
    pytest.skip('deprecated function')
    # Verify that the previous day is correctly calculated when
    # the negative_offset_hour > cycle_hour
    cycle_hour = "00"
    init_string = "2018010306"
    logger = logging.getLogger("dummy")
    templ = "prefix.{valid?fmt=%Y%m%d%H}.tm{cycle?fmt=%H}z.tm{" \
        "offset?fmt=%H}.nc"

    ss = StringSub(logger, templ, init=init_string, cycle=cycle_hour,
                   offset=key)
    valid_time = ss.calc_valid_for_prepbufr()
    assert (valid_time == value)
def test_create_grid2obs_regex_gfs():
    pytest.skip('deprecated function')
    # Test that the regex created from a template is what is expected
    logger = logging.getLogger("test")
    templ = '/path/to/gfs/pgbf{lead?fmt=%H}.gfs.{valid?fmt=%Y%m%d%HH}'

    # variables to pass into StringSub
    valid_str = datetime.datetime.strptime('2017081118', '%Y%m%d%H')
    lead_str = 0

    ss = StringSub(logger, templ, valid=valid_str, lead=lead_str)
    actual_regex = ss.create_grid2obs_regex()
    expected_regex = '/path/to/gfs/pgbf([0-9]{1,3}).gfs.([0-9]{10})$'
    assert actual_regex == expected_regex
Example #17
def test_create_cyclone_regex():
    pytest.skip('deprecated function')
    # Test that the regex created from a template is what is expected
    logger = logging.getLogger("test")
    templ = '/d1/METplus_TC/bdeck/{date?fmt=%s}/b{region?fmt=%s}' \
            '{cyclone?fmt=%s}{misc?fmt=%s}.dat'
    date_str = '201708'
    region_str = 'al'
    cyclone_str = '05'
    year_str = '2017'
    ss = StringSub(logger, templ, date=date_str, region=region_str,
                   cyclone=cyclone_str, misc=year_str)
    actual_regex = ss.create_cyclone_regex()
    expected_regex = '/d1/METplus_TC/bdeck/([0-9]{4,10})/b([a-zA-Z]{2})([0-9]' \
                     '{2,3})([a-zA-Z0-9-_.]+).dat'
    assert actual_regex == expected_regex
Example #18
def test_offset_hour():
    logger = logging.getLogger("dummy")
    expected_hour = int(3)
    offset = "03"
    templ = "prefix.{valid?fmt=%Y%m%d}.tm{offset?fmt=%H}"
    ss = StringSub(logger, templ, offset=offset)
    assert (ss.offset_hour == expected_hour)
def test_create_grid2obs_regex_hrrr():
    pytest.skip('deprecated function')
    # Test that the regex created from a template is what is expected
    logger = logging.getLogger("test")
    templ = \
    '/path/to/hrrr/hrrr.t{cycle?fmt=%HH}z.wrfprsf{lead?fmt=%HH}.grib2'

    # variables to set for StringSub
    cycle_str = '15'
    lead_str = '12'

    ss = StringSub(logger, templ, cycle=cycle_str, lead=lead_str)
    actual_regex = ss.create_grid2obs_regex()
    expected_regex = \
        '/path/to/hrrr/hrrr.t([0-9]{2,3})z.wrfprsf([0-9]{1,3}).grib2$'
    assert actual_regex == expected_regex
Example #20
def test_nam_substitution_HH(key, value):
    pytest.skip('time offsets no longer computed in StringSub')
    # Test that the substitution works correctly when given an init time,
    # cycle hour, and negative offset hour.
    init_string = datetime.datetime.strptime("20180102", '%Y%m%d')
    cycle_string = key
    offset_string = 10800 #'03'
    expected_filename = value
    logger = logging.getLogger("test")
    templ = \
        'prepbufr.nam.{valid?fmt=%Y%m%d%H}.t{cycle?fmt=%HH}z.tm{' \
        'offset?fmt=%HH}.nc'
    ss = StringSub(logger, templ, init=init_string, cycle=cycle_string,
                   offset=offset_string)
    filename = ss.do_string_sub()
    # print('nam filename: ', filename)
    assert (filename == expected_filename)
def test_create_grid2obs_regex_gdas():
    pytest.skip('deprecated function')
    # Test that the regex created from a template is what is expected
    logger = logging.getLogger("test")
    templ = \
        '/path/to/gdas/prepbufr.gdas.{valid?fmt=%Y%m%d%HH}'
    # '/path/to/nam.20170611/nam.t{cycle?fmt=%HH}z.prepbufr.tm{offset?fmt=%HH}'
    # templ_hrrr ='/path/to/hrrr.t{cycle?fmt=%HH}z.wrfprsf{lead?fmt=%HH}.grib2'
    # tmpl_gdas = 'prepbufr.gdas.{valid?fmt=%Y%m%d%HH}'

    # variables to set for StringSub
    valid_str = '2017063018'

    ss = StringSub(logger, templ, valid=valid_str)
    actual_regex = ss.create_grid2obs_regex()
    expected_regex = '/path/to/gdas/prepbufr.gdas.([0-9]{10})$'
    assert actual_regex == expected_regex
def test_create_grid2obs_regex_nam():
    pytest.skip('deprecated function')
    # Test that the regex created from a template is what is expected
    logger = logging.getLogger("test")
    templ = \
        '/path/to/nam.20170811/nam.t{cycle?' \
        'fmt=%HH}z.prepbufr.tm{offset?fmt=%HH}'

    # variables to set for StringSub
    cycle_str = '18'
    offset_str = '03'

    ss = StringSub(logger, templ, cycle=cycle_str, offset=offset_str)
    actual_regex = ss.create_grid2obs_regex()
    expected_regex = \
        '/path/to/nam.20170811/nam.t([0-9]{2,3})z.prepbufr.tm([0-9]{2,3})$'
    assert actual_regex == expected_regex
Example #23
def test_ymd_region_cyclone():
    # Test that we can recreate the full file path with a date,
    # region, and cyclone
    logger = logging.getLogger("test")
    # /d1/METplus_TC/bdeck_orig/20170811/bal052017.dat
    date_str = '201708'
    region_str = 'al'
    cyclone_str = '05'
    year_str = '2017'
    # templ = '/d1/METplus_TC/bdeck/{date?fmt=%Y%m}/bal{region?fmt=%s}.dat'
    templ = '/d1/METplus_TC/bdeck/{date?fmt=%s}/b{region?fmt=%s}' \
            '{cyclone?fmt=%s}{misc?fmt=%s}.dat'
    ss = StringSub(logger, templ, date=date_str, region=region_str,
                   cyclone=cyclone_str, misc=year_str)
    full_file = ss.do_string_sub()
    expected_full_file = '/d1/METplus_TC/bdeck/201708/bal052017.dat'
    assert full_file == expected_full_file
Example #24
def test_cycle_hour():
    cycle_string = "00"
    valid_string = "20180103"
    logger = logging.getLogger("dummy")
    templ = "prefix.{valid?fmt=%Y%m%d}.tm{cycle?fmt=%H}"
    ss = StringSub(logger, templ, valid=valid_string, cycle=cycle_string)
    expected_hours = int(0)
    assert (ss.cycle_time_hours == expected_hours)
Example #25
def test_nam_substitution_HHH(key, value):
    # Test that the substitution works correctly when given an init time,
    # cycle hour, and negative offset hour.
    init_string = "20180102"
    cycle_string = key
    offset_string = '03'
    expected_filename = value
    logger = logging.getLogger("test")
    templ = \
        'prepbufr.nam.{valid?fmt=%Y%m%d%H}.t{cycle?fmt=%HHH}z.tm{' \
        'offset?fmt=%HH}.nc'
    ss = StringSub(logger,
                   templ,
                   init=init_string,
                   cycle=cycle_string,
                   offset=offset_string)
    filename = ss.doStringSub()
    print('nam filename: ', filename)
    assert (filename == expected_filename)
Example #26
    def find_deck_files(self, deck, basin, cyclone, model_list, time_info):
        """!Find ADECK or EDECK files that correspond to the BDECk file found
            Args:
                @param deck type of deck (A or E)
                @param basin region of storm from config
                @param cyclone ID number of cyclone from config
                @param model_list list of models that may be available
                @param time_info object containing timing information to process
        """
        deck_list = []
        # get matching adeck wildcard expression for first model
        string_sub = StringSub(self.logger,
                               self.c_dict[deck + 'DECK_TEMPLATE'],
                               basin=basin,
                               cyclone=cyclone,
                               model=model_list[0],
                               **time_info)
        deck_expr = os.path.join(self.c_dict[deck + 'DECK_DIR'],
                                 string_sub.do_string_sub())

        # add adeck files if they exist
        for model in model_list:
            deck_glob = deck_expr.replace(model_list[0], model)
            self.logger.debug('Looking for {}DECK file: {}'.format(
                deck, deck_glob))
            deck_files = glob.glob(deck_glob)
            if not deck_files:
                continue

            # there should only be 1 file that matches
            deck_file = deck_files[0]

            # if deck exists, add to list
            if os.path.isfile(deck_file):
                self.logger.debug('Adding {}DECK: {}'.format(deck, deck_file))
                deck_list.append(deck_file)

        return deck_list
Example #27
    def run_at_time(self, input_dict):
        """! Do some processing for the current run time (init or valid)
              Args:
                @param input_dict dictionary containing time information of current run
                        generally contains 'now' (current) time and 'init' or 'valid' time
        """
        # fill in time info dictionary
        time_info = time_util.ti_calculate(input_dict)

        # check if looping by valid or init and log time for run
        loop_by = time_info['loop_by']
        self.logger.info('Running ExampleWrapper at {} time {}'.format(loop_by,
                                                                       time_info[loop_by+'_fmt']))

        # read input directory and template from config dictionary
        input_dir = self.c_dict['INPUT_DIR']
        input_template = self.c_dict['INPUT_TEMPLATE']
        self.logger.info('Input directory is {}'.format(input_dir))
        self.logger.info('Input template is {}'.format(input_template))

        # get forecast leads to loop over
        lead_seq = util.get_lead_sequence(self.config, input_dict)
        for lead in lead_seq:

            # set forecast lead time in hours
            time_info['lead_hours'] = lead

            # recalculate time info items
            time_info = time_util.ti_calculate(time_info)

            # log init, valid, and forecast lead times for current loop iteration
            self.logger.info('Processing forecast lead {} initialized at {} and valid at {}'
                             .format(lead, time_info['init'].strftime('%Y-%m-%d %HZ'),
                                     time_info['valid'].strftime('%Y-%m-%d %HZ')))

            # perform string substitution to find filename based on template and current run time
            # pass in logger, then template, then any items to use to fill in template
            # pass time info with ** in front to expand each dictionary item to a variable
            #  i.e. time_info['init'] becomes init=init_value
            filename = StringSub(self.logger,
                                 input_template,
                                 **time_info).do_string_sub()
            self.logger.info('Looking in input directory for file: {}'.format(filename))

        return True
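# Minimal sketch of the ** expansion described in the comments above (show_kwargs is
# a hypothetical helper, not part of the wrapper): each dictionary key becomes a
# keyword argument, so time_info['init'] arrives as init=<value>.
def show_kwargs(**kwargs):
    print(', '.join('{}={}'.format(key, val) for key, val in kwargs.items()))

time_info = {'init': '2017060400', 'lead_hours': 3}
show_kwargs(**time_info)  # equivalent to show_kwargs(init='2017060400', lead_hours=3)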
def test_ccpa_template():
    passed = True
    valid_string = datetime.datetime.strptime("2019022403", '%Y%m%d%H')
    lead_string = 10800
    logger = logging.getLogger("testing")
    templ = "ccpa.{valid?fmt=%Y%m%d}/06/ccpa.t{valid?fmt=%H}z.{lead?fmt=%.2H}h.hrap.conus.gb2"
    expected_filename = "ccpa.20190224/06/ccpa.t03z.03h.hrap.conus.gb2"
    ss = StringSub(logger, templ, valid=valid_string, lead=lead_string)
    filename = ss.doStringSub()
    if filename != expected_filename:
        passed = False

    valid_string = datetime.datetime.strptime("2019022406", '%Y%m%d%H')
    lead_string = int("6") * 3600
    expected_filename = "ccpa.20190224/06/ccpa.t06z.06h.hrap.conus.gb2"
    ss = StringSub(logger, templ, valid=valid_string, lead=lead_string)
    filename = ss.doStringSub()
    if filename != expected_filename:
        passed = False

    assert passed
Example #29
    def run_at_time_once(self, input_dict):
        self.clear()
        if self.c_dict['OBS_INPUT_DIR'] == '':
            self.logger.error('Must set PB2NC_INPUT_DIR in config file')
            exit(1)

        if self.c_dict['OBS_INPUT_TEMPLATE'] == '':
            self.logger.error('Must set PB2NC_INPUT_TEMPLATE in config file')
            exit(1)

        if self.c_dict['OUTPUT_DIR'] == '':
            self.logger.error('Must set PB2NC_OUTPUT_DIR in config file')
            exit(1)

        if self.c_dict['OUTPUT_TEMPLATE'] == '':
            self.logger.error('Must set PB2NC_OUTPUT_TEMPLATE in config file')
            exit(1)

        input_dir = self.c_dict['OBS_INPUT_DIR']
        input_template = self.c_dict['OBS_INPUT_TEMPLATE']
        output_dir = self.c_dict['OUTPUT_DIR']
        output_template = self.c_dict['OUTPUT_TEMPLATE']

        infile = None
        # loop over offset list and find first file that matches
        for offset in self.c_dict['OFFSETS']:
            input_dict['offset'] = offset
            time_info = time_util.ti_calculate(input_dict)
            infile = self.find_obs(time_info, None)

            if infile is not None:
                if isinstance(infile, list):
                    for f in infile:
                        self.infiles.append(f)
                else:
                    self.infiles.append(infile)
                self.logger.debug('Adding input file {}'.format(infile))
                break

        if infile is None:
            self.logger.error('Could not find input file in {} matching template {}'
                              .format(input_dir, input_template))
            return False

        outSts = StringSub(self.logger,
                           output_template,
                           **time_info)
        outfile = outSts.do_string_sub()
        outfile = os.path.join(output_dir, outfile)
        self.set_output_path(outfile)

        # if we don't overwrite and the output file exists, warn and continue
        if os.path.exists(outfile) and \
          self.c_dict['SKIP_IF_OUTPUT_EXISTS'] is True:
            self.logger.debug('Skip writing output file {} because it already '
                              'exists. Remove file or change '
                              'PB2NC_SKIP_IF_OUTPUT_EXISTS to False to process'
                              .format(outfile))
            return True

        # set config file since command is reset after each run
        self.param = self.c_dict['CONFIG_FILE']

        # list of fields to print to log
        print_list = ["PB2NC_MESSAGE_TYPE", "PB2NC_STATION_ID",
                      "OBS_WINDOW_BEGIN", "OBS_WINDOW_END",
                      "PB2NC_GRID", "PB2NC_POLY", "OBS_BUFR_VAR_LIST",
                      "TIME_SUMMARY_FLAG", "TIME_SUMMARY_BEG",
                      "TIME_SUMMARY_END", "TIME_SUMMARY_VAR_NAMES",
                      "TIME_SUMMARY_TYPES" ]

        # set environment variables needed for MET application
        self.add_env_var("PB2NC_MESSAGE_TYPE", self.c_dict['MESSAGE_TYPE'])
        self.add_env_var("PB2NC_STATION_ID", self.c_dict['STATION_ID'])
        self.add_env_var("OBS_WINDOW_BEGIN", str(self.c_dict['OBS_WINDOW_BEGIN']))
        self.add_env_var("OBS_WINDOW_END", str(self.c_dict['OBS_WINDOW_END']))
        self.add_env_var("PB2NC_GRID", self.c_dict['GRID'])
        self.add_env_var("PB2NC_POLY", self.c_dict['POLY'])

        tmp_message_type = str(self.c_dict['BUFR_VAR_LIST']).replace("\'", "\"")
        bufr_var_list = ''.join(tmp_message_type.split())
        self.add_env_var("OBS_BUFR_VAR_LIST", bufr_var_list)

        self.add_env_var('TIME_SUMMARY_FLAG',
                         str(self.c_dict['TIME_SUMMARY_FLAG']))
        self.add_env_var('TIME_SUMMARY_BEG',
                         self.c_dict['TIME_SUMMARY_BEG'])
        self.add_env_var('TIME_SUMMARY_END',
                         self.c_dict['TIME_SUMMARY_END'])
        self.add_env_var('TIME_SUMMARY_VAR_NAMES',
                         str(self.c_dict['TIME_SUMMARY_VAR_NAMES']))
        self.add_env_var('TIME_SUMMARY_TYPES',
                         str(self.c_dict['TIME_SUMMARY_TYPES']))

        # send environment variables to logger
        self.logger.debug("ENVIRONMENT FOR NEXT COMMAND: ")
        self.print_user_env_items()
        for env_item in print_list:
            self.print_env_item(env_item)
        self.logger.debug("COPYABLE ENVIRONMENT FOR NEXT COMMAND: ")
        self.print_env_copy(print_list)

        cmd = self.get_command()
        if cmd is None:
            self.logger.error("Could not generate command")
            return
        self.build()
Example #30
    def process_data(self, basin, cyclone, model_list, time_info):
        """!Find requested files and run tc_pairs
            Args:
                @param basin region of storm from config
                @param cyclone ID number of cyclone from config
                @param model_list list of models that may be available
                @param time_info object containing timing information to process
        """
        # get bdeck file
        bdeck_files = []

        # set regex expressions for basin and cyclone if wildcard is used
        # use the cyclone value as-is if it is not a wildcard
        if cyclone != '*':
            cyclone_regex = cyclone
        else:
            cyclone_regex = "([0-9]{2,4})"

        if basin != '??':
            basin_regex = basin
        else:
            basin_regex = "([a-zA-Z]{2})"

        # get search expression for bdeck files to pass to glob
        string_sub = StringSub(self.logger,
                               self.c_dict['BDECK_TEMPLATE'],
                               basin=basin,
                               cyclone=cyclone,
                               **time_info)
        bdeck_glob = os.path.join(self.c_dict['BDECK_DIR'],
                                  string_sub.do_string_sub())
        self.logger.debug('Looking for BDECK: {}'.format(bdeck_glob))

        # get all files that match expression
        bdeck_files = sorted(glob.glob(bdeck_glob))

        # if no bdeck_files found
        if len(bdeck_files) == 0:
            self.logger.warning('No BDECK files found')
            return False

        # find corresponding adeck or edeck files
        for bdeck_file in bdeck_files:
            self.logger.debug('Found BDECK: {}'.format(bdeck_file))

            # set current basin and cyclone from bdeck file
            # if basin or cyclone are a wildcard, these will be
            # replaced by the value pulled from the bdeck file
            current_basin = basin
            current_cyclone = cyclone

            # if wildcard was used in bdeck, pull out what was
            # substituted for * to find corresponding bdeck file
            matches = []
            if '*' in bdeck_glob or '?' in bdeck_glob:
                # get regex expression to pull out basin and cyclone
                string_sub = StringSub(self.logger,
                                       self.c_dict['BDECK_TEMPLATE'],
                                       basin=basin_regex,
                                       cyclone=cyclone_regex,
                                       **time_info)
                bdeck_regex = os.path.join(self.c_dict['BDECK_DIR'],
                                           string_sub.do_string_sub())

                # capture wildcard values in template
                bdeck_regex = bdeck_regex.replace('*', '(.*)')
                bdeck_regex = bdeck_regex.replace('?', '(.)')

                match = re.match(bdeck_regex, bdeck_file)
                if match:
                    matches = match.groups()
                    tags = get_tags(self.c_dict['BDECK_TEMPLATE'])
                    match_count = 0
                    for tag in tags:
                        # if a wildcard was configured for this tag, take the
                        # captured value and advance the match index
                        if tag == 'basin' and basin == '??':
                            current_basin = matches[match_count]
                            match_count += 1
                        elif tag == 'cyclone' and cyclone == '*':
                            current_cyclone = matches[match_count]
                            match_count += 1
                        elif tag == '*' or tag == '?':
                            match_count += 1

            # create lists for deck files, put bdeck in list so it can be handled
            # the same as a and e for reformatting even though it will always be
            # size 1
            bdeck_list = [bdeck_file]
            adeck_list = []
            edeck_list = []

            # get adeck files
            if self.c_dict['GET_ADECK']:
                adeck_list = self.find_deck_files('A', current_basin,
                                                  current_cyclone, model_list,
                                                  time_info)
            # get edeck files
            if self.c_dict['GET_EDECK']:
                edeck_list = self.find_deck_files('E', current_basin,
                                                  current_cyclone, model_list,
                                                  time_info)

            if not adeck_list and not edeck_list:
                self.logger.debug('Could not find any corresponding '
                                  'ADECK or EDECK files')
                continue

            # reformat extra tropical cyclone files if necessary
            if self.c_dict['REFORMAT_DECK']:
                adeck_list = self.reformat_files(adeck_list, 'A', time_info)
                bdeck_list = self.reformat_files(bdeck_list, 'B', time_info)
                edeck_list = self.reformat_files(edeck_list, 'E', time_info)

            self.adeck = adeck_list
            self.bdeck = bdeck_list
            self.edeck = edeck_list

            if self.c_dict['OUTPUT_TEMPLATE']:
                # get output filename from template
                string_sub = StringSub(self.logger,
                                       self.c_dict['OUTPUT_TEMPLATE'],
                                       basin=current_basin,
                                       cyclone=current_cyclone,
                                       **time_info)
                output_file = string_sub.do_string_sub()
            else:
                output_file = 'tc_pairs'
            self.outfile = output_file

            # build command and run tc_pairs
            cmd = self.get_command()
            if cmd is None:
                self.logger.error("Could not generate command")
                return

            output_path = self.get_output_path() + '.tcst'
            if os.path.isfile(
                    output_path) and self.c_dict['SKIP_OUTPUT'] is True:
                self.logger.debug('Skip running tc_pairs because '+\
                                  'output file {} already exists. '.format(output_path)+\
                                  'Change TC_PAIRS_SKIP_IF_OUTPUT_EXISTS to False to '+\
                                  'overwrite file')
            else:
                self.build()
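# Hedged illustration (path borrowed from the bdeck examples above) of how the regex
# built from basin_regex and cyclone_regex recovers the basin and cyclone identifiers
# when wildcards were configured:
import re
bdeck_regex = '/d1/METplus_TC/bdeck/201708/b([a-zA-Z]{2})([0-9]{2,4})2017.dat'
match = re.match(bdeck_regex, '/d1/METplus_TC/bdeck/201708/bal052017.dat')
if match:
    current_basin, current_cyclone = match.groups()
    print(current_basin, current_cyclone)  # al 05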