Example 1
    def pre_process(self, *args, **kwargs):
        wrf_config = self.get_config(**kwargs)
        wrf_home = wrf_config.get('wrf_home')
        nfs_metgrid_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')

        logging.info('Running em_real...')
        em_real_dir = utils.get_em_real_dir(wrf_home)

        logging.info('Cleaning up files')
        utils.delete_files_with_prefix(em_real_dir, 'met_em*')
        utils.delete_files_with_prefix(em_real_dir, 'rsl*')

        # Copy the zipped met_em.* files from the NFS metgrid dir
        logging.info('Copying metgrid.zip file')
        utils.copy_files_with_prefix(nfs_metgrid_dir, 'metgrid.zip',
                                     em_real_dir)

        logging.info('Unzipping metgrid.zip')
        with ZipFile(os.path.join(em_real_dir, 'metgrid.zip'), 'r',
                     compression=zipfile.ZIP_DEFLATED) as metgrid_zip_file:
            metgrid_zip_file.extractall(path=em_real_dir)

        logging.info('Cleaning up metgrid.zip')
        os.remove(os.path.join(em_real_dir, 'metgrid.zip'))
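
The snippets in this listing lean on a utils module that is not shown. As a point of reference, here is a minimal sketch of the glob-based file helpers called above; the names and signatures are inferred from the call sites, so the actual project code may differ:

import glob
import os
import shutil


def delete_files_with_prefix(dir_path, prefix):
    # 'prefix' is used as a glob pattern, e.g. 'met_em*' or 'rsl*'
    for f in glob.glob(os.path.join(dir_path, prefix)):
        os.remove(f)


def copy_files_with_prefix(src_dir, prefix, dest_dir):
    # copy every file matching the pattern, preserving metadata
    for f in glob.glob(os.path.join(src_dir, prefix)):
        shutil.copy2(f, dest_dir)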
Example 2
    def post_process(self, *args, **kwargs):
        # zip the met_em.* files and move the zip to the nfs dir
        wrf_config = self.get_config(**kwargs)
        wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))

        nfs_metgrid_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')

        utils.create_dir_if_not_exists(nfs_metgrid_dir)

        utils.create_zip_with_prefix(wps_dir, 'met_em.d*',
                                     os.path.join(wps_dir, 'metgrid.zip'))

        utils.delete_files_with_prefix(nfs_metgrid_dir, 'met_em.d*')
        utils.move_files_with_prefix(wps_dir, 'metgrid.zip', nfs_metgrid_dir)
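
A matching sketch of the zip/move helpers used here and in the later examples; again inferred from usage (including the clean_up flag seen in Example 9), not the actual implementation:

import glob
import os
import shutil
from zipfile import ZipFile, ZIP_DEFLATED


def create_zip_with_prefix(dir_path, prefix, zip_path, clean_up=False):
    # zip every file matching the pattern; optionally delete the originals
    with ZipFile(zip_path, 'w', compression=ZIP_DEFLATED) as z:
        for f in glob.glob(os.path.join(dir_path, prefix)):
            z.write(f, arcname=os.path.basename(f))
            if clean_up:
                os.remove(f)
    return zip_path


def move_files_with_prefix(src_dir, prefix, dest_dir):
    for f in glob.glob(os.path.join(src_dir, prefix)):
        shutil.move(f, os.path.join(dest_dir, os.path.basename(f)))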
Example 3
def pre_ungrib(**kwargs):
    logging.info('Running preprocessing for ungrib...')

    wrf_config = get_wrf_config(**kwargs)

    wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))
    logging.info('WPS dir: %s' % wps_dir)

    logging.info('Cleaning up files')
    utils.delete_files_with_prefix(wps_dir, 'FILE:*')
    utils.delete_files_with_prefix(wps_dir, 'PFILE:*')

    # Linking VTable
    if not os.path.exists(os.path.join(wps_dir, 'Vtable')):
        logging.info('Creating Vtable symlink')
        os.symlink(os.path.join(wps_dir, 'ungrib/Variable_Tables/Vtable.NAM'),
                   os.path.join(wps_dir, 'Vtable'))
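
The existence check above keeps os.symlink from raising FileExistsError on reruns, but it does not re-point a stale link. A small hypothetical helper that would make this step fully idempotent:

import os


def ensure_symlink(src, link_path):
    # re-create the link if it already exists as a symlink;
    # a real file at link_path is left untouched
    if os.path.islink(link_path):
        os.remove(link_path)
    if not os.path.exists(link_path):
        os.symlink(src, link_path)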
Example 4
def run_wps(wrf_config):
    logging.info('Downloading GFS data')
    executor.download_gfs_data(wrf_config)

    logging.info('Replacing the namelist wps file')
    executor.replace_namelist_wps(wrf_config)

    logging.info('Running WPS...')
    executor.run_wps(wrf_config)

    logging.info('Cleaning up wps dir...')
    wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))
    shutil.rmtree(wrf_config.get('gfs_dir'))
    utils.delete_files_with_prefix(wps_dir, 'FILE:*')
    utils.delete_files_with_prefix(wps_dir, 'PFILE:*')
    utils.delete_files_with_prefix(wps_dir, 'geo_em.*')
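
Throughout these examples wrf_config only needs a get() accessor, so a plain dict is enough to sketch an invocation. The keys and paths below are placeholders, and the richer config objects in the later examples also expose is_set() and to_json_string():

wrf_config = {
    'wrf_home': '/opt/wrf',     # placeholder path
    'gfs_dir': '/opt/wrf/gfs',  # placeholder path
}
run_wps(wrf_config)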
Example 5
def create_rf_plots_wrf(nc_f, plots_output_dir, plots_output_base_dir, lon_min=None, lat_min=None, lon_max=None,
                        lat_max=None, filter_threshold=0.05, run_prefix='WRF'):
    if not all([lon_min, lat_min, lon_max, lat_max]):
        lon_min, lat_min, lon_max, lat_max = constants.SRI_LANKA_EXTENT

    variables = ext_utils.extract_variables(nc_f, 'RAINC, RAINNC', lat_min, lat_max, lon_min, lon_max)

    lats = variables['XLAT']
    lons = variables['XLONG']

    # cell size is calculated from the mean spacing of the lat and lon points
    cz = np.round(np.mean(np.append(lons[1:len(lons)] - lons[0: len(lons) - 1], lats[1:len(lats)]
                                    - lats[0: len(lats) - 1])), 3)
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250, 300, 400, 500, 600, 750]
    cmap = cm.s3pcpn

    basemap = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                      urcrnrlat=lat_max, resolution='h')

    data = variables['RAINC'] + variables['RAINNC']
    logging.info('Filtering with the threshold %f' % filter_threshold)
    data[data < filter_threshold] = 0.0
    variables['PRECIP'] = data

    prefix = 'wrf_plots'
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        t0 = dt.datetime.strptime(variables['Times'][0], '%Y-%m-%d_%H:%M:%S')
        t1 = dt.datetime.strptime(variables['Times'][1], '%Y-%m-%d_%H:%M:%S')
        step = (t1 - t0).total_seconds() / 3600.0

        inst_precip = ext_utils.get_two_element_average(variables['PRECIP'])
        cum_precip = ext_utils.get_two_element_average(variables['PRECIP'], return_diff=False)

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts, shift_mins=30)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_file = os.path.join(temp_dir, 'wrf_inst_' + lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))

            ext_utils.create_asc_file(np.flip(inst_precip[i - 1], 0), lats, lons, inst_file + '.asc', cell_size=cz)

            title = {
                'label': 'Hourly rf for %s LK' % lk_ts.strftime('%Y-%m-%d_%H:%M:%S'),
                'fontsize': 30
            }
            ext_utils.create_contour_plot(inst_precip[i - 1], inst_file + '.png', lat_min, lon_min, lat_max, lon_max,
                                          title, clevs=clevs, cmap=cmap, basemap=basemap)

            if (i * step) % 24 == 0:
                t = 'Daily rf from %s LK to %s LK' % (
                    (lk_ts - dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'), lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
                d = int(i * step / 24) - 1
                logging.info('Creating images for D%d' % d)
                cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)

                if i * step / 24 > 1:
                    cum_precip_24h = cum_precip[i - 1] - cum_precip[i - 1 - int(24 / step)]
                else:
                    cum_precip_24h = cum_precip[i - 1]

                ext_utils.create_asc_file(np.flip(cum_precip_24h, 0), lats, lons, cum_file + '.asc', cell_size=cz)

                ext_utils.create_contour_plot(cum_precip_24h, cum_file + '.png', lat_min, lon_min, lat_max, lon_max, t,
                                              clevs=clevs, cmap=cmap, basemap=basemap)

                gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
                images = [os.path.join(temp_dir, 'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png') for j in
                          np.arange(lk_ts - dt.timedelta(hours=24 - step), lk_ts + dt.timedelta(hours=step),
                                    dt.timedelta(hours=step)).astype(dt.datetime)]
                ext_utils.create_gif(images, gif_file + '.gif')

        logging.info('Creating the zips')
        utils.create_zip_with_prefix(temp_dir, '*.png', os.path.join(temp_dir, 'pngs.zip'))
        utils.create_zip_with_prefix(temp_dir, '*.asc', os.path.join(temp_dir, 'ascs.zip'))

        logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')

        logging.info('Moving pngs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.png', plots_output_dir)
        logging.info('Moving ascs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.asc', plots_output_dir)
        logging.info('Copying gifs to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_output_dir)
        logging.info('Copying zips to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.zip', plots_output_dir)

        plots_latest_dir = os.path.join(plots_output_base_dir, 'latest', run_prefix, os.path.basename(plots_output_dir))
        # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(plots_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + plots_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_latest_dir)
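
ext_utils.create_asc_file is presumably an ESRI ASCII grid writer, given the .asc extension and the cell_size argument. A minimal sketch under that assumption (not the project's actual implementation):

import numpy as np


def create_asc_file(data, lats, lons, out_path, cell_size=0.1, no_data=-9999):
    # write a 2-D array as an ESRI ASCII raster grid
    with open(out_path, 'w') as f:
        f.write('NCOLS %d\n' % data.shape[1])
        f.write('NROWS %d\n' % data.shape[0])
        f.write('XLLCORNER %f\n' % np.min(lons))
        f.write('YLLCORNER %f\n' % np.min(lats))
        f.write('CELLSIZE %f\n' % cell_size)
        f.write('NODATA_VALUE %d\n' % no_data)
        np.savetxt(f, data, fmt='%.4f')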
Example 6
def extract_jaxa_satellite_data(start_ts_utc,
                                end_ts_utc,
                                output_dir,
                                cleanup=True,
                                cum=False,
                                tmp_dir=None,
                                lat_min=5.722969,
                                lon_min=79.52146,
                                lat_max=10.06425,
                                lon_max=82.18992,
                                output_prefix='jaxa_sat',
                                db_adapter_config=None):
    start = utils.datetime_floor(start_ts_utc, 3600)
    end = utils.datetime_floor(end_ts_utc, 3600)

    login = '******'

    url0 = 'ftp://' + login + '@hokusai.eorc.jaxa.jp/realtime/txt/05_AsiaSS/YYYY/MM/DD/gsmap_nrt.YYYYMMDD.HH00.05_AsiaSS.csv.zip'
    url1 = 'ftp://' + login + '@hokusai.eorc.jaxa.jp/now/txt/05_AsiaSS/gsmap_now.YYYYMMDD.HH00_HH59.05_AsiaSS.csv.zip'

    def get_jaxa_url(ts):
        url_switch = (dt.datetime.utcnow() - ts) > dt.timedelta(hours=5)
        _url = url0 if url_switch else url1
        ph = {
            'YYYY': ts.strftime('%Y'),
            'MM': ts.strftime('%m'),
            'DD': ts.strftime('%d'),
            'HH': ts.strftime('%H')
        }
        for k, v in list(ph.items()):
            _url = _url.replace(k, v)
        return _url

    if tmp_dir is None:
        tmp_dir = tempfile.mkdtemp(prefix='tmp_jaxa')

    url_dest_list = []
    for timestamp in np.arange(start, end,
                               dt.timedelta(hours=1)).astype(dt.datetime):
        url = get_jaxa_url(timestamp)
        url_dest_list.append(
            (url, os.path.join(tmp_dir, os.path.basename(url)),
             os.path.join(
                 output_dir, output_prefix + '_' +
                 timestamp.strftime('%Y-%m-%d_%H:%M') + '.asc')))

    procs = multiprocessing.cpu_count()

    logging.info('Downloading inventory in parallel')
    utils.download_parallel(url_dest_list, procs)
    logging.info('Downloading inventory complete')

    logging.info('Processing files in parallel')
    Parallel(n_jobs=procs)(delayed(process_jaxa_zip_file)
                           (i[1], i[2], lat_min, lon_min, lat_max, lon_max,
                            cum, output_prefix, db_adapter_config)
                           for i in url_dest_list)
    logging.info('Processing files complete')

    logging.info('Creating sat rf gif for today')
    create_daily_gif(start, output_dir, output_prefix + '_today.gif',
                     output_prefix)

    prev_day_gif = os.path.join(output_dir, output_prefix + '_yesterday.gif')
    if not utils.file_exists_nonempty(prev_day_gif) or start.strftime(
            '%H:%M') == '00:00':
        logging.info('Creating sat rf gif for yesterday')
        create_daily_gif(
            utils.datetime_floor(start, 3600 * 24) - dt.timedelta(days=1),
            output_dir, output_prefix + '_yesterday.gif', output_prefix)

    if cum:
        logging.info('Processing cumulative')
        process_cumulative_plot(url_dest_list, start_ts_utc, end_ts_utc,
                                output_dir, lat_min, lon_min, lat_max, lon_max)
        logging.info('Processing cumulative complete')

    # clean up temp dir
    if cleanup:
        logging.info('Cleaning up')
        shutil.rmtree(tmp_dir)
        utils.delete_files_with_prefix(output_dir, '*.archive')
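
utils.download_parallel is assumed to fan the (url, destination, ...) tuples out over a worker pool. A minimal sketch of that idea (hypothetical; the real helper likely adds retries and archive handling, given the *.archive cleanup above):

import multiprocessing
import urllib.request


def _download_one(entry):
    # each entry is (url, local_dest, ...); extra elements are ignored
    url, dest = entry[0], entry[1]
    urllib.request.urlretrieve(url, dest)


def download_parallel(url_dest_list, procs):
    with multiprocessing.Pool(procs) as pool:
        pool.map(_download_one, url_dest_list)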
Example 7
def extract_jaxa_satellite_data_every_half_hr(exe_ts_utc,
                                              output_dir,
                                              cleanup=True,
                                              cum=False,
                                              tmp_dir=None,
                                              lat_min=5.722969,
                                              lon_min=79.52146,
                                              lat_max=10.06425,
                                              lon_max=82.18992,
                                              output_prefix='jaxa_sat',
                                              db_adapter_config=None):
    exe_ts_utc = exe_ts_utc - dt.timedelta(minutes=2)
    print(
        '-------------extract_jaxa_satellite_data_half_hr---------------exe_ts_utc:',
        exe_ts_utc)
    exe_ts_utc = exe_ts_utc - dt.timedelta(minutes=30)
    run_minute = int(exe_ts_utc.strftime('%M'))
    print('run_minute : ', run_minute)
    year_str = exe_ts_utc.strftime('%Y')
    month_str = exe_ts_utc.strftime('%m')
    day_str = exe_ts_utc.strftime('%d')
    hour_str = exe_ts_utc.strftime('%H')
    hour_str1 = (exe_ts_utc + dt.timedelta(hours=1)).strftime('%H')
    # if run_minute == 0:
    #    url = 'ftp://*****:*****@hokusai.eorc.jaxa.jp/now/txt/05_AsiaSS/gsmap_now.{}{}{}.{}00_{}59.05_AsiaSS.csv.zip' \
    #        .format(year_str, month_str, day_str, hour_str, hour_str)
    # else:
    #    url = 'ftp://*****:*****@hokusai.eorc.jaxa.jp/now/txt/05_AsiaSS/gsmap_now.{}{}{}.{}30_{}29.05_AsiaSS.csv.zip' \
    #        .format(year_str, month_str, day_str, hour_str, hour_str1)

    if run_minute == 0:
        url = 'ftp://*****:*****@hokusai.eorc.jaxa.jp/now/txt/island/SriLanka/gsmap_now.{}{}{}.{}00_{}59.SriLanka.csv.zip' \
            .format(year_str, month_str, day_str, hour_str, hour_str)
    else:
        url = 'ftp://*****:*****@hokusai.eorc.jaxa.jp/now/txt/island/SriLanka/gsmap_now.{}{}{}.{}30_{}29.SriLanka.csv.zip' \
            .format(year_str, month_str, day_str, hour_str, hour_str1)

    print('Download url : ', url)
    start_time = exe_ts_utc
    end_time = start_time + dt.timedelta(hours=1)
    print('_get_start_end|[start_time, end_time] : ', [start_time, end_time])

    if tmp_dir is None:
        tmp_dir = tempfile.mkdtemp(prefix='tmp_jaxa')

    url_dest_list = [(url, os.path.join(tmp_dir, os.path.basename(url)),
                      os.path.join(
                          output_dir, output_prefix + '_' +
                          exe_ts_utc.strftime('%Y-%m-%d_%H:%M') + '.asc'))]

    procs = multiprocessing.cpu_count()

    logging.info('Downloading inventory in parallel')
    utils.download_parallel(url_dest_list, procs)
    logging.info('Downloading inventory complete')

    logging.info('Processing files in parallel')
    Parallel(n_jobs=procs)(delayed(process_jaxa_zip_file)
                           (i[1], i[2], lat_min, lon_min, lat_max, lon_max,
                            cum, output_prefix, db_adapter_config)
                           for i in url_dest_list)
    logging.info('Processing files complete')

    logging.info('Creating sat rf gif for today')
    create_daily_gif(exe_ts_utc, output_dir, output_prefix + '_today.gif',
                     output_prefix)

    prev_day_gif = os.path.join(output_dir, output_prefix + '_yesterday.gif')
    if not utils.file_exists_nonempty(prev_day_gif) or exe_ts_utc.strftime(
            '%H:%M') == '00:00':
        logging.info('Creating sat rf gif for yesterday')
        create_daily_gif(
            utils.datetime_floor(exe_ts_utc, 3600 * 24) - dt.timedelta(days=1),
            output_dir, output_prefix + '_yesterday.gif', output_prefix)

    if cum:
        logging.info('Processing cumulative')
        process_cumulative_plot(url_dest_list, start_time, end_time,
                                output_dir, lat_min, lon_min, lat_max, lon_max)
        logging.info('Processing cumulative complete')

    # clean up temp dir
    if cleanup:
        logging.info('Cleaning up')
        shutil.rmtree(tmp_dir)
        utils.delete_files_with_prefix(output_dir, '*.archive')
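
utils.datetime_floor appears in Examples 6 through 8 with a seconds argument (3600 for the hour, 3600 * 24 for the day). A plausible sketch, assuming naive UTC datetimes:

import datetime as dt


def datetime_floor(ts, seconds):
    # floor a datetime to the nearest lower multiple of `seconds`
    epoch = dt.datetime(1970, 1, 1)
    delta = (ts - epoch).total_seconds()
    return epoch + dt.timedelta(seconds=delta - delta % seconds)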
Example 8
def extract_jaxa_satellite_data_half_hr(exe_ts_utc,
                                        output_dir,
                                        cleanup=True,
                                        cum=False,
                                        tmp_dir=None,
                                        lat_min=5.722969,
                                        lon_min=79.52146,
                                        lat_max=10.06425,
                                        lon_max=82.18992,
                                        output_prefix='jaxa_sat',
                                        db_adapter_config=None):
    print(
        '-------------extract_jaxa_satellite_data_half_hr---------------exe_ts_utc:',
        exe_ts_utc)
    exe_ts_utc = exe_ts_utc - dt.timedelta(hours=1)
    login = '******'
    url_hour = 'ftp://' + login + '@hokusai.eorc.jaxa.jp/now/txt/05_AsiaSS/gsmap_now.YYYYMMDD.HH00_HH59.05_AsiaSS.csv.zip'
    url_half_hour = 'ftp://' + login + '@hokusai.eorc.jaxa.jp/now/txt/05_AsiaSS/gsmap_now.YYYYMMDD.HH30_KK29.05_AsiaSS.csv.zip'

    run_minute = int(exe_ts_utc.strftime('%M'))
    print('run_minute : ', run_minute)

    remainder = run_minute % 30
    run_minute = run_minute - remainder
    start_time = exe_ts_utc - dt.timedelta(minutes=remainder)
    end_time = start_time + dt.timedelta(minutes=30)
    print('_get_start_end|[start_time, end_time] : ', [start_time, end_time])
    if run_minute == 0:
        exe_ts_utc = exe_ts_utc.replace(minute=0, second=0, microsecond=0)
    else:
        exe_ts_utc = exe_ts_utc.replace(minute=30, second=0, microsecond=0)

    def _get_download_url(run_minute):
        remainder = run_minute % 30
        run_minute = run_minute - remainder
        if run_minute == 0:
            return url_hour
        else:
            return url_half_hour

    def _format_url(url):
        ph = {
            'YYYY': exe_ts_utc.strftime('%Y'),
            'MM': exe_ts_utc.strftime('%m'),
            'DD': exe_ts_utc.strftime('%d'),
            'KK': (exe_ts_utc + dt.timedelta(hours=1)).strftime('%H'),
            'HH': exe_ts_utc.strftime('%H')
        }
        for k, v in list(ph.items()):
            url = url.replace(k, v)
        print('url : ', url)
        return url

    if tmp_dir is None:
        tmp_dir = tempfile.mkdtemp(prefix='tmp_jaxa')

    url = _get_download_url(run_minute)
    formatted_url = _format_url(url)
    print('formatted_url : ', formatted_url)
    url_dest_list = [(formatted_url,
                      os.path.join(tmp_dir, os.path.basename(formatted_url)),
                      os.path.join(
                          output_dir, output_prefix + '_' +
                          exe_ts_utc.strftime('%Y-%m-%d_%H:%M') + '.asc'))]

    procs = multiprocessing.cpu_count()

    logging.info('Downloading inventory in parallel')
    utils.download_parallel(url_dest_list, procs)
    logging.info('Downloading inventory complete')

    logging.info('Processing files in parallel')
    Parallel(n_jobs=procs)(delayed(process_jaxa_zip_file)
                           (i[1], i[2], lat_min, lon_min, lat_max, lon_max,
                            cum, output_prefix, db_adapter_config)
                           for i in url_dest_list)
    logging.info('Processing files complete')

    logging.info('Creating sat rf gif for today')
    create_daily_gif(exe_ts_utc, output_dir, output_prefix + '_today.gif',
                     output_prefix)

    prev_day_gif = os.path.join(output_dir, output_prefix + '_yesterday.gif')
    if not utils.file_exists_nonempty(prev_day_gif) or exe_ts_utc.strftime(
            '%H:%M') == '00:00':
        logging.info('Creating sat rf gif for yesterday')
        create_daily_gif(
            utils.datetime_floor(exe_ts_utc, 3600 * 24) - dt.timedelta(days=1),
            output_dir, output_prefix + '_yesterday.gif', output_prefix)

    if cum:
        logging.info('Processing cumulative')
        process_cumulative_plot(url_dest_list, start_time, end_time,
                                output_dir, lat_min, lon_min, lat_max, lon_max)
        logging.info('Processing cumulative complete')

    # clean up temp dir
    if cleanup:
        logging.info('Cleaning up')
        shutil.rmtree(tmp_dir)
        utils.delete_files_with_prefix(output_dir, '*.archive')
Example 9
def run_em_real(wrf_config):
    logging.info('Running em_real...')

    wrf_home = wrf_config.get('wrf_home')
    em_real_dir = utils.get_em_real_dir(wrf_home)
    procs = wrf_config.get('procs')
    run_id = wrf_config.get('run_id')
    output_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('nfs_dir'), 'results', run_id, 'wrf'))
    archive_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('archive_dir'), 'results', run_id, 'wrf'))

    logging.info('Backup the output dir')
    utils.backup_dir(output_dir)

    logs_dir = utils.create_dir_if_not_exists(os.path.join(output_dir, 'logs'))

    logging.info('Copying metgrid.zip')
    metgrid_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')
    if wrf_config.is_set('wps_run_id'):
        logging.info('wps_run_id is set. Copying metgrid from ' +
                     wrf_config.get('wps_run_id'))
        utils.copy_files_with_prefix(
            metgrid_dir,
            wrf_config.get('wps_run_id') + '_metgrid.zip', em_real_dir)
        metgrid_zip = os.path.join(
            em_real_dir,
            wrf_config.get('wps_run_id') + '_metgrid.zip')
    else:
        utils.copy_files_with_prefix(metgrid_dir,
                                     wrf_config.get('run_id') + '_metgrid.zip',
                                     em_real_dir)
        metgrid_zip = os.path.join(em_real_dir,
                                   wrf_config.get('run_id') + '_metgrid.zip')

    logging.info('Extracting metgrid.zip')
    with ZipFile(metgrid_zip, 'r', compression=ZIP_DEFLATED) as z:
        z.extractall(path=em_real_dir)

    # logs destination: nfs/logs/xxxx/rsl*
    try:
        try:
            logging.info('Starting real.exe')
            utils.run_subprocess(
                'mpirun --allow-run-as-root -np %d ./real.exe' % procs,
                cwd=em_real_dir)
        finally:
            logging.info('Moving Real log files...')
            utils.create_zip_with_prefix(em_real_dir,
                                         'rsl*',
                                         os.path.join(em_real_dir,
                                                      'real_rsl.zip'),
                                         clean_up=True)
            utils.move_files_with_prefix(em_real_dir, 'real_rsl.zip', logs_dir)

        try:
            logging.info('Starting wrf.exe')
            utils.run_subprocess(
                'mpirun --allow-run-as-root -np %d ./wrf.exe' % procs,
                cwd=em_real_dir)
        finally:
            logging.info('Moving WRF log files...')
            utils.create_zip_with_prefix(em_real_dir,
                                         'rsl*',
                                         os.path.join(em_real_dir,
                                                      'wrf_rsl.zip'),
                                         clean_up=True)
            utils.move_files_with_prefix(em_real_dir, 'wrf_rsl.zip', logs_dir)
    finally:
        logging.info('Moving namelist input file')
        utils.move_files_with_prefix(em_real_dir, 'namelist.input', output_dir)

    logging.info('WRF em_real: DONE! Moving data to the output dir')

    logging.info('Extracting rf from domain3')
    d03_nc = glob.glob(os.path.join(em_real_dir, 'wrfout_d03_*'))[0]
    ncks_query = 'ncks -v %s %s %s' % ('RAINC,RAINNC,XLAT,XLONG,Times', d03_nc,
                                       d03_nc + '_rf')
    utils.run_subprocess(ncks_query)

    logging.info('Extracting rf from domain1')
    d01_nc = glob.glob(os.path.join(em_real_dir, 'wrfout_d01_*'))[0]
    ncks_query = 'ncks -v %s %s %s' % ('RAINC,RAINNC,XLAT,XLONG,Times', d01_nc,
                                       d01_nc + '_rf')
    utils.run_subprocess(ncks_query)

    logging.info('Moving data to the output dir')
    utils.move_files_with_prefix(em_real_dir, 'wrfout_d03*_rf', output_dir)
    utils.move_files_with_prefix(em_real_dir, 'wrfout_d01*_rf', output_dir)
    logging.info('Moving data to the archive dir')
    utils.move_files_with_prefix(em_real_dir, 'wrfout_*', archive_dir)

    logging.info('Cleaning up files')
    utils.delete_files_with_prefix(em_real_dir, 'met_em*')
    utils.delete_files_with_prefix(em_real_dir, 'rsl*')
    os.remove(metgrid_zip)
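
utils.run_subprocess is used here for mpirun and ncks alike; a minimal sketch of what it presumably wraps (the real helper may capture output or log differently):

import logging
import shlex
import subprocess


def run_subprocess(cmd, cwd=None):
    # run a command line, raising CalledProcessError on a non-zero exit
    logging.info('Running: %s', cmd)
    subprocess.check_call(shlex.split(cmd), cwd=cwd)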
Example 10
def run_wps(wrf_config):
    logging.info('Running WPS: START')
    wrf_home = wrf_config.get('wrf_home')
    wps_dir = utils.get_wps_dir(wrf_home)
    output_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('nfs_dir'), 'results',
                     wrf_config.get('run_id'), 'wps'))

    logging.info('Backup the output dir')
    utils.backup_dir(output_dir)

    logs_dir = utils.create_dir_if_not_exists(os.path.join(output_dir, 'logs'))

    logging.info('Cleaning up files')
    utils.delete_files_with_prefix(wps_dir, 'FILE:*')
    utils.delete_files_with_prefix(wps_dir, 'PFILE:*')
    utils.delete_files_with_prefix(wps_dir, 'met_em*')

    # Linking VTable
    if not os.path.exists(os.path.join(wps_dir, 'Vtable')):
        logging.info('Creating Vtable symlink')
        os.symlink(os.path.join(wps_dir, 'ungrib/Variable_Tables/Vtable.NAM'),
                   os.path.join(wps_dir, 'Vtable'))

    # Running link_grib.csh
    gfs_date, gfs_cycle, start = utils.get_appropriate_gfs_inventory(
        wrf_config)
    dest = utils.get_gfs_data_url_dest_tuple(wrf_config.get('gfs_url'),
                                             wrf_config.get('gfs_inv'),
                                             gfs_date, gfs_cycle, '',
                                             wrf_config.get('gfs_res'),
                                             '')[1].replace('.grb2', '')
    utils.run_subprocess('csh link_grib.csh %s/%s' %
                         (wrf_config.get('gfs_dir'), dest),
                         cwd=wps_dir)

    try:
        # Starting ungrib.exe
        try:
            utils.run_subprocess('./ungrib.exe', cwd=wps_dir)
        finally:
            utils.move_files_with_prefix(wps_dir, 'ungrib.log', logs_dir)

        # Starting geogrid.exe
        if not check_geogrid_output(wps_dir):
            logging.info('Geogrid output not available')
            try:
                utils.run_subprocess('./geogrid.exe', cwd=wps_dir)
            finally:
                utils.move_files_with_prefix(wps_dir, 'geogrid.log', logs_dir)

        # Starting metgrid.exe
        try:
            utils.run_subprocess('./metgrid.exe', cwd=wps_dir)
        finally:
            utils.move_files_with_prefix(wps_dir, 'metgrid.log', logs_dir)
    finally:
        logging.info('Moving namelist wps file')
        utils.move_files_with_prefix(wps_dir, 'namelist.wps', output_dir)

    logging.info('Running WPS: DONE')

    logging.info('Zipping metgrid data')
    metgrid_zip = os.path.join(wps_dir,
                               wrf_config.get('run_id') + '_metgrid.zip')
    utils.create_zip_with_prefix(wps_dir, 'met_em.d*', metgrid_zip)

    logging.info('Moving metgrid data')
    dest_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')
    utils.move_files_with_prefix(wps_dir, os.path.basename(metgrid_zip),
                                 dest_dir)
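
check_geogrid_output gates the geogrid run above; since geogrid's products are the static geo_em.d0N.nc files, a plausible sketch is simply:

import glob
import os


def check_geogrid_output(wps_dir):
    # true when geogrid has already produced its geo_em files
    return len(glob.glob(os.path.join(wps_dir, 'geo_em.d*.nc'))) > 0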
Example 11
    def process(self, *args, **kwargs):
        config = self.get_config(**kwargs)
        logging.info('wrf config: ' + config.to_json_string())

        start_date = config.get('start_date')
        d03_dir = config.get('wrf_output_dir')
        d03_sl = os.path.join(d03_dir, 'wrfout_d03_' + start_date + ':00_SL')

        # create a temp work dir & get a local copy of the d03.._SL
        temp_dir = utils.create_dir_if_not_exists(
            os.path.join(config.get('wrf_home'), 'temp'))
        shutil.copy2(d03_sl, temp_dir)

        d03_sl = os.path.join(temp_dir, os.path.basename(d03_sl))

        lat_min = 5.722969
        lon_min = 79.52146
        lat_max = 10.06425
        lon_max = 82.18992

        variables = ext_utils.extract_variables(d03_sl, 'RAINC, RAINNC',
                                                lat_min, lat_max, lon_min,
                                                lon_max)

        lats = variables['XLAT']
        lons = variables['XLONG']

        # cell size is calculated from the mean spacing of the lat and lon points
        cz = np.round(
            np.mean(
                np.append(lons[1:len(lons)] - lons[0:len(lons) - 1],
                          lats[1:len(lats)] - lats[0:len(lats) - 1])), 3)
        # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
        # clevs_cum = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100])
        # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
        # norm_cum = colors.BoundaryNorm(boundaries=clevs_cum, ncolors=256)
        # cmap = plt.get_cmap('jet')

        clevs = [
            0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250,
            300, 400, 500, 600, 750
        ]
        clevs_cum = clevs
        norm = None
        norm_cum = None
        cmap = cm.s3pcpn

        basemap = Basemap(projection='merc',
                          llcrnrlon=lon_min,
                          llcrnrlat=lat_min,
                          urcrnrlon=lon_max,
                          urcrnrlat=lat_max,
                          resolution='h')

        filter_threshold = 0.05
        data = variables['RAINC'] + variables['RAINNC']
        logging.info('Filtering with the threshold %f' % filter_threshold)
        data[data < filter_threshold] = 0.0
        variables['PRECIP'] = data

        pngs = []
        ascs = []

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_precip = variables['PRECIP'][i] - variables['PRECIP'][i - 1]

            inst_file = os.path.join(temp_dir, 'wrf_inst_' + time)
            title = {
                'label':
                'Hourly rf for %s LK\n%s UTC' %
                (lk_ts.strftime('%Y-%m-%d_%H:%M:%S'), time),
                'fontsize':
                30
            }

            ext_utils.create_asc_file(np.flip(inst_precip, 0),
                                      lats,
                                      lons,
                                      inst_file + '.asc',
                                      cell_size=cz)
            ascs.append(inst_file + '.asc')

            ext_utils.create_contour_plot(inst_precip,
                                          inst_file + '.png',
                                          lat_min,
                                          lon_min,
                                          lat_max,
                                          lon_max,
                                          title,
                                          clevs=clevs,
                                          cmap=cmap,
                                          basemap=basemap,
                                          norm=norm)
            pngs.append(inst_file + '.png')

            if i % 24 == 0:
                t = 'Daily rf from %s LK to %s LK' % (
                    (lk_ts -
                     dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'),
                    lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
                d = int(i / 24) - 1
                logging.info('Creating images for D%d' % d)
                cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)

                ext_utils.create_asc_file(np.flip(variables['PRECIP'][i], 0),
                                          lats,
                                          lons,
                                          cum_file + '.asc',
                                          cell_size=cz)
                ascs.append(cum_file + '.asc')

                ext_utils.create_contour_plot(variables['PRECIP'][i] -
                                              variables['PRECIP'][i - 24],
                                              cum_file + '.png',
                                              lat_min,
                                              lon_min,
                                              lat_max,
                                              lon_max,
                                              t,
                                              clevs=clevs,
                                              cmap=cmap,
                                              basemap=basemap,
                                              norm=norm_cum)
                pngs.append(cum_file + '.png')

                gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
                images = [
                    os.path.join(
                        temp_dir,
                        'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                    for j in np.arange(ts - dt.timedelta(hours=23),
                                       ts + dt.timedelta(hours=1),
                                       dt.timedelta(hours=1)).astype(
                                           dt.datetime)
                ]
                ext_utils.create_gif(images, gif_file + '.gif')

        logging.info('Creating the zips')
        utils.create_zip_with_prefix(temp_dir, '*.png',
                                     os.path.join(temp_dir, 'pngs.zip'))
        utils.create_zip_with_prefix(temp_dir, '*.asc',
                                     os.path.join(temp_dir, 'ascs.zip'))
        # utils.create_zipfile(pngs, os.path.join(temp_dir, 'pngs.zip'))
        # utils.create_zipfile(ascs, os.path.join(temp_dir, 'ascs.zip'))

        logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')

        logging.info('Moving pngs to ' + d03_dir)
        utils.move_files_with_prefix(temp_dir, '*.png', d03_dir)
        logging.info('Moving ascs to ' + d03_dir)
        utils.move_files_with_prefix(temp_dir, '*.asc', d03_dir)
        logging.info('Copying gifs to ' + d03_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_dir)
        logging.info('Copying zips to ' + d03_dir)
        utils.copy_files_with_prefix(temp_dir, '*.zip', d03_dir)

        d03_latest_dir = os.path.join(config.get('nfs_dir'), 'latest',
                                      os.path.basename(config.get('wrf_home')))
        # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(d03_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + d03_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_latest_dir)

        logging.info('Cleaning up temp dir')
        shutil.rmtree(temp_dir)
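
utils.datetime_utc_to_lk converts the naive UTC timestamps from the NetCDF output to Sri Lanka local time (UTC+5:30). A sketch consistent with both call sites, including the shift_mins=30 seen in Example 5:

import datetime as dt


def datetime_utc_to_lk(ts, shift_mins=0):
    # Sri Lanka standard time is UTC+5:30; shift_mins adds an extra offset
    return ts + dt.timedelta(hours=5, minutes=30 + shift_mins)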
Example 12
    def pre_process(self, *args, **kwargs):
        logging.info('Running pre-processing for metgrid...')

        wrf_config = self.get_config(**kwargs)
        wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))
        utils.delete_files_with_prefix(wps_dir, 'met_em*')
Example 13
def pre_metgrid(**kwargs):
    logging.info('Running preprocessing for metgrid...')

    wrf_config = get_wrf_config(**kwargs)
    wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))
    utils.delete_files_with_prefix(wps_dir, 'met_em*')