Code example #1
    def test_all(self):
        logging.basicConfig(
            level=logging.DEBUG,
            format=
            '%(asctime)s %(threadName)s %(module)s %(levelname)s %(message)s')

        wrf_home = tempfile.mkdtemp(prefix='wrf_test_')
        nfs_dir = os.path.join(wrf_home, 'nfs')

        run_id = 'WRF_test0'

        output_dir = os.path.join(nfs_dir, 'results', run_id, 'wrf')
        utils.create_dir_if_not_exists(output_dir)
        shutil.copy(
            res_mgr.get_resource_path('test/wrfout_d03_2017-10-02_12:00:00'),
            output_dir)

        wrf_conf_dict = {
            "wrf_home": wrf_home,
            "nfs_dir": nfs_dir,
            "period": 0.25,
            "start_date": "2017-10-02_12:00"
        }

        db_conf_dict = {
            "host": "localhost",
            "user": "******",
            "password": "******",
            "db": "testdb"
        }

        extract_data_wrf.run(run_id, wrf_conf_dict, db_conf_dict, upsert=True)
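Every example in this collection leans on utils.create_dir_if_not_exists, and several use its return value directly as a path (e.g. os.path.join(utils.create_dir_if_not_exists(output_dir), 'rf_mike21.txt') in example #6). A minimal sketch consistent with that usage, assuming the helper simply wraps os.makedirs (an inference, not the repository's actual implementation):

    import os

    def create_dir_if_not_exists(path):
        # create the directory (and any missing parents), then return the
        # path unchanged so the call can be chained inside os.path.join
        if not os.path.exists(path):
            os.makedirs(path)
        return path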
Code example #2
def make_plots(v, out_dir, basemap, start=0, end=-1):
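    # note: lat_min, lon_min, lat_max, lon_max, clevs and norm are module-level
    # globals in the source file, and `out` is passed twice to
    # utils.create_contour_plot below: once as the output path and once in the
    # title position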
    start_rf = v['PRECIP'][0]
    wrfutils.create_dir_if_not_exists(out_dir)
    for j in range(start, end if end > 0 else len(v['PRECIP'])):
        out = out_dir + '/' + v['Times'][j] + 'cum.png'
        if j != 0:
            utils.create_contour_plot(v['PRECIP'][j] - start_rf,
                                      out,
                                      lat_min,
                                      lon_min,
                                      lat_max,
                                      lon_max,
                                      out,
                                      basemap=basemap,
                                      clevs=clevs,
                                      cmap=plt.get_cmap('jet'),
                                      overwrite=True,
                                      norm=norm)
        else:
            utils.create_contour_plot(v['PRECIP'][j],
                                      out,
                                      lat_min,
                                      lon_min,
                                      lat_max,
                                      lon_max,
                                      out,
                                      basemap=basemap,
                                      clevs=clevs,
                                      cmap=plt.get_cmap('jet'),
                                      overwrite=True,
                                      norm=norm)
Code example #3
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_jaxa_weather_stations(nc_f, weather_stations_file, output_dir):
    nc_fid = Dataset(nc_f, 'r')

    stations = pd.read_csv(weather_stations_file, header=0, sep=',')

    output_file_dir = os.path.join(output_dir, 'jaxa-stations-wrf-forecast')
    utils.create_dir_if_not_exists(output_file_dir)

    for idx, station in stations.iterrows():
        logging.info('Extracting station ' + str(station))

        rf, times = extract_point_rf_series(nc_f, station[2], station[1])

        output_file_path = os.path.join(output_file_dir,
                                        station[3] + '-' + str(station[0]) + '-' + times[0].split('_')[0] + '.txt')
        with open(output_file_path, 'w') as output_file:
            output_file.write('jaxa-stations-wrf-forecast\n')
            output_file.write(', '.join(stations.columns.values) + '\n')
            output_file.write(', '.join(str(x) for x in station) + '\n')
            output_file.write('timestamp, rainfall\n')
            for i in range(len(times)):
                output_file.write('%s, %f\n' % (times[i], rf[i]))

    nc_fid.close()
Code example #4
    def post_process(self, *args, **kwargs):
        # make a sym link in the nfs dir
        wrf_config = self.get_config(**kwargs)
        wps_dir = utils.get_wps_dir(wrf_config.get('wrf_home'))

        nfs_metgrid_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')

        utils.create_dir_if_not_exists(nfs_metgrid_dir)
        # utils.delete_files_with_prefix(nfs_metgrid_dir, 'met_em.d*')
        # utils.create_symlink_with_prefix(wps_dir, 'met_em.d*', nfs_metgrid_dir)

        utils.create_zip_with_prefix(wps_dir, 'met_em.d*',
                                     os.path.join(wps_dir, 'metgrid.zip'))

        utils.delete_files_with_prefix(nfs_metgrid_dir, 'met_em.d*')
        utils.move_files_with_prefix(wps_dir, 'metgrid.zip', nfs_metgrid_dir)
Code example #5
    def test_extract_kelani_basin_rainfall_flo2d_obs_150m(self):
        adapter = ext_utils.get_curw_adapter(mysql_config_path='/home/curw/Desktop/2018-05/mysql.json')
        wrf_output_dir = tempfile.mkdtemp(prefix='flo2d_obs_')
        files = ['wrfout_d03_2018-05-23_18:00:00_rf']
        run_prefix = 'wrf0'

        for f in files:
            out_dir = utils.create_dir_if_not_exists(
                os.path.join(wrf_output_dir, f.replace('wrfout_d03', run_prefix).replace(':00_rf', '_0000'), 'wrf'))
            shutil.copy2('/home/curw/Desktop/2018-05/2018-05-23_18:00/wrf0/%s' % f, out_dir)

        run_date = dt.datetime.strptime('2018-05-23_18:00', '%Y-%m-%d_%H:%M')
        now = '_'.join([run_prefix, run_date.strftime('%Y-%m-%d_%H:%M'), '*'])

        d03_nc_f = glob.glob(os.path.join(wrf_output_dir, now, 'wrf', 'wrfout_d03_*'))[0]

        obs_stations = {'Kottawa North Dharmapala School': [79.95818, 6.865576, 'A&T Labs', 'wrf_79.957123_6.859688'],
                        'IBATTARA2': [79.919, 6.908, 'CUrW IoT', 'wrf_79.902664_6.913757'],
                        'Malabe': [79.95738, 6.90396, 'A&T Labs', 'wrf_79.957123_6.913757'],
                        # 'Mutwal': [79.8609, 6.95871, 'A&T Labs', 'wrf_79.875435_6.967812'],
                        'Glencourse': [80.20305, 6.97805, 'Irrigation Department', 'wrf_80.202187_6.967812'],
                        # 'Waga': [80.11828, 6.90678, 'A&T Labs', 'wrf_80.120499_6.913757'],
                        }

        start_ts = '2018-05-26_00:00'
        kelani_lower_basin_points = res_mgr.get_resource_path('extraction/local/klb_glecourse_points_150m.txt')
        kelani_lower_basin_shp = res_mgr.get_resource_path('extraction/shp/klb_glencourse/klb_glencourse.shp')
        duration_days = (8, 0)
        extract_kelani_basin_rainfall_flo2d_with_obs(d03_nc_f, adapter, obs_stations,
                                                     os.path.join(wrf_output_dir, now, 'klb_flo2d'), start_ts,
                                                     duration_days=duration_days,
                                                     kelani_lower_basin_shp=kelani_lower_basin_shp,
                                                     kelani_lower_basin_points=kelani_lower_basin_points,
                                                     output_prefix='RAINCELL_150m')
Code example #6
File: extractor.py Project: hasithadkr7/wrf_docker
def create_rainfall_for_mike21(d0_rf_file, prev_rf_files, output_dir):
    d0 = np.genfromtxt(d0_rf_file, dtype=str)

    t0 = dt.datetime.strptime(' '.join(d0[0][:-1]), '%Y-%m-%d %H:%M:%S')
    t1 = dt.datetime.strptime(' '.join(d0[1][:-1]), '%Y-%m-%d %H:%M:%S')

    res_min = int((t1 - t0).total_seconds() / 60)
    lines_per_day = int(24 * 60 / res_min)
    prev_days = len(prev_rf_files)

    output = None
    for i in range(prev_days):
        if prev_rf_files[prev_days - 1 - i] is not None:
            if output is not None:
                output = np.append(output,
                                   np.genfromtxt(prev_rf_files[prev_days - 1 - i], dtype=str, max_rows=lines_per_day),
                                   axis=0)
            else:
                output = np.genfromtxt(prev_rf_files[prev_days - 1 - i], dtype=str, max_rows=lines_per_day)
        else:
            output = None  # if any of the previous files are missing, skip prepending past data to the forecast
            break

    if output is not None:
        output = np.append(output, d0, axis=0)
    else:
        output = d0

    out_file = os.path.join(utils.create_dir_if_not_exists(output_dir), 'rf_mike21.txt')
    with open(out_file, 'w') as out_f:
        for line in output:
            out_f.write('%s %s\t%s\n' % (line[0], line[1], line[2]))
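The two leading timestamps drive all of the row arithmetic above: their gap gives the data resolution in minutes, and from that the number of rows that make up one day of a previous file. A worked sketch with illustrative values:

    import datetime as dt

    t0 = dt.datetime.strptime('2017-12-11 00:00:00', '%Y-%m-%d %H:%M:%S')
    t1 = dt.datetime.strptime('2017-12-11 00:15:00', '%Y-%m-%d %H:%M:%S')
    res_min = int((t1 - t0).total_seconds() / 60)  # 15 minutes per row
    lines_per_day = int(24 * 60 / res_min)         # 96 rows per day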
Code example #7
File: extractor.py Project: hasithadkr7/wrf_docker
    def test_create_rainfall_for_mike21(self):
        wrf_output_dir = tempfile.mkdtemp(prefix='mike21_')
        run_date = dt.datetime.strptime('2017-12-11_18:00', '%Y-%m-%d_%H:%M')

        basin_shp_file = res_mgr.get_resource_path('extraction/shp/klb-wgs84/klb-wgs84.shp')
        files = ['wrfout_d03_2017-12-09_18:00:00_rf', 'wrfout_d03_2017-12-10_18:00:00_rf',
                 'wrfout_d03_2017-12-11_18:00:00_rf']
        run_prefix = 'wrf0'

        for f in files:
            d03_nc_f = res_mgr.get_resource_path('test/%s' % f)
            out_dir = utils.create_dir_if_not_exists(
                os.path.join(wrf_output_dir, f.replace('wrfout_d03', run_prefix).replace(':00_rf', '_0000')))
            extract_mean_rainfall_from_shp_file(d03_nc_f, out_dir, 'klb_mean_rf', 'klb_mean',
                                                basin_shp_file, constants.KELANI_LOWER_BASIN_EXTENT)

        now = '_'.join([run_prefix, run_date.strftime('%Y-%m-%d_%H:%M'), '*'])
        prev_1 = '_'.join([run_prefix, (run_date - dt.timedelta(days=1)).strftime('%Y-%m-%d_%H:%M'), '*'])
        prev_2 = '_'.join([run_prefix, (run_date - dt.timedelta(days=2)).strftime('%Y-%m-%d_%H:%M'), '*'])
        d03_nc_f = glob.glob(os.path.join(wrf_output_dir, now, 'klb_mean_rf', 'klb_mean_rf.txt'))[0]
        d03_nc_f_prev_1 = glob.glob(os.path.join(wrf_output_dir, prev_1, 'klb_mean_rf', 'klb_mean_rf.txt'))[0]
        d03_nc_f_prev_2 = glob.glob(os.path.join(wrf_output_dir, prev_2, 'klb_mean_rf', 'klb_mean_rf.txt'))[0]

        create_rainfall_for_mike21(d03_nc_f, [d03_nc_f_prev_1, d03_nc_f_prev_2],
                                   os.path.join(wrf_output_dir, now, 'mike_21'))
Code example #8
File: executor.py Project: hasithadkr7/wrf_docker
def test_download_gfs_data():
    wrf_home = '/tmp/wrf'
    gfs_dir = wrf_home + '/gfs'
    utils.create_dir_if_not_exists(wrf_home)
    utils.create_dir_if_not_exists(gfs_dir)
    conf = get_wrf_config(wrf_home,
                          start_date='2017-08-27_00:00',
                          gfs_dir=gfs_dir,
                          period=0.25)

    gfs_date, start_inv = download_gfs_data(conf)
    logging.info('gfs date %s and start inventory %s' % (gfs_date, start_inv))

    files = os.listdir(gfs_dir)

    assert len(files) == int(
        24 * conf.get('period') / conf.get('gfs_step')) + 1
Code example #9
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_metro_col_rf_for_mike21(nc_f, output_dir, prev_rf_files=None, points_file=None):
    if not prev_rf_files:
        prev_rf_files = []

    if not points_file:
        points_file = res_mgr.get_resource_path('extraction/local/metro_col_sub_catch_centroids.txt')
    points = np.genfromtxt(points_file, delimiter=',', names=True, dtype=None)

    point_prcp = ext_utils.extract_points_array_rf_series(nc_f, points)

    t0 = dt.datetime.strptime(point_prcp['Times'][0], '%Y-%m-%d %H:%M:%S')
    t1 = dt.datetime.strptime(point_prcp['Times'][1], '%Y-%m-%d %H:%M:%S')

    res_min = int((t1 - t0).total_seconds() / 60)
    lines_per_day = int(24 * 60 / res_min)
    prev_days = len(prev_rf_files)

    output = None
    for i in range(prev_days):
        if prev_rf_files[prev_days - 1 - i] is not None:
            if output is not None:
                output = np.append(output,
                                   ext_utils.extract_points_array_rf_series(prev_rf_files[prev_days - 1 - i], points)[
                                   :lines_per_day], axis=0)
            else:
                output = ext_utils.extract_points_array_rf_series(prev_rf_files[prev_days - 1 - i], points)[
                         :lines_per_day]
        else:
            output = None  # if any of the previous files are missing, skip prepending past data to the forecast
            break

    if output is not None:
        output = np.append(output, point_prcp, axis=0)
    else:
        output = point_prcp

    fmt = '%s'
    for _ in range(len(output[0]) - 1):
        fmt = fmt + ',%g'
    header = ','.join(output.dtype.names)

    utils.create_dir_if_not_exists(output_dir)
    np.savetxt(os.path.join(output_dir, 'met_col_rf_mike21.txt'), output, fmt=fmt, delimiter=',', header=header,
               comments='', encoding='utf-8')
Code example #10
    def post_process(self, *args, **kwargs):
        config = self.get_config(**kwargs)
        wrf_home = config.get('wrf_home')
        em_real_dir = utils.get_em_real_dir(wrf_home)
        start_date = config.get('start_date')

        logging.info('Moving the WRF logs')
        utils.move_files_with_prefix(
            em_real_dir, 'rsl*',
            os.path.join(utils.get_logs_dir(wrf_home),
                         'rsl-wrf-%s' % start_date))

        logging.info('Moving the WRF files to output directory')
        # move the d03 to nfs
        # ex: /mnt/disks/wrf-mod/nfs/output/wrf0/2017-08-13_00:00/0 .. n
        d03_dir = utils.get_incremented_dir_path(
            os.path.join(config.get('nfs_dir'), 'output',
                         os.path.basename(wrf_home), start_date, '0'))
        self.add_config_item('wrf_output_dir', d03_dir)

        d03_file = os.path.join(em_real_dir,
                                'wrfout_d03_' + start_date + ':00')
        ext_utils.ncks_extract_variables(
            d03_file, ['RAINC', 'RAINNC', 'XLAT', 'XLONG', 'Times'],
            d03_file + '_SL')

        d01_file = os.path.join(em_real_dir,
                                'wrfout_d01_' + start_date + ':00')
        ext_utils.ncks_extract_variables(
            d01_file, ['RAINC', 'RAINNC', 'XLAT', 'XLONG', 'Times'],
            d01_file + '_SL')

        # move the wrfout_SL and the namelist files to the nfs
        utils.create_dir_if_not_exists(d03_dir)
        shutil.move(d03_file + '_SL', d03_dir)
        shutil.move(d01_file + '_SL', d03_dir)
        shutil.copy2(os.path.join(em_real_dir, 'namelist.input'), d03_dir)

        # move the rest to the OUTPUT dir of each run
        # todo: in the docker impl - FIND A BETTER WAY
        archive_dir = utils.get_incremented_dir_path(
            os.path.join(utils.get_output_dir(wrf_home), start_date))
        utils.move_files_with_prefix(em_real_dir, 'wrfout_d*', archive_dir)
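ext_utils.ncks_extract_variables presumably shells out to NCO's ncks to build the trimmed '_SL' companion files; a hypothetical equivalent (an assumption about the helper, not code from the repository) would be:

    import subprocess

    # keep only the listed variables, producing a much smaller NetCDF file
    subprocess.check_call(['ncks', '-v', 'RAINC,RAINNC,XLAT,XLONG,Times',
                           d03_file, d03_file + '_SL'])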
Code example #11
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_mean_rainfall_from_shp_file(nc_f, wrf_output, output_prefix, output_name, basin_shp_file, basin_extent,
                                        curw_db_adapter=None, curw_db_upsert=False, run_prefix='WRF',
                                        run_name='Cloud-1'):
    lon_min, lat_min, lon_max, lat_max = basin_extent

    nc_vars = ext_utils.extract_variables(nc_f, ['RAINC', 'RAINNC'], lat_min, lat_max, lon_min, lon_max)
    lats = nc_vars['XLAT']
    lons = nc_vars['XLONG']
    prcp = nc_vars['RAINC'] + nc_vars['RAINNC']
    times = nc_vars['Times']

    diff = ext_utils.get_two_element_average(prcp)

    polys = shapefile.Reader(basin_shp_file)

    output_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, output_prefix))

    with TemporaryDirectory(prefix=output_prefix) as temp_dir:
        output_file_path = os.path.join(temp_dir, output_prefix + '.txt')
        kub_rf = {}
        with open(output_file_path, 'w') as output_file:
            kub_rf[output_name] = []
            for t in range(0, len(times) - 1):
                cnt = 0
                rf_sum = 0.0
                for y in range(0, len(lats)):
                    for x in range(0, len(lons)):
                        if utils.is_inside_polygon(polys, lats[y], lons[x]):
                            cnt = cnt + 1
                            rf_sum = rf_sum + diff[t, y, x]
                mean_rf = rf_sum / cnt

                t_str = (
                    utils.datetime_utc_to_lk(dt.datetime.strptime(times[t], '%Y-%m-%d_%H:%M:%S'),
                                             shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')
                output_file.write('%s\t%.4f\n' % (t_str, mean_rf))
                kub_rf[output_name].append([t_str, mean_rf])

        utils.move_files_with_prefix(temp_dir, '*.txt', output_dir)

    if curw_db_adapter is not None:
        station = [Station.CUrW, output_name, output_name, -999, -999, 0, 'Kelani upper basin mean rainfall']
        if ext_utils.create_station_if_not_exists(curw_db_adapter, station):
            logging.info('%s station created' % output_name)

        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, kub_rf, upsert=curw_db_upsert, name=run_name,
                                      source=run_prefix)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')
Code example #12
    def test_create_rainfall_for_mike21_obs(self):
        adapter = ext_utils.get_curw_adapter()
        wrf_output_dir = tempfile.mkdtemp(prefix='mike21_obs_')

        out_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output_dir, 'klb_mean_rf'))
        shutil.copy2('/home/curw/Desktop/2018-05/klb_mean_rf/klb_mean_rf.txt', out_dir)

        d0_mean_rf = os.path.join(out_dir, 'klb_mean_rf.txt')

        obs_stations = {'Kottawa North Dharmapala School': [79.95818, 6.865576, 'A&T Labs'],
                        'IBATTARA2': [79.919, 6.908, 'CUrW IoT'],
                        'Malabe': [79.95738, 6.90396, 'A&T Labs'],
                        'Mutwal': [79.8609, 6.95871, 'A&T Labs']}

        start_ts = '2018-05-21_00:00'
        create_rainfall_for_mike21_obs(d0_mean_rf, adapter, obs_stations, out_dir, start_ts)
Code example #13
def create_rainfall_for_mike21_obs(d0_rf_file, adapter, obs_stations, output_dir, start_ts, duration_days=None,
                                   kelani_lower_basin_shp=None):
    if kelani_lower_basin_shp is None:
        kelani_lower_basin_shp = res_mgr.get_resource_path('extraction/shp/klb-wgs84/klb-wgs84.shp')

    if duration_days is None:
        duration_days = (2, 3)

    obs_start = dt.datetime.strptime(start_ts, '%Y-%m-%d_%H:%M') - dt.timedelta(days=duration_days[0])
    obs_end = dt.datetime.strptime(start_ts, '%Y-%m-%d_%H:%M')
    # forecast_end = dt.datetime.strptime(start_ts, '%Y-%m-%d_%H:%M') + dt.timedelta(days=duration_days[1])

    obs = _get_observed_precip(obs_stations, obs_start, obs_end, duration_days, adapter)

    thess_poly = spatial_utils.get_voronoi_polygons(obs_stations, kelani_lower_basin_shp, add_total_area=False)

    observed = None
    for i, _id in enumerate(thess_poly['id']):
        if observed is not None:
            observed = observed + obs[_id].astype(float) * thess_poly['area'][i]
        else:
            observed = obs[_id].astype(float) * thess_poly['area'][i]
    observed = observed / sum(thess_poly['area'])

    d0 = np.genfromtxt(d0_rf_file, dtype=str)
    t0 = dt.datetime.strptime(' '.join(d0[0][:-1]), '%Y-%m-%d %H:%M:%S')
    t1 = dt.datetime.strptime(' '.join(d0[1][:-1]), '%Y-%m-%d %H:%M:%S')

    res_min = int((t1 - t0).total_seconds() / 60)

    # prev_output = np.append(prev_output, d0, axis=0)
    out_file = os.path.join(utils.create_dir_if_not_exists(output_dir), 'rf_mike21_obs.txt')

    with open(out_file, 'w') as out_f:
        for index in observed.index:
            out_f.write('%s:00\t%.4f\n' % (index, observed.precip[index]))

        forecast_start_idx = int(
            np.where((d0[:, 0] == obs_end.strftime('%Y-%m-%d')) & (d0[:, 1] == obs_end.strftime('%H:%M:%S')))[0])
        # note: no need to convert to utc as rf_mike21.txt has times in LK

        for i in range(forecast_start_idx + 1, int(24 * 60 * duration_days[1] / res_min)):
            if i < len(d0):
                out_f.write('%s %s\t%s\n' % (d0[i][0], d0[i][1], d0[i][2]))
            else:
                out_f.write('%s\t0.0\n' % (obs_end + dt.timedelta(hours=i - forecast_start_idx - 1)).strftime(
                    '%Y-%m-%d %H:%M:%S'))
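The Voronoi (Thiessen) weighting above collapses the station series into a single basin series: each station's rainfall is weighted by the area of its polygon, and the sum is normalised by the total area. A worked sketch with illustrative numbers:

    # two stations; areas in the same units as thess_poly['area']
    areas = {'station_a': 12.0, 'station_b': 8.0}
    rf = {'station_a': 4.0, 'station_b': 1.0}  # rainfall at one timestep, mm
    basin_rf = sum(rf[s] * areas[s] for s in areas) / sum(areas.values())  # 2.8 mm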
Code example #14
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_weather_stations(nc_f, wrf_output, weather_stations=None, curw_db_adapter=None, curw_db_upsert=False,
                             run_prefix='WRF', run_name='Cloud-1'):
    if weather_stations is None:
        weather_stations = res_mgr.get_resource_path('extraction/local/kelani_basin_stations.txt')

    nc_fid = Dataset(nc_f, 'r')
    times_len, times = ext_utils.extract_time_data(nc_f)

    prefix = 'stations_rf'
    stations_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, prefix))

    stations_rf = {}
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        with open(weather_stations, 'r') as csvfile:
            stations = csv.reader(csvfile, delimiter=' ')

            for row in stations:
                logging.info(' '.join(row))
                # csv.reader yields strings; cast the grid indices to int
                # before using them to index the netCDF variables
                lon = int(row[1])
                lat = int(row[2])

                station_prcp = nc_fid.variables['RAINC'][:, lat, lon] + nc_fid.variables['RAINNC'][:, lat, lon]

                station_diff = ext_utils.get_two_element_average(station_prcp)

                stations_rf[row[0]] = []

                station_file_path = os.path.join(temp_dir, row[0] + '_%s.txt' % prefix)
                with open(station_file_path, 'w') as station_file:
                    for t in range(0, len(times) - 1):
                        t_str = (
                            utils.datetime_utc_to_lk(dt.datetime.strptime(times[t], '%Y-%m-%d_%H:%M:%S'),
                                                     shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')
                        station_file.write('%s\t%.4f\n' % (t_str, station_diff[t]))
                        stations_rf[row[0]].append([t_str, station_diff[t]])

        utils.move_files_with_prefix(temp_dir, '*.txt', stations_dir)

    if curw_db_adapter is not None:
        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, stations_rf, upsert=curw_db_upsert, name=run_name,
                                      source=run_prefix)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')

    nc_fid.close()
Code example #15
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_weather_stations2(nc_f, wrf_output, weather_stations=None, curw_db_adapter=None, curw_db_upsert=False,
                              run_prefix='WRF', run_name='Cloud-1'):
    if weather_stations is None:
        weather_stations = res_mgr.get_resource_path('extraction/local/wrf_stations.txt')

    points = np.genfromtxt(weather_stations, delimiter=',', names=True, dtype=None)

    point_prcp = ext_utils.extract_points_array_rf_series(nc_f, points)

    t0 = dt.datetime.strptime(point_prcp['Times'][0], '%Y-%m-%d %H:%M:%S')
    t1 = dt.datetime.strptime(point_prcp['Times'][1], '%Y-%m-%d %H:%M:%S')

    res_min = int((t1 - t0).total_seconds() / 60)

    prefix = 'stations_rf'
    stations_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, prefix))

    stations_rf = {}
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        for point in points:
            logging.info(str(point))
            station_name = point[0].decode()
            stations_rf[station_name] = []

            station_file_path = os.path.join(temp_dir, station_name + '_%s.txt' % prefix)
            with open(station_file_path, 'w') as station_file:
                for t in range(0, len(point_prcp)):
                    station_file.write('%s\t%.4f\n' % (point_prcp['Times'][t], point_prcp[station_name][t]))
                    stations_rf[station_name].append([point_prcp['Times'][t], point_prcp[station_name][t]])

        utils.move_files_with_prefix(temp_dir, '*.txt', stations_dir)

    if curw_db_adapter is not None:
        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, stations_rf, upsert=curw_db_upsert, name=run_name,
                                      source=run_prefix)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')
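With names=True and dtype=None, np.genfromtxt infers a structured array, and on the numpy versions this code targets the string columns come back as byte strings, which is why the station name needs point[0].decode() above. An illustrative sketch (hypothetical station file contents):

    import numpy as np
    from io import StringIO

    stations_txt = StringIO('name,lon,lat\nMalabe,79.95738,6.90396\nIBATTARA2,79.919,6.908')
    points = np.genfromtxt(stations_txt, delimiter=',', names=True, dtype=None)
    name = points[0][0].decode()  # b'Malabe' -> 'Malabe'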
Code example #16
File: extractor.py Project: hasithadkr7/wrf_docker
    def test_extract_kelani_basin_rainfall_flo2d(self):
        wrf_output_dir = tempfile.mkdtemp(prefix='flo2d_')
        files = ['wrfout_d03_2017-12-09_18:00:00_rf', 'wrfout_d03_2017-12-10_18:00:00_rf',
                 'wrfout_d03_2017-12-11_18:00:00_rf']
        run_prefix = 'wrf0'

        for f in files:
            out_dir = utils.create_dir_if_not_exists(
                os.path.join(wrf_output_dir, f.replace('wrfout_d03', run_prefix).replace(':00_rf', '_0000'), 'wrf'))
            shutil.copy2(res_mgr.get_resource_path('test/%s' % f), out_dir)

        run_date = dt.datetime.strptime('2017-12-11_18:00', '%Y-%m-%d_%H:%M')
        now = '_'.join([run_prefix, run_date.strftime('%Y-%m-%d_%H:%M'), '*'])
        prev_1 = '_'.join([run_prefix, (run_date - dt.timedelta(days=1)).strftime('%Y-%m-%d_%H:%M'), '*'])
        prev_2 = '_'.join([run_prefix, (run_date - dt.timedelta(days=2)).strftime('%Y-%m-%d_%H:%M'), '*'])
        d03_nc_f = glob.glob(os.path.join(wrf_output_dir, now, 'wrf', 'wrfout_d03_*'))[0]
        d03_nc_f_prev_1 = glob.glob(os.path.join(wrf_output_dir, prev_1, 'wrf', 'wrfout_d03_*'))[0]
        d03_nc_f_prev_2 = glob.glob(os.path.join(wrf_output_dir, prev_2, 'wrf', 'wrfout_d03_*'))[0]

        kelani_basin_flo2d_file = res_mgr.get_resource_path('extraction/local/kelani_basin_points_250m.txt')
        extract_kelani_basin_rainfall_flo2d(d03_nc_f, [d03_nc_f_prev_1, d03_nc_f_prev_2],
                                            os.path.join(wrf_output_dir, now, 'klb_flo2d'),
                                            kelani_basin_file=kelani_basin_flo2d_file)
Code example #17
    def test_extract_kelani_basin_rainfall_flo2d_obs(self):
        mysql_conf_path = '/home/curw/Desktop/2018-05/mysql.json'
        adapter = ext_utils.get_curw_adapter(mysql_config_path=mysql_conf_path)
        wrf_output_dir = tempfile.mkdtemp(prefix='flo2d_obs_')
        files = ['wrfout_d03_2018-05-23_18:00:00_rf']
        run_prefix = 'wrf0'

        for f in files:
            out_dir = utils.create_dir_if_not_exists(
                os.path.join(wrf_output_dir, f.replace('wrfout_d03', run_prefix).replace(':00_rf', '_0000'), 'wrf'))
            shutil.copy2('/home/curw/Desktop/2018-05/2018-05-23_18:00/wrf0/%s' % f, out_dir)

        run_date = dt.datetime.strptime('2018-05-23_18:00', '%Y-%m-%d_%H:%M')
        start_ts_lk = '2018-05-26_00:00'
        now = '_'.join([run_prefix, run_date.strftime('%Y-%m-%d_%H:%M'), '*'])

        d03_nc_f = glob.glob(os.path.join(wrf_output_dir, now, 'wrf', 'wrfout_d03_*'))[0]

        obs_stations = {
            'Kottawa North Dharmapala School': [79.95818, 6.865576, 'A&T Labs', 'wrf_79.957123_6.859688'],
            'IBATTARA2': [79.919, 6.908, 'CUrW IoT', 'wrf_79.902664_6.913757'],
            'Malabe': [79.95738, 6.90396, 'A&T Labs', 'wrf_79.957123_6.913757'],
            # 'Mutwal': [79.8609, 6.95871, 'A&T Labs', 'wrf_79.875435_6.967812'],
            # 'Mulleriyawa': [79.941176, 6.923571, 'A&T Labs', 'wrf_79.929893_6.913757'],
            'Orugodawatta': [79.87887, 6.943741, 'CUrW IoT', 'wrf_79.875435_6.940788'],
        }

        duration_days = (8, 0)

        # kelani_lower_basin_points = res_mgr.get_resource_path('extraction/local/kelani_basin_points_30m.txt')
        kelani_lower_basin_points = None

        extract_kelani_basin_rainfall_flo2d_with_obs(d03_nc_f, adapter, obs_stations,
                                                     os.path.join(wrf_output_dir, now, 'klb_flo2d'), start_ts_lk,
                                                     kelani_lower_basin_points=kelani_lower_basin_points,
                                                     duration_days=duration_days)
Code example #18
File: executor.py Project: hasithadkr7/wrf_docker
def run_wps(wrf_config):
    logging.info('Running WPS: START')
    wrf_home = wrf_config.get('wrf_home')
    wps_dir = utils.get_wps_dir(wrf_home)
    output_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('nfs_dir'), 'results',
                     wrf_config.get('run_id'), 'wps'))

    logging.info('Backup the output dir')
    utils.backup_dir(output_dir)

    logs_dir = utils.create_dir_if_not_exists(os.path.join(output_dir, 'logs'))

    logging.info('Cleaning up files')
    utils.delete_files_with_prefix(wps_dir, 'FILE:*')
    utils.delete_files_with_prefix(wps_dir, 'PFILE:*')
    utils.delete_files_with_prefix(wps_dir, 'met_em*')

    # Linking VTable
    if not os.path.exists(os.path.join(wps_dir, 'Vtable')):
        logging.info('Creating Vtable symlink')
        # create the symlink only when it is missing; an unconditional
        # os.symlink would raise FileExistsError on re-runs
        os.symlink(os.path.join(wps_dir, 'ungrib/Variable_Tables/Vtable.NAM'),
                   os.path.join(wps_dir, 'Vtable'))

    # Running link_grib.csh
    gfs_date, gfs_cycle, start = utils.get_appropriate_gfs_inventory(
        wrf_config)
    dest = utils.get_gfs_data_url_dest_tuple(wrf_config.get('gfs_url'),
                                             wrf_config.get('gfs_inv'),
                                             gfs_date, gfs_cycle, '',
                                             wrf_config.get('gfs_res'),
                                             '')[1].replace('.grb2', '')
    utils.run_subprocess('csh link_grib.csh %s/%s' %
                         (wrf_config.get('gfs_dir'), dest),
                         cwd=wps_dir)

    try:
        # Starting ungrib.exe
        try:
            utils.run_subprocess('./ungrib.exe', cwd=wps_dir)
        finally:
            utils.move_files_with_prefix(wps_dir, 'ungrib.log', logs_dir)

        # Starting geogrid.exe
        if not check_geogrid_output(wps_dir):
            logging.info('Geogrid output not available')
            try:
                utils.run_subprocess('./geogrid.exe', cwd=wps_dir)
            finally:
                utils.move_files_with_prefix(wps_dir, 'geogrid.log', logs_dir)

        # Starting metgrid.exe
        try:
            utils.run_subprocess('./metgrid.exe', cwd=wps_dir)
        finally:
            utils.move_files_with_prefix(wps_dir, 'metgrid.log', logs_dir)
    finally:
        logging.info('Moving namelist wps file')
        utils.move_files_with_prefix(wps_dir, 'namelist.wps', output_dir)

    logging.info('Running WPS: DONE')

    logging.info('Zipping metgrid data')
    metgrid_zip = os.path.join(wps_dir,
                               wrf_config.get('run_id') + '_metgrid.zip')
    utils.create_zip_with_prefix(wps_dir, 'met_em.d*', metgrid_zip)

    logging.info('Moving metgrid data')
    dest_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')
    utils.move_files_with_prefix(wps_dir, metgrid_zip, dest_dir)
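Each WPS step above follows the same try/finally shape so the step's log is archived even when the executable fails. A hypothetical helper (not present in the repository) capturing that pattern:

    def run_wps_step(executable, wps_dir, log_prefix, logs_dir):
        # run one WPS step; always move its log to the run's logs dir
        try:
            utils.run_subprocess(executable, cwd=wps_dir)
        finally:
            utils.move_files_with_prefix(wps_dir, log_prefix, logs_dir)

    # e.g. run_wps_step('./ungrib.exe', wps_dir, 'ungrib.log', logs_dir)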
Code example #19
File: extractor.py Project: hasithadkr7/wrf_docker
def create_rf_plots_wrf(nc_f, plots_output_dir, plots_output_base_dir, lon_min=None, lat_min=None, lon_max=None,
                        lat_max=None, filter_threshold=0.05, run_prefix='WRF'):
    if not all([lon_min, lat_min, lon_max, lat_max]):
        lon_min, lat_min, lon_max, lat_max = constants.SRI_LANKA_EXTENT

    variables = ext_utils.extract_variables(nc_f, 'RAINC, RAINNC', lat_min, lat_max, lon_min, lon_max)

    lats = variables['XLAT']
    lons = variables['XLONG']

    # cell size is calc based on the mean between the lat and lon points
    cz = np.round(np.mean(np.append(lons[1:len(lons)] - lons[0: len(lons) - 1], lats[1:len(lats)]
                                    - lats[0: len(lats) - 1])), 3)
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250, 300, 400, 500, 600, 750]
    cmap = cm.s3pcpn

    basemap = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                      urcrnrlat=lat_max, resolution='h')

    data = variables['RAINC'] + variables['RAINNC']
    logging.info('Filtering with the threshold %f' % filter_threshold)
    data[data < filter_threshold] = 0.0
    variables['PRECIP'] = data

    prefix = 'wrf_plots'
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        t0 = dt.datetime.strptime(variables['Times'][0], '%Y-%m-%d_%H:%M:%S')
        t1 = dt.datetime.strptime(variables['Times'][1], '%Y-%m-%d_%H:%M:%S')
        step = (t1 - t0).total_seconds() / 3600.0

        inst_precip = ext_utils.get_two_element_average(variables['PRECIP'])
        cum_precip = ext_utils.get_two_element_average(variables['PRECIP'], return_diff=False)

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts, shift_mins=30)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_file = os.path.join(temp_dir, 'wrf_inst_' + lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))

            ext_utils.create_asc_file(np.flip(inst_precip[i - 1], 0), lats, lons, inst_file + '.asc', cell_size=cz)

            title = {
                'label': 'Hourly rf for %s LK' % lk_ts.strftime('%Y-%m-%d_%H:%M:%S'),
                'fontsize': 30
            }
            ext_utils.create_contour_plot(inst_precip[i - 1], inst_file + '.png', lat_min, lon_min, lat_max, lon_max,
                                          title, clevs=clevs, cmap=cmap, basemap=basemap)

            if (i * step) % 24 == 0:
                t = 'Daily rf from %s LK to %s LK' % (
                    (lk_ts - dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'), lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
                d = int(i * step / 24) - 1
                logging.info('Creating images for D%d' % d)
                cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)

                if i * step / 24 > 1:
                    cum_precip_24h = cum_precip[i - 1] - cum_precip[i - 1 - int(24 / step)]
                else:
                    cum_precip_24h = cum_precip[i - 1]

                ext_utils.create_asc_file(np.flip(cum_precip_24h, 0), lats, lons, cum_file + '.asc', cell_size=cz)

                ext_utils.create_contour_plot(cum_precip_24h, cum_file + '.png', lat_min, lon_min, lat_max, lon_max, t,
                                              clevs=clevs, cmap=cmap, basemap=basemap)

                gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
                images = [os.path.join(temp_dir, 'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png') for j in
                          np.arange(lk_ts - dt.timedelta(hours=24 - step), lk_ts + dt.timedelta(hours=step),
                                    dt.timedelta(hours=step)).astype(dt.datetime)]
                ext_utils.create_gif(images, gif_file + '.gif')

        logging.info('Creating the zips')
        utils.create_zip_with_prefix(temp_dir, '*.png', os.path.join(temp_dir, 'pngs.zip'))
        utils.create_zip_with_prefix(temp_dir, '*.asc', os.path.join(temp_dir, 'ascs.zip'))

        logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')

        logging.info('Copying pngs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.png', plots_output_dir)
        logging.info('Copying ascs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.asc', plots_output_dir)
        logging.info('Copying gifs to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_output_dir)
        logging.info('Copying zips to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.zip', plots_output_dir)

        plots_latest_dir = os.path.join(plots_output_base_dir, 'latest', run_prefix, os.path.basename(plots_output_dir))
        # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(plots_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + plots_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_latest_dir)
Code example #20
    def process(self, *args, **kwargs):
        config = self.get_config(**kwargs)
        logging.info('wrf config: ' + config.to_json_string())

        start_date = config.get('start_date')
        d03_dir = config.get('wrf_output_dir')
        d03_sl = os.path.join(d03_dir, 'wrfout_d03_' + start_date + ':00_SL')

        # create a temp work dir & get a local copy of the d03.._SL
        temp_dir = utils.create_dir_if_not_exists(
            os.path.join(config.get('wrf_home'), 'temp'))
        shutil.copy2(d03_sl, temp_dir)

        d03_sl = os.path.join(temp_dir, os.path.basename(d03_sl))

        lat_min = 5.722969
        lon_min = 79.52146
        lat_max = 10.06425
        lon_max = 82.18992

        variables = ext_utils.extract_variables(d03_sl, 'RAINC, RAINNC',
                                                lat_min, lat_max, lon_min,
                                                lon_max)

        lats = variables['XLAT']
        lons = variables['XLONG']

        # cell size is calc based on the mean between the lat and lon points
        cz = np.round(
            np.mean(
                np.append(lons[1:len(lons)] - lons[0:len(lons) - 1],
                          lats[1:len(lats)] - lats[0:len(lats) - 1])), 3)
        # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
        # clevs_cum = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100])
        # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
        # norm_cum = colors.BoundaryNorm(boundaries=clevs_cum, ncolors=256)
        # cmap = plt.get_cmap('jet')

        clevs = [
            0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250,
            300, 400, 500, 600, 750
        ]
        clevs_cum = clevs
        norm = None
        norm_cum = None
        cmap = cm.s3pcpn

        basemap = Basemap(projection='merc',
                          llcrnrlon=lon_min,
                          llcrnrlat=lat_min,
                          urcrnrlon=lon_max,
                          urcrnrlat=lat_max,
                          resolution='h')

        filter_threshold = 0.05
        data = variables['RAINC'] + variables['RAINNC']
        logging.info('Filtering with the threshold %f' % filter_threshold)
        data[data < filter_threshold] = 0.0
        variables['PRECIP'] = data

        pngs = []
        ascs = []

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_precip = variables['PRECIP'][i] - variables['PRECIP'][i - 1]

            inst_file = os.path.join(temp_dir, 'wrf_inst_' + time)
            title = {
                'label':
                'Hourly rf for %s LK\n%s UTC' %
                (lk_ts.strftime('%Y-%m-%d_%H:%M:%S'), time),
                'fontsize':
                30
            }

            ext_utils.create_asc_file(np.flip(inst_precip, 0),
                                      lats,
                                      lons,
                                      inst_file + '.asc',
                                      cell_size=cz)
            ascs.append(inst_file + '.asc')

            ext_utils.create_contour_plot(inst_precip,
                                          inst_file + '.png',
                                          lat_min,
                                          lon_min,
                                          lat_max,
                                          lon_max,
                                          title,
                                          clevs=clevs,
                                          cmap=cmap,
                                          basemap=basemap,
                                          norm=norm)
            pngs.append(inst_file + '.png')

            if i % 24 == 0:
                t = 'Daily rf from %s LK to %s LK' % (
                    (lk_ts -
                     dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'),
                    lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
                d = int(i / 24) - 1
                logging.info('Creating images for D%d' % d)
                cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)

                ext_utils.create_asc_file(np.flip(variables['PRECIP'][i], 0),
                                          lats,
                                          lons,
                                          cum_file + '.asc',
                                          cell_size=cz)
                ascs.append(cum_file + '.asc')

                ext_utils.create_contour_plot(variables['PRECIP'][i] -
                                              variables['PRECIP'][i - 24],
                                              cum_file + '.png',
                                              lat_min,
                                              lon_min,
                                              lat_max,
                                              lon_max,
                                              t,
                                              clevs=clevs,
                                              cmap=cmap,
                                              basemap=basemap,
                                              norm=norm_cum)
                pngs.append(cum_file + '.png')

                gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
                images = [
                    os.path.join(
                        temp_dir,
                        'wrf_inst_' + h.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                    for h in np.arange(ts - dt.timedelta(hours=23),
                                       ts + dt.timedelta(hours=1),
                                       dt.timedelta(hours=1)).astype(dt.datetime)
                ]
                ext_utils.create_gif(images, gif_file + '.gif')

        logging.info('Creating the zips')
        utils.create_zip_with_prefix(temp_dir, '*.png',
                                     os.path.join(temp_dir, 'pngs.zip'))
        utils.create_zip_with_prefix(temp_dir, '*.asc',
                                     os.path.join(temp_dir, 'ascs.zip'))
        # utils.create_zipfile(pngs, os.path.join(temp_dir, 'pngs.zip'))
        # utils.create_zipfile(ascs, os.path.join(temp_dir, 'ascs.zip'))

        logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')

        logging.info('Copying pngs to ' + d03_dir)
        utils.move_files_with_prefix(temp_dir, '*.png', d03_dir)
        logging.info('Copying ascs to ' + d03_dir)
        utils.move_files_with_prefix(temp_dir, '*.asc', d03_dir)
        logging.info('Copying gifs to ' + d03_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_dir)
        logging.info('Copying zips to ' + d03_dir)
        utils.copy_files_with_prefix(temp_dir, '*.zip', d03_dir)

        d03_latest_dir = os.path.join(config.get('nfs_dir'), 'latest',
                                      os.path.basename(config.get('wrf_home')))
        # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(d03_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + d03_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_latest_dir)

        logging.info('Cleaning up temp dir')
        shutil.rmtree(temp_dir)
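WRF's RAINC + RAINNC fields are cumulative from the start of the simulation, which is why the hourly ('instantaneous') rainfall above is the difference of consecutive outputs and the daily total is PRECIP[i] - PRECIP[i - 24]. A minimal sketch with illustrative values:

    import numpy as np

    cum = np.array([0.0, 1.5, 4.0, 4.0])  # cumulative mm at hourly outputs
    inst = np.diff(cum)                   # [1.5, 2.5, 0.0] mm per hour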
Code example #21
    def process(self, *args, **kwargs):
        config = self.get_config(**kwargs)
        logging.info('wrf config: ' + config.to_json_string())

        start_date = config.get('start_date')
        d03_dir = config.get('wrf_output_dir')
        d03_sl = os.path.join(d03_dir, 'wrfout_d01_' + start_date + ':00_SL')

        # create a temp work dir & get a local copy of the d03.._SL
        temp_dir = utils.create_dir_if_not_exists(
            os.path.join(config.get('wrf_home'), 'temp_d01'))
        shutil.copy2(d03_sl, temp_dir)

        d03_sl = os.path.join(temp_dir, os.path.basename(d03_sl))

        lat_min = -3.06107
        lon_min = 71.2166
        lat_max = 18.1895
        lon_max = 90.3315

        variables = ext_utils.extract_variables(d03_sl, 'RAINC, RAINNC',
                                                lat_min, lat_max, lon_min,
                                                lon_max)

        lats = variables['XLAT']
        lons = variables['XLONG']

        # cell size is calc based on the mean between the lat and lon points
        cz = np.round(
            np.mean(
                np.append(lons[1:len(lons)] - lons[0:len(lons) - 1],
                          lats[1:len(lats)] - lats[0:len(lats) - 1])), 3)
        # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
        # clevs_cum = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100])
        # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
        # norm_cum = colors.BoundaryNorm(boundaries=clevs_cum, ncolors=256)
        # cmap = plt.get_cmap('jet')

        clevs = [
            0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250,
            300, 400, 500, 600, 750
        ]
        clevs_cum = clevs
        norm = None
        cmap = cm.s3pcpn

        basemap = Basemap(projection='merc',
                          llcrnrlon=lon_min,
                          llcrnrlat=lat_min,
                          urcrnrlon=lon_max,
                          urcrnrlat=lat_max,
                          resolution='h')

        filter_threshold = 0.05
        data = variables['RAINC'] + variables['RAINNC']
        logging.info('Filtering with the threshold %f' % filter_threshold)
        data[data < filter_threshold] = 0.0
        variables['PRECIP'] = data

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_precip = variables['PRECIP'][i] - variables['PRECIP'][i - 1]

            inst_file = os.path.join(temp_dir, 'wrf_inst_' + time)
            title = {
                'label':
                '3-hourly rf for %s LK\n%s UTC' %
                (lk_ts.strftime('%Y-%m-%d_%H:%M:%S'), time),
                'fontsize':
                30
            }
            ext_utils.create_contour_plot(inst_precip,
                                          inst_file + '.png',
                                          lat_min,
                                          lon_min,
                                          lat_max,
                                          lon_max,
                                          title,
                                          clevs=clevs,
                                          cmap=cmap,
                                          basemap=basemap,
                                          norm=norm)

            if i % 8 == 0:
                d = int(i / 8) - 1
                logging.info('Creating gif for D%d' % d)
                gif_file = os.path.join(temp_dir, 'wrf_inst_D01_%dd' % d)
                images = [
                    os.path.join(
                        temp_dir,
                        'wrf_inst_' + h.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                    for h in np.arange(ts - dt.timedelta(hours=24 - 3),
                                       ts + dt.timedelta(hours=3),
                                       dt.timedelta(hours=3)).astype(dt.datetime)
                ]
                ext_utils.create_gif(images, gif_file + '.gif')

        # move all the data in the tmp dir to the nfs
        logging.info('Copying gifs to ' + d03_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_dir)

        d03_latest_dir = os.path.join(config.get('nfs_dir'), 'latest',
                                      os.path.basename(config.get('wrf_home')))
        # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(d03_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + d03_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', d03_latest_dir)

        logging.info('Cleaning up the dir ' + temp_dir)
        shutil.rmtree(temp_dir)
Code example #22
def extract_kelani_basin_rainfall_flo2d_with_obs(nc_f, adapter, obs_stations, output_dir, start_ts_lk,
                                                 duration_days=None, output_prefix='RAINCELL',
                                                 kelani_lower_basin_points=None, kelani_lower_basin_shp=None):
    """
    see the test_extract_kelani_basin_rainfall_flo2d_obs test case for example usage
    :param nc_f: file path of the wrf output
    :param adapter:
    :param obs_stations: dict of stations. {station_name: [lon, lat, name variable, nearest wrf point station name]}
    :param output_dir:
    :param start_ts_lk: start time of the forecast / end time of the observations
    :param duration_days: (optional) a tuple (observation days, forecast days) default (2,3)
    :param output_prefix: (optional) output file name of the RAINCELL file. ex: output_prefix=RAINCELL-150m --> RAINCELL-150m.DAT
    :param kelani_lower_basin_points: (optional)
    :param kelani_lower_basin_shp: (optional)
    :return:
    """
    if duration_days is None:
        duration_days = (2, 3)

    if kelani_lower_basin_points is None:
        kelani_lower_basin_points = res_mgr.get_resource_path('extraction/local/kelani_basin_points_250m.txt')

    if kelani_lower_basin_shp is None:
        kelani_lower_basin_shp = res_mgr.get_resource_path('extraction/shp/klb-wgs84/klb-wgs84.shp')

    points = np.genfromtxt(kelani_lower_basin_points, delimiter=',')

    kel_lon_min = np.min(points, 0)[1]
    kel_lat_min = np.min(points, 0)[2]
    kel_lon_max = np.max(points, 0)[1]
    kel_lat_max = np.max(points, 0)[2]

    diff, kel_lats, kel_lons, times = ext_utils.extract_area_rf_series(nc_f, kel_lat_min, kel_lat_max, kel_lon_min,
                                                                       kel_lon_max)

    def get_bins(arr):
        sz = len(arr)
        return (arr[1:sz - 1] + arr[0:sz - 2]) / 2

    lat_bins = get_bins(kel_lats)
    lon_bins = get_bins(kel_lons)

    t0 = dt.datetime.strptime(times[0], '%Y-%m-%d_%H:%M:%S')
    t1 = dt.datetime.strptime(times[1], '%Y-%m-%d_%H:%M:%S')

    utils.create_dir_if_not_exists(output_dir)

    obs_start = dt.datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M') - dt.timedelta(days=duration_days[0])
    obs_end = dt.datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M')
    forecast_end = dt.datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M') + dt.timedelta(days=duration_days[1])

    obs = _get_observed_precip(obs_stations, obs_start, obs_end, duration_days, adapter)

    thess_poly = spatial_utils.get_voronoi_polygons(obs_stations, kelani_lower_basin_shp, add_total_area=False)

    output_file_path = os.path.join(output_dir, output_prefix + '.DAT')

    # update points array with the Thiessen polygon idx
    point_thess_idx = []
    for point in points:
        point_thess_idx.append(spatial_utils.is_inside_geo_df(thess_poly, lon=point[1], lat=point[2]))

    with open(output_file_path, 'w') as output_file:
        res_mins = int((t1 - t0).total_seconds() / 60)
        data_hours = int(sum(duration_days) * 24 * 60 / res_mins)
        start_ts_lk = obs_start.strftime('%Y-%m-%d %H:%M:%S')
        end_ts = forecast_end.strftime('%Y-%m-%d %H:%M:%S')

        output_file.write("%d %d %s %s\n" % (res_mins, data_hours, start_ts_lk, end_ts))

        for t in range(int(24 * 60 * duration_days[0] / res_mins) + 1):
            for i, point in enumerate(points):
                rf = float(obs[point_thess_idx[i]].values[t]) if point_thess_idx[i] is not None else 0
                output_file.write('%d %.1f\n' % (point[0], rf))

        forecast_start_idx = int(
            np.where(times == utils.datetime_lk_to_utc(obs_end, shift_mins=30).strftime('%Y-%m-%d_%H:%M:%S'))[0])
        for t in range(int(24 * 60 * duration_days[1] / res_mins) - 1):
            for point in points:
                rf_x = np.digitize(point[1], lon_bins)
                rf_y = np.digitize(point[2], lat_bins)
                if t + forecast_start_idx + 1 < len(times):
                    output_file.write('%d %.1f\n' % (point[0], diff[t + forecast_start_idx + 1, rf_y, rf_x]))
                else:
                    output_file.write('%d %.1f\n' % (point[0], 0))
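get_bins computes the midpoints between consecutive cell-centre coordinates, so np.digitize can map an arbitrary point onto the index of its nearest WRF grid cell. A worked sketch with illustrative coordinates:

    import numpy as np

    kel_lons = np.array([79.80, 79.85, 79.90, 79.95])  # cell-centre longitudes
    lon_bins = (kel_lons[1:3] + kel_lons[0:2]) / 2     # midpoints [79.825, 79.875]
    rf_x = np.digitize(79.86, lon_bins)                # 1, i.e. nearest centre 79.85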
Code example #23
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_metro_colombo(nc_f, wrf_output, wrf_output_base, curw_db_adapter=None, curw_db_upsert=False,
                          run_prefix='WRF', run_name='Cloud-1'):
    """
    extract Metro-Colombo rf and divide the area into 4 quadrants
    :param wrf_output_base: 
    :param run_name: 
    :param nc_f: 
    :param wrf_output: 
    :param curw_db_adapter: If not none, data will be pushed to the db 
    :param run_prefix: 
    :param curw_db_upsert: 
    :return: 
    """
    prefix = 'met_col'
    lon_min, lat_min, lon_max, lat_max = constants.COLOMBO_EXTENT

    nc_vars = ext_utils.extract_variables(nc_f, ['RAINC', 'RAINNC'], lat_min, lat_max, lon_min, lon_max)
    lats = nc_vars['XLAT']
    lons = nc_vars['XLONG']
    prcp = nc_vars['RAINC'] + nc_vars['RAINNC']
    times = nc_vars['Times']

    diff = ext_utils.get_two_element_average(prcp)

    width = len(lons)
    height = len(lats)

    output_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, prefix))

    basin_rf = np.mean(diff[0:(len(times) - 1 if len(times) < 24 else 24), :, :])

    alpha_file_path = os.path.join(wrf_output_base, prefix + '_alphas.txt')
    utils.create_dir_if_not_exists(os.path.dirname(alpha_file_path))
    with open(alpha_file_path, 'a+') as alpha_file:
        t = utils.datetime_utc_to_lk(dt.datetime.strptime(times[0], '%Y-%m-%d_%H:%M:%S'), shift_mins=30)
        alpha_file.write('%s\t%f\n' % (t.strftime('%Y-%m-%d_%H:%M:%S'), basin_rf))

    cz = ext_utils.get_mean_cell_size(lats, lons)
    no_data = -99

    divs = (2, 2)
    div_rf = {}
    for i in range(divs[0] * divs[1]):
        div_rf[prefix + str(i)] = []

    with TemporaryDirectory(prefix=prefix) as temp_dir:
        subsection_file_path = os.path.join(temp_dir, 'sub_means.txt')
        with open(subsection_file_path, 'w') as subsection_file:
            for tm in range(0, len(times) - 1):
                t_str = (
                    utils.datetime_utc_to_lk(dt.datetime.strptime(times[tm], '%Y-%m-%d_%H:%M:%S'),
                                             shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')

                output_file_path = os.path.join(temp_dir, 'rf_' + t_str.replace(' ', '_') + '.asc')
                ext_utils.create_asc_file(np.flip(diff[tm, :, :], 0), lats, lons, output_file_path, cell_size=cz,
                                          no_data_val=no_data)

                # writing subsection file
                x_idx = [round(i * width / divs[0]) for i in range(0, divs[0] + 1)]
                y_idx = [round(i * height / divs[1]) for i in range(0, divs[1] + 1)]

                subsection_file.write(t_str)
                for j in range(len(y_idx) - 1):
                    for i in range(len(x_idx) - 1):
                        quad = j * divs[1] + i
                        sub_sec_mean = np.mean(diff[tm, y_idx[j]:y_idx[j + 1], x_idx[i]: x_idx[i + 1]])
                        subsection_file.write('\t%.4f' % sub_sec_mean)
                        div_rf[prefix + str(quad)].append([t_str, sub_sec_mean])
                subsection_file.write('\n')

        utils.create_zip_with_prefix(temp_dir, 'rf_*.asc', os.path.join(temp_dir, 'ascs.zip'), clean_up=True)

        utils.move_files_with_prefix(temp_dir, '*', output_dir)

    # writing to the database
    if curw_db_adapter is not None:
        for i in range(divs[0] * divs[1]):
            name = prefix + str(i)
            station = [Station.CUrW, name, name, -999, -999, 0, "met col quadrant %d" % i]
            if ext_utils.create_station_if_not_exists(curw_db_adapter, station):
                logging.info('%s station created' % name)

        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, div_rf, upsert=curw_db_upsert, source=run_prefix, name=run_name)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')

    return basin_rf
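
A hedged usage sketch (the paths below are placeholders, not taken from the project; `constants.COLOMBO_EXTENT` must already resolve to the `(lon_min, lat_min, lon_max, lat_max)` tuple unpacked above):

# Hypothetical invocation -- all paths are assumptions for illustration.
nc_file = '/mnt/nfs/results/WRF_run0/wrf/wrfout_d03_2017-10-02_12:00:00_rf'
basin_rf = extract_metro_colombo(nc_file,
                                 wrf_output='/mnt/nfs/results/WRF_run0/wrf',
                                 wrf_output_base='/mnt/nfs/results')
print('Mean Metro-Colombo rainfall (first 24 h): %.3f' % basin_rf)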
Code example #24
File: extractor.py Project: hasithadkr7/wrf_docker
def extract_kelani_basin_rainfall_flo2d(nc_f, nc_f_prev_days, output_dir, avg_basin_rf=1.0, kelani_basin_file=None,
                                        target_rfs=None, output_prefix='RAINCELL'):
    """
    :param output_prefix:
    :param nc_f:
    :param nc_f_prev_days: 
    :param output_dir: 
    :param avg_basin_rf: 
    :param kelani_basin_file: 
    :param target_rfs: 
    :return: 
    """
    if target_rfs is None:
        target_rfs = [100, 150, 200, 250, 300]
    if kelani_basin_file is None:
        kelani_basin_file = res_mgr.get_resource_path('extraction/local/kelani_basin_points_250m.txt')

    points = np.genfromtxt(kelani_basin_file, delimiter=',')

    kel_lon_min = np.min(points, 0)[1]
    kel_lat_min = np.min(points, 0)[2]
    kel_lon_max = np.max(points, 0)[1]
    kel_lat_max = np.max(points, 0)[2]

    diff, kel_lats, kel_lons, times = ext_utils.extract_area_rf_series(nc_f, kel_lat_min, kel_lat_max, kel_lon_min,
                                                                       kel_lon_max)

    def get_bins(arr):
        # midpoints of consecutive cell centres (first sz - 2 pairs), used as
        # bin edges for np.digitize when snapping basin points to grid cells
        sz = len(arr)
        return (arr[1:sz - 1] + arr[0:sz - 2]) / 2

    lat_bins = get_bins(kel_lats)
    lon_bins = get_bins(kel_lons)

    t0 = dt.datetime.strptime(times[0], '%Y-%m-%d_%H:%M:%S')
    t1 = dt.datetime.strptime(times[1], '%Y-%m-%d_%H:%M:%S')
    t_end = dt.datetime.strptime(times[-1], '%Y-%m-%d_%H:%M:%S')

    utils.create_dir_if_not_exists(output_dir)

    prev_diff = []
    prev_days = len(nc_f_prev_days)
    for i in range(prev_days):
        if nc_f_prev_days[i]:
            p_diff, _, _, _ = ext_utils.extract_area_rf_series(nc_f_prev_days[i], kel_lat_min, kel_lat_max, kel_lon_min,
                                                               kel_lon_max)
            prev_diff.append(p_diff)
        else:
            prev_diff.append(None)

    def write_forecast_to_raincell_file(output_file_path, alpha):
        with open(output_file_path, 'w') as output_file:
            res_mins = int((t1 - t0).total_seconds() / 60)
            # total number of timesteps (not hours, despite the name): forecast
            # steps plus one day's worth of steps per previous day
            data_hours = int(len(times) + prev_days * 24 * 60 / res_mins)
            start_ts = utils.datetime_utc_to_lk(t0 - dt.timedelta(days=prev_days), shift_mins=30).strftime(
                '%Y-%m-%d %H:%M:%S')
            end_ts = utils.datetime_utc_to_lk(t_end, shift_mins=30).strftime('%Y-%m-%d %H:%M:%S')

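            # header line: timestep resolution (minutes), total timestep count,
            # start and end timestamps in local (Sri Lanka) time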
            output_file.write("%d %d %s %s\n" % (res_mins, data_hours, start_ts, end_ts))

            for d in range(prev_days):
                for t in range(int(24 * 60 / res_mins)):
                    for point in points:
                        rf_x = np.digitize(point[1], lon_bins)
                        rf_y = np.digitize(point[2], lat_bins)
                        if prev_diff[prev_days - 1 - d] is not None:
                            output_file.write('%d %.1f\n' % (point[0], prev_diff[prev_days - 1 - d][t, rf_y, rf_x]))
                        else:
                            output_file.write('%d %.1f\n' % (point[0], 0))

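            # forecast period: the first 24 h of steps are scaled by alpha
            # (target_rf / avg_basin_rf for the scaled variants); later steps are unscaled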
            for t in range(len(times)):
                for point in points:
                    rf_x = np.digitize(point[1], lon_bins)
                    rf_y = np.digitize(point[2], lat_bins)
                    if t < int(24 * 60 / res_mins):
                        output_file.write('%d %.1f\n' % (point[0], diff[t, rf_y, rf_x] * alpha))
                    else:
                        output_file.write('%d %.1f\n' % (point[0], diff[t, rf_y, rf_x]))

    with TemporaryDirectory(prefix='curw_raincell') as temp_dir:
        raincell_temp = os.path.join(temp_dir, output_prefix + '.DAT')
        write_forecast_to_raincell_file(raincell_temp, 1)

        for target_rf in target_rfs:
            write_forecast_to_raincell_file('%s.%d' % (raincell_temp, target_rf), target_rf / avg_basin_rf)

        utils.create_zip_with_prefix(temp_dir, output_prefix + '.DAT*', os.path.join(temp_dir, output_prefix + '.zip'),
                                     clean_up=True)
        utils.move_files_with_prefix(temp_dir, output_prefix + '.zip', utils.create_dir_if_not_exists(output_dir))
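
A hedged usage sketch follows (the paths are placeholders; judging by the `prev_diff[prev_days - 1 - d]` indexing above, `nc_f_prev_days` appears to be ordered most-recent-day first). For hourly data (`res_mins` = 60) and two previous days, the header's step count works out to `len(times) + 2 * 24`:

# Hypothetical invocation -- the file paths are assumptions for illustration.
prev = ['/data/wrfout_d03_yesterday_rf',      # most recent previous day
        '/data/wrfout_d03_two_days_ago_rf']   # oldest previous day
extract_kelani_basin_rainfall_flo2d('/data/wrfout_d03_today_rf', prev,
                                    output_dir='/data/flo2d', avg_basin_rf=2.5)
# result: /data/flo2d/RAINCELL.zip holding RAINCELL.DAT plus RAINCELL.DAT.<target_rf> variants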
Code example #25
File: executor.py Project: hasithadkr7/wrf_docker
def run_em_real(wrf_config):
    logging.info('Running em_real...')

    wrf_home = wrf_config.get('wrf_home')
    em_real_dir = utils.get_em_real_dir(wrf_home)
    procs = wrf_config.get('procs')
    run_id = wrf_config.get('run_id')
    output_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('nfs_dir'), 'results', run_id, 'wrf'))
    archive_dir = utils.create_dir_if_not_exists(
        os.path.join(wrf_config.get('archive_dir'), 'results', run_id, 'wrf'))

    logging.info('Backup the output dir')
    utils.backup_dir(output_dir)

    logs_dir = utils.create_dir_if_not_exists(os.path.join(output_dir, 'logs'))

    logging.info('Copying metgrid.zip')
    metgrid_dir = os.path.join(wrf_config.get('nfs_dir'), 'metgrid')
    if wrf_config.is_set('wps_run_id'):
        logging.info('wps_run_id is set. Copying metgrid from ' +
                     wrf_config.get('wps_run_id'))
        utils.copy_files_with_prefix(
            metgrid_dir,
            wrf_config.get('wps_run_id') + '_metgrid.zip', em_real_dir)
        metgrid_zip = os.path.join(
            em_real_dir,
            wrf_config.get('wps_run_id') + '_metgrid.zip')
    else:
        utils.copy_files_with_prefix(metgrid_dir,
                                     wrf_config.get('run_id') + '_metgrid.zip',
                                     em_real_dir)
        metgrid_zip = os.path.join(em_real_dir,
                                   wrf_config.get('run_id') + '_metgrid.zip')

    logging.info('Extracting metgrid.zip')
    ZipFile(metgrid_zip, 'r',
            compression=ZIP_DEFLATED).extractall(path=em_real_dir)

    # logs destination: nfs/logs/xxxx/rsl*
    try:
        try:
            logging.info('Starting real.exe')
            utils.run_subprocess(
                'mpirun --allow-run-as-root -np %d ./real.exe' % procs,
                cwd=em_real_dir)
        finally:
            logging.info('Moving Real log files...')
            utils.create_zip_with_prefix(em_real_dir,
                                         'rsl*',
                                         os.path.join(em_real_dir,
                                                      'real_rsl.zip'),
                                         clean_up=True)
            utils.move_files_with_prefix(em_real_dir, 'real_rsl.zip', logs_dir)

        try:
            logging.info('Starting wrf.exe')
            utils.run_subprocess(
                'mpirun --allow-run-as-root -np %d ./wrf.exe' % procs,
                cwd=em_real_dir)
        finally:
            logging.info('Moving WRF log files...')
            utils.create_zip_with_prefix(em_real_dir,
                                         'rsl*',
                                         os.path.join(em_real_dir,
                                                      'wrf_rsl.zip'),
                                         clean_up=True)
            utils.move_files_with_prefix(em_real_dir, 'wrf_rsl.zip', logs_dir)
    finally:
        logging.info('Moving namelist input file')
        utils.move_files_with_prefix(em_real_dir, 'namelist.input', output_dir)

    logging.info('WRF em_real: DONE! Moving data to the output dir')

    logging.info('Extracting rf from domain3')
    d03_nc = glob.glob(os.path.join(em_real_dir, 'wrfout_d03_*'))[0]
    ncks_query = 'ncks -v %s %s %s' % ('RAINC,RAINNC,XLAT,XLONG,Times', d03_nc,
                                       d03_nc + '_rf')
    utils.run_subprocess(ncks_query)

    logging.info('Extracting rf from domain1')
    d01_nc = glob.glob(os.path.join(em_real_dir, 'wrfout_d01_*'))[0]
    ncks_query = 'ncks -v %s %s %s' % ('RAINC,RAINNC,XLAT,XLONG,Times', d01_nc,
                                       d01_nc + '_rf')
    utils.run_subprocess(ncks_query)

    logging.info('Moving data to the output dir')
    utils.move_files_with_prefix(em_real_dir, 'wrfout_d03*_rf', output_dir)
    utils.move_files_with_prefix(em_real_dir, 'wrfout_d01*_rf', output_dir)
    logging.info('Moving data to the archive dir')
    utils.move_files_with_prefix(em_real_dir, 'wrfout_*', archive_dir)

    logging.info('Cleaning up files')
    utils.delete_files_with_prefix(em_real_dir, 'met_em*')
    utils.delete_files_with_prefix(em_real_dir, 'rsl*')
    os.remove(metgrid_zip)
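
`run_em_real` only needs `wrf_config` to expose `get()` and `is_set()`. A minimal stand-in sketch, assuming the real project ships its own config wrapper; the key names are taken from the calls above, while the class and all values are placeholders:

# Hypothetical config wrapper -- not the project's actual config class.
class WrfConfig:
    def __init__(self, d):
        self._d = d

    def get(self, key):
        return self._d[key]

    def is_set(self, key):
        return key in self._d


config = WrfConfig({
    'wrf_home': '/wrf',                  # WRF installation root
    'nfs_dir': '/mnt/nfs',               # shared dir holding metgrid zips and results
    'archive_dir': '/mnt/archive',       # archive destination for raw wrfout files
    'procs': 4,                          # MPI processes for real.exe / wrf.exe
    'run_id': 'wrf0_2017-10-02_12:00',
    # 'wps_run_id': '...',               # optional: reuse metgrid from an earlier WPS run
})
run_em_real(config)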