def write_forecast_to_raincell_file(output_file_path, alpha):
    # nested helper: relies on variables from the enclosing scope
    # (t0, t1, t_end, times, prev_days, prev_diff, diff, points, lat_bins, lon_bins)
    with open(output_file_path, 'w') as output_file:
        res_mins = int((t1 - t0).total_seconds() / 60)
        data_hours = int(len(times) + prev_days * 24 * 60 / res_mins)
        start_ts = utils.datetime_utc_to_lk(t0 - dt.timedelta(days=prev_days), shift_mins=30).strftime(
            '%Y-%m-%d %H:%M:%S')
        end_ts = utils.datetime_utc_to_lk(t_end, shift_mins=30).strftime('%Y-%m-%d %H:%M:%S')

        output_file.write("%d %d %s %s\n" % (res_mins, data_hours, start_ts, end_ts))

        # observed (previous days') rainfall, oldest day first
        for d in range(prev_days):
            for t in range(int(24 * 60 / res_mins)):
                for point in points:
                    rf_x = np.digitize(point[1], lon_bins)
                    rf_y = np.digitize(point[2], lat_bins)
                    if prev_diff[prev_days - 1 - d] is not None:
                        output_file.write('%d %.1f\n' % (point[0], prev_diff[prev_days - 1 - d][t, rf_y, rf_x]))
                    else:
                        output_file.write('%d %.1f\n' % (point[0], 0))

        # forecast rainfall; the first forecast day is bias-corrected by alpha
        for t in range(len(times)):
            for point in points:
                rf_x = np.digitize(point[1], lon_bins)
                rf_y = np.digitize(point[2], lat_bins)
                if t < int(24 * 60 / res_mins):
                    output_file.write('%d %.1f\n' % (point[0], diff[t, rf_y, rf_x] * alpha))
                else:
                    output_file.write('%d %.1f\n' % (point[0], diff[t, rf_y, rf_x]))
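# A minimal, self-contained sketch (not part of the pipeline) of how the raincell
# writer above maps a station coordinate onto the rainfall grid with np.digitize:
# the bin edges act as cell boundaries, so digitize returns the index of the cell
# containing the point. All values below are made up for illustration.
def _demo_digitize_cell_lookup():
    import numpy as np

    # hypothetical 0.1-degree grid covering a small extent
    lon_bins = np.arange(79.8, 80.3, 0.1)  # cell boundaries along longitude
    lat_bins = np.arange(6.7, 7.2, 0.1)    # cell boundaries along latitude

    point = (1, 80.05, 6.95)  # (cell_id, lon, lat), same layout as the points array above
    rf_x = np.digitize(point[1], lon_bins)
    rf_y = np.digitize(point[2], lat_bins)
    print('point %d falls into grid cell (y=%d, x=%d)' % (point[0], rf_y, rf_x))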
def extract_points_array_rf_series(nc_f, points_array, boundaries=None, rf_var_list=None, lat_var='XLAT',
                                   lon_var='XLONG', time_var='Times'):
    """
    :param nc_f: path to the WRF netCDF output file
    :param points_array: np structured array with rows of [name, lon, lat]
    :param boundaries: list [lat_min, lat_max, lon_min, lon_max]
    :param rf_var_list: rainfall variables to accumulate (default ['RAINC', 'RAINNC'])
    :param lat_var:
    :param lon_var:
    :param time_var:
    :return: np structured array with [(time, name1, name2, ....)]
    """
    if rf_var_list is None:
        rf_var_list = ['RAINC', 'RAINNC']

    if boundaries is None:
        lat_min = np.min(points_array[points_array.dtype.names[2]])
        lat_max = np.max(points_array[points_array.dtype.names[2]])
        lon_min = np.min(points_array[points_array.dtype.names[1]])
        lon_max = np.max(points_array[points_array.dtype.names[1]])
    else:
        lat_min, lat_max, lon_min, lon_max = boundaries

    variables = extract_variables(nc_f, rf_var_list, lat_min, lat_max, lon_min, lon_max, lat_var, lon_var, time_var)

    prcp = variables[rf_var_list[0]]
    for i in range(1, len(rf_var_list)):
        prcp = prcp + variables[rf_var_list[i]]

    diff = get_two_element_average(prcp, return_diff=True)

    result = np.array([
        utils.datetime_utc_to_lk(dt.datetime.strptime(t, '%Y-%m-%d_%H:%M:%S'),
                                 shift_mins=30).strftime('%Y-%m-%d %H:%M:%S').encode('utf-8')
        for t in variables[time_var][:-1]
    ], dtype=np.dtype([(time_var, 'U19')]))

    for p in points_array:
        lat_start_idx = np.argmin(abs(variables['XLAT'] - p[2]))
        lon_start_idx = np.argmin(abs(variables['XLONG'] - p[1]))
        rf = np.round(diff[:, lat_start_idx, lon_start_idx], 6)
        # use this for 4 point average
        # rf = np.round(np.mean(diff[:, lat_start_idx:lat_start_idx + 2, lon_start_idx:lon_start_idx + 2],
        #                       axis=(1, 2)), 6)
        result = append_fields(result, p[0].decode(), rf, usemask=False)

    return result
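# A hedged usage sketch for extract_points_array_rf_series: the function expects a
# structured array whose rows are (name, lon, lat), with the names as bytes (note the
# p[0].decode() above). The file path and station names here are hypothetical.
def _demo_extract_points_array():
    import numpy as np

    points = np.array([(b'colombo', 79.8612, 6.9271), (b'kandy', 80.6350, 7.2906)],
                      dtype=[('name', '|S20'), ('lon', 'f4'), ('lat', 'f4')])
    series = extract_points_array_rf_series('/path/to/wrfout_d03_SL', points)
    # series is a structured array: series['Times'], series['colombo'], series['kandy']
    print(series.dtype.names)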
def process_jaxa_zip_file(zip_file_path, out_file_path, lat_min, lon_min, lat_max, lon_max,
                          output_prefix='jaxa_sat'):
    sat_zip = zipfile.ZipFile(zip_file_path)
    sat = np.genfromtxt(sat_zip.open(os.path.basename(zip_file_path).replace('.zip', '')), delimiter=',',
                        names=True)

    sat_filt = np.sort(sat[(sat['Lat'] <= lat_max) & (sat['Lat'] >= lat_min)
                           & (sat['Lon'] <= lon_max) & (sat['Lon'] >= lon_min)], order=['Lat', 'Lon'])
    lats = np.sort(np.unique(sat_filt['Lat']))
    lons = np.sort(np.unique(sat_filt['Lon']))

    data = sat_filt['RainRate'].reshape(len(lats), len(lons))
    ext_utils.create_asc_file(np.flip(data, 0), lats, lons, out_file_path)

    # clevs = np.concatenate(([-1, 0], np.array([pow(2, i) for i in range(0, 9)])))
    # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
    # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
    # cmap = plt.get_cmap('jet')
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 75, 100, 150, 200, 250, 300]
    # clevs = [0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100]
    norm = None
    cmap = cm.s3pcpn

    ts = dt.datetime.strptime(os.path.basename(out_file_path).replace(output_prefix + '_', '').replace('.asc', ''),
                              '%Y-%m-%d_%H:%M')
    lk_ts = utils.datetime_utc_to_lk(ts)
    title_opts = {
        'label': output_prefix + ' ' + lk_ts.strftime('%Y-%m-%d %H:%M') + ' LK\n'
                 + ts.strftime('%Y-%m-%d %H:%M') + ' UTC',
        'fontsize': 30
    }
    ext_utils.create_contour_plot(data, out_file_path + '.png', np.min(lats), np.min(lons), np.max(lats),
                                  np.max(lons), title_opts, clevs=clevs, cmap=cmap, norm=norm)
def extract_mean_rainfall_from_shp_file(nc_f, wrf_output, output_prefix, output_name, basin_shp_file, basin_extent,
                                        curw_db_adapter=None, curw_db_upsert=False, run_prefix='WRF',
                                        run_name='Cloud-1'):
    lon_min, lat_min, lon_max, lat_max = basin_extent

    nc_vars = ext_utils.extract_variables(nc_f, ['RAINC', 'RAINNC'], lat_min, lat_max, lon_min, lon_max)
    lats = nc_vars['XLAT']
    lons = nc_vars['XLONG']
    prcp = nc_vars['RAINC'] + nc_vars['RAINNC']
    times = nc_vars['Times']

    diff = ext_utils.get_two_element_average(prcp)

    polys = shapefile.Reader(basin_shp_file)

    output_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, output_prefix))

    with TemporaryDirectory(prefix=output_prefix) as temp_dir:
        output_file_path = os.path.join(temp_dir, output_prefix + '.txt')
        kub_rf = {}
        with open(output_file_path, 'w') as output_file:
            kub_rf[output_name] = []
            for t in range(0, len(times) - 1):
                cnt = 0
                rf_sum = 0.0
                for y in range(0, len(lats)):
                    for x in range(0, len(lons)):
                        if utils.is_inside_polygon(polys, lats[y], lons[x]):
                            cnt = cnt + 1
                            rf_sum = rf_sum + diff[t, y, x]
                mean_rf = rf_sum / cnt

                t_str = (utils.datetime_utc_to_lk(dt.datetime.strptime(times[t], '%Y-%m-%d_%H:%M:%S'),
                                                  shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')
                output_file.write('%s\t%.4f\n' % (t_str, mean_rf))
                kub_rf[output_name].append([t_str, mean_rf])

        utils.move_files_with_prefix(temp_dir, '*.txt', output_dir)

    if curw_db_adapter is not None:
        station = [Station.CUrW, output_name, output_name, -999, -999, 0, 'Kelani upper basin mean rainfall']
        if ext_utils.create_station_if_not_exists(curw_db_adapter, station):
            logging.info('%s station created' % output_name)

        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, kub_rf, upsert=curw_db_upsert, name=run_name,
                                      source=run_prefix)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')
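# WRF's RAINC/RAINNC are cumulative since the simulation start, so per-step rainfall
# has to be recovered by differencing. A minimal numpy sketch of that idea; this
# illustrates the concept only and is not the exact ext_utils.get_two_element_average
# implementation.
def _demo_cumulative_to_incremental():
    import numpy as np

    cum = np.array([0.0, 1.2, 3.0, 3.0, 7.5])  # cumulative rf at each output step
    inc = np.diff(cum)                          # rf that fell within each interval
    print(inc)  # [1.2 1.8 0.  4.5]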
def extract_weather_stations(nc_f, wrf_output, weather_stations=None, curw_db_adapter=None, curw_db_upsert=False,
                             run_prefix='WRF', run_name='Cloud-1'):
    if weather_stations is None:
        weather_stations = res_mgr.get_resource_path('extraction/local/kelani_basin_stations.txt')

    nc_fid = Dataset(nc_f, 'r')
    times_len, times = ext_utils.extract_time_data(nc_f)

    prefix = 'stations_rf'
    stations_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, prefix))

    stations_rf = {}
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        with open(weather_stations, 'r') as csvfile:
            stations = csv.reader(csvfile, delimiter=' ')

            for row in stations:
                logging.info(' '.join(row))
                # the station file columns are used as grid indices into the netCDF
                # variables, so cast them from str before indexing
                lon = int(row[1])
                lat = int(row[2])

                station_prcp = nc_fid.variables['RAINC'][:, lat, lon] + nc_fid.variables['RAINNC'][:, lat, lon]

                station_diff = ext_utils.get_two_element_average(station_prcp)

                stations_rf[row[0]] = []
                station_file_path = os.path.join(temp_dir, row[0] + '_%s.txt' % prefix)
                with open(station_file_path, 'w') as station_file:
                    for t in range(0, len(times) - 1):
                        t_str = (utils.datetime_utc_to_lk(dt.datetime.strptime(times[t], '%Y-%m-%d_%H:%M:%S'),
                                                          shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')
                        station_file.write('%s\t%.4f\n' % (t_str, station_diff[t]))
                        stations_rf[row[0]].append([t_str, station_diff[t]])

        utils.move_files_with_prefix(temp_dir, '*.txt', stations_dir)

    if curw_db_adapter is not None:
        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, stations_rf, upsert=curw_db_upsert, name=run_name,
                                      source=run_prefix)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')

    nc_fid.close()
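# The station list is parsed with csv.reader(delimiter=' '), i.e. one station per
# line. A hypothetical line and its parse, assuming (as the int casts above do) that
# the two numeric columns are grid indices:
def _demo_station_row():
    import csv
    import io

    row = next(csv.reader(io.StringIO('colombo 12 34'), delimiter=' '))
    print(row)  # ['colombo', '12', '34']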
def create_rf_plots_wrf(nc_f, plots_output_dir, plots_output_base_dir, lon_min=None, lat_min=None, lon_max=None,
                        lat_max=None, filter_threshold=0.05, run_prefix='WRF'):
    if not all([lon_min, lat_min, lon_max, lat_max]):
        lon_min, lat_min, lon_max, lat_max = constants.SRI_LANKA_EXTENT

    variables = ext_utils.extract_variables(nc_f, 'RAINC, RAINNC', lat_min, lat_max, lon_min, lon_max)

    lats = variables['XLAT']
    lons = variables['XLONG']

    # cell size is calculated from the mean spacing of the lat and lon points
    cz = np.round(np.mean(np.append(lons[1:len(lons)] - lons[0: len(lons) - 1],
                                    lats[1:len(lats)] - lats[0: len(lats) - 1])), 3)

    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250, 300, 400, 500, 600, 750]
    cmap = cm.s3pcpn

    basemap = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                      urcrnrlat=lat_max, resolution='h')

    data = variables['RAINC'] + variables['RAINNC']
    logging.info('Filtering with the threshold %f' % filter_threshold)
    data[data < filter_threshold] = 0.0
    variables['PRECIP'] = data

    prefix = 'wrf_plots'
    with TemporaryDirectory(prefix=prefix) as temp_dir:
        t0 = dt.datetime.strptime(variables['Times'][0], '%Y-%m-%d_%H:%M:%S')
        t1 = dt.datetime.strptime(variables['Times'][1], '%Y-%m-%d_%H:%M:%S')
        step = (t1 - t0).total_seconds() / 3600.0

        inst_precip = ext_utils.get_two_element_average(variables['PRECIP'])
        cum_precip = ext_utils.get_two_element_average(variables['PRECIP'], return_diff=False)

        for i in range(1, len(variables['Times'])):
            time = variables['Times'][i]
            ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
            lk_ts = utils.datetime_utc_to_lk(ts, shift_mins=30)
            logging.info('processing %s', time)

            # instantaneous precipitation (hourly)
            inst_file = os.path.join(temp_dir, 'wrf_inst_' + lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
            ext_utils.create_asc_file(np.flip(inst_precip[i - 1], 0), lats, lons, inst_file + '.asc', cell_size=cz)

            title = {
                'label': 'Hourly rf for %s LK' % lk_ts.strftime('%Y-%m-%d_%H:%M:%S'),
                'fontsize': 30
            }
            ext_utils.create_contour_plot(inst_precip[i - 1], inst_file + '.png', lat_min, lon_min, lat_max,
                                          lon_max, title, clevs=clevs, cmap=cmap, basemap=basemap)

            if (i * step) % 24 == 0:
                t = 'Daily rf from %s LK to %s LK' % (
                    (lk_ts - dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'),
                    lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
                d = int(i * step / 24) - 1
                logging.info('Creating images for D%d' % d)

                cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)
                if i * step / 24 > 1:
                    cum_precip_24h = cum_precip[i - 1] - cum_precip[i - 1 - int(24 / step)]
                else:
                    cum_precip_24h = cum_precip[i - 1]
                ext_utils.create_asc_file(np.flip(cum_precip_24h, 0), lats, lons, cum_file + '.asc', cell_size=cz)
                ext_utils.create_contour_plot(cum_precip_24h, cum_file + '.png', lat_min, lon_min, lat_max,
                                              lon_max, t, clevs=clevs, cmap=cmap, basemap=basemap)

                gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
                images = [os.path.join(temp_dir, 'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                          for j in np.arange(lk_ts - dt.timedelta(hours=24 - step),
                                             lk_ts + dt.timedelta(hours=step),
                                             dt.timedelta(hours=step)).astype(dt.datetime)]
                ext_utils.create_gif(images, gif_file + '.gif')

        logging.info('Creating the zips')
        utils.create_zip_with_prefix(temp_dir, '*.png', os.path.join(temp_dir, 'pngs.zip'))
        utils.create_zip_with_prefix(temp_dir, '*.asc', os.path.join(temp_dir, 'ascs.zip'))

        logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
        utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')
        logging.info('Copying pngs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.png', plots_output_dir)
        logging.info('Copying ascs to ' + plots_output_dir)
        utils.move_files_with_prefix(temp_dir, '*.asc', plots_output_dir)
        logging.info('Copying gifs to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_output_dir)
        logging.info('Copying zips to ' + plots_output_dir)
        utils.copy_files_with_prefix(temp_dir, '*.zip', plots_output_dir)

        plots_latest_dir = os.path.join(plots_output_base_dir, 'latest', run_prefix,
                                        os.path.basename(plots_output_dir))  # <nfs>/latest/wrf0 .. 3
        utils.create_dir_if_not_exists(plots_latest_dir)
        # todo: this needs to be adjusted to handle the multiple runs
        logging.info('Copying gifs to ' + plots_latest_dir)
        utils.copy_files_with_prefix(temp_dir, '*.gif', plots_latest_dir)
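# A hedged usage sketch for create_rf_plots_wrf: the netCDF path and the output
# directories below are hypothetical; a real run needs an actual wrfout d03 file,
# and the extent falls back to constants.SRI_LANKA_EXTENT when the corner arguments
# are omitted.
def _demo_create_rf_plots_wrf():
    create_rf_plots_wrf(nc_f='/mnt/nfs/output/wrfout_d03_2017-08-13_18:00:00_SL',
                        plots_output_dir='/mnt/nfs/output/plots',
                        plots_output_base_dir='/mnt/nfs',
                        run_prefix='wrf0')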
def push_wrf_rainfall_to_db(nc_f, curw_db_adapter=None, lon_min=None, lat_min=None, lon_max=None, lat_max=None,
                            run_prefix='WRF', upsert=False, run_name='Cloud-1', station_prefix='wrf'):
    """
    :param nc_f: path to the WRF netCDF output file
    :param curw_db_adapter: if not None, data will be pushed to the db
    :param lon_min:
    :param lat_min:
    :param lon_max:
    :param lat_max:
    :param run_prefix:
    :param upsert:
    :param run_name:
    :param station_prefix:
    :return:
    """
    if curw_db_adapter is None:
        logging.info('curw_db_adapter not available. Unable to push data!')
        return

    if not all([lon_min, lat_min, lon_max, lat_max]):
        lon_min, lat_min, lon_max, lat_max = constants.SRI_LANKA_EXTENT

    nc_vars = ext_utils.extract_variables(nc_f, ['RAINC', 'RAINNC'], lat_min, lat_max, lon_min, lon_max)
    lats = nc_vars['XLAT']
    lons = nc_vars['XLONG']
    prcp = nc_vars['RAINC'] + nc_vars['RAINNC']
    times = nc_vars['Times']

    diff = ext_utils.get_two_element_average(prcp)

    width = len(lons)
    height = len(lats)

    def random_check_stations_exist():
        # sample a handful of grid points; if any is missing, assume the stations
        # have not been created yet
        for _ in range(10):
            _x = lons[int(random() * width)]
            _y = lats[int(random() * height)]
            _name = '%s_%.6f_%.6f' % (station_prefix, _x, _y)
            _query = {'name': _name}
            if curw_db_adapter.get_station(_query) is None:
                logging.debug('Random stations check fail')
                return False
        logging.debug('Random stations check success')
        return True

    stations_exists = random_check_stations_exist()

    rf_ts = {}
    for y in range(height):
        for x in range(width):
            lat = lats[y]
            lon = lons[x]

            station_id = '%s_%.6f_%.6f' % (station_prefix, lon, lat)
            name = station_id

            if not stations_exists:
                logging.info('Creating station %s ...' % name)
                station = [Station.WRF, station_id, name, str(lon), str(lat), str(0), "WRF point"]
                curw_db_adapter.create_station(station)

            # add the rf series to the dict
            ts = []
            for i in range(len(diff)):
                t = utils.datetime_utc_to_lk(dt.datetime.strptime(times[i], '%Y-%m-%d_%H:%M:%S'), shift_mins=30)
                ts.append([t.strftime('%Y-%m-%d %H:%M:%S'), diff[i, y, x]])
            rf_ts[name] = ts

    ext_utils.push_rainfall_to_db(curw_db_adapter, rf_ts, source=run_prefix, upsert=upsert, name=run_name)
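# The db push keys every grid point by a deterministic station name built from the
# prefix and the cell's lon/lat, which is what makes the random existence check above
# possible. A tiny sketch of that naming rule (coordinate values made up):
def _demo_station_naming():
    station_prefix = 'wrf'
    lon, lat = 79.874560, 6.912340
    name = '%s_%.6f_%.6f' % (station_prefix, lon, lat)
    print(name)  # wrf_79.874560_6.912340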
def extract_metro_colombo(nc_f, wrf_output, wrf_output_base, curw_db_adapter=None, curw_db_upsert=False,
                          run_prefix='WRF', run_name='Cloud-1'):
    """
    extract Metro-Colombo rf and divide the area into 4 quadrants
    :param nc_f:
    :param wrf_output:
    :param wrf_output_base:
    :param curw_db_adapter: if not None, data will be pushed to the db
    :param curw_db_upsert:
    :param run_prefix:
    :param run_name:
    :return: mean rainfall of the basin over the first day
    """
    prefix = 'met_col'
    lon_min, lat_min, lon_max, lat_max = constants.COLOMBO_EXTENT

    nc_vars = ext_utils.extract_variables(nc_f, ['RAINC', 'RAINNC'], lat_min, lat_max, lon_min, lon_max)
    lats = nc_vars['XLAT']
    lons = nc_vars['XLONG']
    prcp = nc_vars['RAINC'] + nc_vars['RAINNC']
    times = nc_vars['Times']

    diff = ext_utils.get_two_element_average(prcp)

    width = len(lons)
    height = len(lats)

    output_dir = utils.create_dir_if_not_exists(os.path.join(wrf_output, prefix))

    # basin mean over the first day (or all available steps if fewer than 24)
    basin_rf = np.mean(diff[0:(len(times) - 1 if len(times) < 24 else 24), :, :])

    alpha_file_path = os.path.join(wrf_output_base, prefix + '_alphas.txt')
    utils.create_dir_if_not_exists(os.path.dirname(alpha_file_path))
    with open(alpha_file_path, 'a+') as alpha_file:
        t = utils.datetime_utc_to_lk(dt.datetime.strptime(times[0], '%Y-%m-%d_%H:%M:%S'), shift_mins=30)
        alpha_file.write('%s\t%f\n' % (t.strftime('%Y-%m-%d_%H:%M:%S'), basin_rf))

    cz = ext_utils.get_mean_cell_size(lats, lons)
    no_data = -99

    divs = (2, 2)
    div_rf = {}
    for i in range(divs[0] * divs[1]):
        div_rf[prefix + str(i)] = []

    with TemporaryDirectory(prefix=prefix) as temp_dir:
        subsection_file_path = os.path.join(temp_dir, 'sub_means.txt')
        with open(subsection_file_path, 'w') as subsection_file:
            for tm in range(0, len(times) - 1):
                t_str = (utils.datetime_utc_to_lk(dt.datetime.strptime(times[tm], '%Y-%m-%d_%H:%M:%S'),
                                                  shift_mins=30)).strftime('%Y-%m-%d %H:%M:%S')

                output_file_path = os.path.join(temp_dir, 'rf_' + t_str.replace(' ', '_') + '.asc')
                ext_utils.create_asc_file(np.flip(diff[tm, :, :], 0), lats, lons, output_file_path, cell_size=cz,
                                          no_data_val=no_data)

                # writing the subsection file
                x_idx = [round(i * width / divs[0]) for i in range(0, divs[0] + 1)]
                y_idx = [round(i * height / divs[1]) for i in range(0, divs[1] + 1)]

                subsection_file.write(t_str)
                for j in range(len(y_idx) - 1):
                    for i in range(len(x_idx) - 1):
                        quad = j * divs[1] + i
                        sub_sec_mean = np.mean(diff[tm, y_idx[j]:y_idx[j + 1], x_idx[i]: x_idx[i + 1]])
                        subsection_file.write('\t%.4f' % sub_sec_mean)
                        div_rf[prefix + str(quad)].append([t_str, sub_sec_mean])
                subsection_file.write('\n')

        utils.create_zip_with_prefix(temp_dir, 'rf_*.asc', os.path.join(temp_dir, 'ascs.zip'), clean_up=True)

        utils.move_files_with_prefix(temp_dir, '*', output_dir)

    # writing to the database
    if curw_db_adapter is not None:
        for i in range(divs[0] * divs[1]):
            name = prefix + str(i)
            station = [Station.CUrW, name, name, -999, -999, 0, "met col quadrant %d" % i]
            if ext_utils.create_station_if_not_exists(curw_db_adapter, station):
                logging.info('%s station created' % name)

        logging.info('Pushing data to the db...')
        ext_utils.push_rainfall_to_db(curw_db_adapter, div_rf, upsert=curw_db_upsert, source=run_prefix,
                                      name=run_name)
    else:
        logging.info('curw_db_adapter not available. Unable to push data!')

    return basin_rf
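# A minimal sketch of the quadrant bookkeeping in extract_metro_colombo: the grid is
# cut into divs[0] x divs[1] sub-blocks, and each (j, i) pair is flattened into a
# single quadrant id. The grid dimensions below are made up.
def _demo_quadrant_indexing():
    divs = (2, 2)
    width, height = 10, 8
    x_idx = [round(i * width / divs[0]) for i in range(0, divs[0] + 1)]   # [0, 5, 10]
    y_idx = [round(i * height / divs[1]) for i in range(0, divs[1] + 1)]  # [0, 4, 8]
    for j in range(len(y_idx) - 1):
        for i in range(len(x_idx) - 1):
            quad = j * divs[1] + i
            print('quad %d -> rows %d:%d, cols %d:%d' % (quad, y_idx[j], y_idx[j + 1], x_idx[i], x_idx[i + 1]))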
def process_jaxa_zip_file(zip_file_path, out_file_path, lat_min, lon_min, lat_max, lon_max, archive_data=False,
                          output_prefix='jaxa_sat', db_adapter_config=None):
    sat_zip = zipfile.ZipFile(zip_file_path)
    sat = np.genfromtxt(sat_zip.open(os.path.basename(zip_file_path).replace('.zip', '')), delimiter=',',
                        names=True)

    sat_filt = np.sort(sat[(sat['Lat'] <= lat_max) & (sat['Lat'] >= lat_min)
                           & (sat['Lon'] <= lon_max) & (sat['Lon'] >= lon_min)], order=['Lat', 'Lon'])
    lats = np.sort(np.unique(sat_filt['Lat']))
    lons = np.sort(np.unique(sat_filt['Lon']))

    data = sat_filt['RainRate'].reshape(len(lats), len(lons))

    ext_utils.create_asc_file(np.flip(data, 0), lats, lons, out_file_path)

    # clevs = np.concatenate(([-1, 0], np.array([pow(2, i) for i in range(0, 9)])))
    # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
    # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
    # cmap = plt.get_cmap('jet')
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 75, 100, 150, 200, 250, 300]
    # clevs = [0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100]
    norm = None
    cmap = cm.s3pcpn

    ts = dt.datetime.strptime(os.path.basename(out_file_path).replace(output_prefix + '_', '').replace('.asc', ''),
                              '%Y-%m-%d_%H:%M')
    lk_ts = utils.datetime_utc_to_lk(ts)
    title_opts = {
        'label': output_prefix + ' ' + lk_ts.strftime('%Y-%m-%d %H:%M') + ' LK\n'
                 + ts.strftime('%Y-%m-%d %H:%M') + ' UTC',
        'fontsize': 30
    }
    ext_utils.create_contour_plot(data, out_file_path + '.png', np.min(lats), np.min(lons), np.max(lats),
                                  np.max(lons), title_opts, clevs=clevs, cmap=cmap, norm=norm)

    if archive_data:
        if not utils.file_exists_nonempty(out_file_path + '.archive'):
            np.savetxt(out_file_path + '.archive', data, fmt='%g')
        else:
            logging.info('%s already exists' % (out_file_path + '.archive'))

    if not db_adapter_config:
        logging.info('db_adapter not available. Unable to push data!')
        return

    db_adapter = ext_utils.get_curw_adapter(mysql_config=db_adapter_config)

    width = len(lons)
    height = len(lats)

    station_prefix = 'sat'
    run_name = 'Cloud-1'
    upsert = True

    def random_check_stations_exist():
        for _ in range(10):
            _x = lons[int(random() * width)]
            _y = lats[int(random() * height)]
            _name = '%s_%.6f_%.6f' % (station_prefix, _x, _y)
            _query = {'name': _name}
            if db_adapter.get_station(_query) is None:
                logging.debug('Random stations check fail')
                return False
        logging.debug('Random stations check success')
        return True

    stations_exists = random_check_stations_exist()

    rf_ts = {}
    for y in range(height):
        for x in range(width):
            lat = lats[y]
            lon = lons[x]

            station_id = '%s_%.6f_%.6f' % (station_prefix, lon, lat)
            name = station_id

            if not stations_exists:
                logging.info('Creating station %s ...' % name)
                station = [Station.Sat, station_id, name, str(lon), str(lat), str(0), "Sat point"]
                db_adapter.create_station(station)

            # add the rf series to the dict
            rf_ts[name] = [[lk_ts.strftime('%Y-%m-%d %H:%M:%S'), data[y, x]]]

    ext_utils.push_rainfall_to_db(db_adapter, rf_ts, source=station_prefix, name=run_name, types=['Observed'],
                                  upsert=upsert)
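# process_jaxa_zip_file recovers the UTC timestamp by parsing the output file name,
# so out_file_path must follow '<output_prefix>_%Y-%m-%d_%H:%M.asc'. A sketch of the
# expected naming (the path here is hypothetical):
def _demo_jaxa_out_file_name():
    import datetime as dt
    import os

    out_file_path = '/tmp/jaxa_sat_2017-08-14_03:30.asc'
    output_prefix = 'jaxa_sat'
    ts = dt.datetime.strptime(
        os.path.basename(out_file_path).replace(output_prefix + '_', '').replace('.asc', ''), '%Y-%m-%d_%H:%M')
    print(ts)  # 2017-08-14 03:30:00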
def process(self, *args, **kwargs):
    config = self.get_config(**kwargs)
    logging.info('wrf config: ' + config.to_json_string())

    start_date = config.get('start_date')
    d03_dir = config.get('wrf_output_dir')
    d03_sl = os.path.join(d03_dir, 'wrfout_d01_' + start_date + ':00_SL')

    # create a temp work dir & get a local copy of the d01.._SL
    temp_dir = utils.create_dir_if_not_exists(os.path.join(config.get('wrf_home'), 'temp_d01'))
    shutil.copy2(d03_sl, temp_dir)
    d03_sl = os.path.join(temp_dir, os.path.basename(d03_sl))

    lat_min = -3.06107
    lon_min = 71.2166
    lat_max = 18.1895
    lon_max = 90.3315

    variables = ext_utils.extract_variables(d03_sl, 'RAINC, RAINNC', lat_min, lat_max, lon_min, lon_max)

    lats = variables['XLAT']
    lons = variables['XLONG']

    # cell size is calculated from the mean spacing of the lat and lon points
    cz = np.round(np.mean(np.append(lons[1:len(lons)] - lons[0:len(lons) - 1],
                                    lats[1:len(lats)] - lats[0:len(lats) - 1])), 3)

    # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
    # clevs_cum = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100])
    # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
    # norm_cum = colors.BoundaryNorm(boundaries=clevs_cum, ncolors=256)
    # cmap = plt.get_cmap('jet')
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250, 300, 400, 500, 600, 750]
    clevs_cum = clevs
    norm = None
    cmap = cm.s3pcpn

    basemap = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                      urcrnrlat=lat_max, resolution='h')

    filter_threshold = 0.05
    data = variables['RAINC'] + variables['RAINNC']
    logging.info('Filtering with the threshold %f' % filter_threshold)
    data[data < filter_threshold] = 0.0
    variables['PRECIP'] = data

    for i in range(1, len(variables['Times'])):
        time = variables['Times'][i]
        ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
        lk_ts = utils.datetime_utc_to_lk(ts)
        logging.info('processing %s', time)

        # instantaneous precipitation (3-hourly for d01)
        inst_precip = variables['PRECIP'][i] - variables['PRECIP'][i - 1]

        inst_file = os.path.join(temp_dir, 'wrf_inst_' + time)
        title = {
            'label': '3Hourly rf for %s LK\n%s UTC' % (lk_ts.strftime('%Y-%m-%d_%H:%M:%S'), time),
            'fontsize': 30
        }
        ext_utils.create_contour_plot(inst_precip, inst_file + '.png', lat_min, lon_min, lat_max, lon_max, title,
                                      clevs=clevs, cmap=cmap, basemap=basemap, norm=norm)

        if i % 8 == 0:
            d = int(i / 8) - 1
            logging.info('Creating gif for D%d' % d)
            gif_file = os.path.join(temp_dir, 'wrf_inst_D01_%dd' % d)
            images = [os.path.join(temp_dir, 'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                      for j in np.arange(ts - dt.timedelta(hours=24 - 3), ts + dt.timedelta(hours=3),
                                         dt.timedelta(hours=3)).astype(dt.datetime)]
            ext_utils.create_gif(images, gif_file + '.gif')

    # move all the data in the temp dir to the nfs
    logging.info('Copying gifs to ' + d03_dir)
    utils.copy_files_with_prefix(temp_dir, '*.gif', d03_dir)

    d03_latest_dir = os.path.join(config.get('nfs_dir'), 'latest',
                                  os.path.basename(config.get('wrf_home')))  # <nfs>/latest/wrf0 .. 3
    utils.create_dir_if_not_exists(d03_latest_dir)
    # todo: this needs to be adjusted to handle the multiple runs
    logging.info('Copying gifs to ' + d03_latest_dir)
    utils.copy_files_with_prefix(temp_dir, '*.gif', d03_latest_dir)

    logging.info('Cleaning up the dir ' + temp_dir)
    shutil.rmtree(temp_dir)
def process(self, *args, **kwargs):
    config = self.get_config(**kwargs)
    logging.info('wrf config: ' + config.to_json_string())

    start_date = config.get('start_date')
    d03_dir = config.get('wrf_output_dir')
    d03_sl = os.path.join(d03_dir, 'wrfout_d03_' + start_date + ':00_SL')

    # create a temp work dir & get a local copy of the d03.._SL
    temp_dir = utils.create_dir_if_not_exists(os.path.join(config.get('wrf_home'), 'temp'))
    shutil.copy2(d03_sl, temp_dir)
    d03_sl = os.path.join(temp_dir, os.path.basename(d03_sl))

    lat_min = 5.722969
    lon_min = 79.52146
    lat_max = 10.06425
    lon_max = 82.18992

    variables = ext_utils.extract_variables(d03_sl, 'RAINC, RAINNC', lat_min, lat_max, lon_min, lon_max)

    lats = variables['XLAT']
    lons = variables['XLONG']

    # cell size is calculated from the mean spacing of the lat and lon points
    cz = np.round(np.mean(np.append(lons[1:len(lons)] - lons[0:len(lons) - 1],
                                    lats[1:len(lats)] - lats[0:len(lats) - 1])), 3)

    # clevs = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30])
    # clevs_cum = 10 * np.array([0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 25, 30, 50, 75, 100])
    # norm = colors.BoundaryNorm(boundaries=clevs, ncolors=256)
    # norm_cum = colors.BoundaryNorm(boundaries=clevs_cum, ncolors=256)
    # cmap = plt.get_cmap('jet')
    clevs = [0, 1, 2.5, 5, 7.5, 10, 15, 20, 30, 40, 50, 70, 100, 150, 200, 250, 300, 400, 500, 600, 750]
    clevs_cum = clevs
    norm = None
    norm_cum = None
    cmap = cm.s3pcpn

    basemap = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                      urcrnrlat=lat_max, resolution='h')

    filter_threshold = 0.05
    data = variables['RAINC'] + variables['RAINNC']
    logging.info('Filtering with the threshold %f' % filter_threshold)
    data[data < filter_threshold] = 0.0
    variables['PRECIP'] = data

    pngs = []
    ascs = []
    for i in range(1, len(variables['Times'])):
        time = variables['Times'][i]
        ts = dt.datetime.strptime(time, '%Y-%m-%d_%H:%M:%S')
        lk_ts = utils.datetime_utc_to_lk(ts)
        logging.info('processing %s', time)

        # instantaneous precipitation (hourly)
        inst_precip = variables['PRECIP'][i] - variables['PRECIP'][i - 1]

        inst_file = os.path.join(temp_dir, 'wrf_inst_' + time)
        title = {
            'label': 'Hourly rf for %s LK\n%s UTC' % (lk_ts.strftime('%Y-%m-%d_%H:%M:%S'), time),
            'fontsize': 30
        }
        ext_utils.create_asc_file(np.flip(inst_precip, 0), lats, lons, inst_file + '.asc', cell_size=cz)
        ascs.append(inst_file + '.asc')

        ext_utils.create_contour_plot(inst_precip, inst_file + '.png', lat_min, lon_min, lat_max, lon_max, title,
                                      clevs=clevs, cmap=cmap, basemap=basemap, norm=norm)
        pngs.append(inst_file + '.png')

        if i % 24 == 0:
            t = 'Daily rf from %s LK to %s LK' % (
                (lk_ts - dt.timedelta(hours=24)).strftime('%Y-%m-%d_%H:%M:%S'),
                lk_ts.strftime('%Y-%m-%d_%H:%M:%S'))
            d = int(i / 24) - 1
            logging.info('Creating images for D%d' % d)

            cum_file = os.path.join(temp_dir, 'wrf_cum_%dd' % d)
            ext_utils.create_asc_file(np.flip(variables['PRECIP'][i], 0), lats, lons, cum_file + '.asc',
                                      cell_size=cz)
            ascs.append(cum_file + '.asc')

            ext_utils.create_contour_plot(variables['PRECIP'][i] - variables['PRECIP'][i - 24], cum_file + '.png',
                                          lat_min, lon_min, lat_max, lon_max, t, clevs=clevs_cum, cmap=cmap,
                                          basemap=basemap, norm=norm_cum)
            pngs.append(cum_file + '.png')

            gif_file = os.path.join(temp_dir, 'wrf_inst_%dd' % d)
            images = [os.path.join(temp_dir, 'wrf_inst_' + j.strftime('%Y-%m-%d_%H:%M:%S') + '.png')
                      for j in np.arange(ts - dt.timedelta(hours=23), ts + dt.timedelta(hours=1),
                                         dt.timedelta(hours=1)).astype(dt.datetime)]
            ext_utils.create_gif(images, gif_file + '.gif')

    logging.info('Creating the zips')
    utils.create_zip_with_prefix(temp_dir, '*.png', os.path.join(temp_dir, 'pngs.zip'))
    utils.create_zip_with_prefix(temp_dir, '*.asc', os.path.join(temp_dir, 'ascs.zip'))
    # utils.create_zipfile(pngs, os.path.join(temp_dir, 'pngs.zip'))
    # utils.create_zipfile(ascs, os.path.join(temp_dir, 'ascs.zip'))

    logging.info('Cleaning up instantaneous pngs and ascs - wrf_inst_*')
    utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.png')
    utils.delete_files_with_prefix(temp_dir, 'wrf_inst_*.asc')

    logging.info('Copying pngs to ' + d03_dir)
    utils.move_files_with_prefix(temp_dir, '*.png', d03_dir)
    logging.info('Copying ascs to ' + d03_dir)
    utils.move_files_with_prefix(temp_dir, '*.asc', d03_dir)
    logging.info('Copying gifs to ' + d03_dir)
    utils.copy_files_with_prefix(temp_dir, '*.gif', d03_dir)
    logging.info('Copying zips to ' + d03_dir)
    utils.copy_files_with_prefix(temp_dir, '*.zip', d03_dir)

    d03_latest_dir = os.path.join(config.get('nfs_dir'), 'latest',
                                  os.path.basename(config.get('wrf_home')))  # <nfs>/latest/wrf0 .. 3
    utils.create_dir_if_not_exists(d03_latest_dir)
    # todo: this needs to be adjusted to handle the multiple runs
    logging.info('Copying gifs to ' + d03_latest_dir)
    utils.copy_files_with_prefix(temp_dir, '*.gif', d03_latest_dir)

    logging.info('Cleaning up temp dir')
    shutil.rmtree(temp_dir)
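# Both process() methods above build their gif frame lists by stepping np.arange over
# datetime objects: numpy coerces them to datetime64, and .astype(dt.datetime) converts
# the result back to python datetimes. A self-contained sketch of the trick
# (timestamps made up):
def _demo_datetime_range_frames():
    import datetime as dt
    import numpy as np

    ts = dt.datetime(2017, 8, 14, 0, 0)
    frames = np.arange(ts - dt.timedelta(hours=23), ts + dt.timedelta(hours=1),
                       dt.timedelta(hours=1)).astype(dt.datetime)
    print(['wrf_inst_' + f.strftime('%Y-%m-%d_%H:%M:%S') + '.png' for f in frames[:3]])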