def create_outflow_old(dir_path, ts_start, ts_end):
    """Write FLO-2D OUTFLOW.DAT for the time range [ts_start, ts_end].

    Reads DB credentials from ./outflow/config_old.json, pulls the tidal
    forecast timeseries, and appends hourly 'S' records after the 'N' line
    of ./outflow/INITTIDAL.CONF.

    :param dir_path: directory in which OUTFLOW.DAT is created
    :param ts_start: range start, 'YYYY-MM-DD HH:MM:SS' (also the hour-0 base)
    :param ts_end: range end, 'YYYY-MM-DD HH:MM:SS'

    Exits the process with status 1 when no tidal data is found.
    Relies on module-level TIDAL_FORECAST_ID and DAT_WIDTH.
    """
    outflow_file_path = os.path.join(dir_path, 'OUTFLOW.DAT')
    init_tidal_path = os.path.join(os.getcwd(), 'outflow', 'INITTIDAL.CONF')
    config_path = os.path.join(os.getcwd(), 'outflow', 'config_old.json')
    print('create_outflow_old|outflow_file_path : ', outflow_file_path)
    print('create_outflow_old|init_tidal_path : ', init_tidal_path)
    print('create_outflow_old|config_path : ', config_path)
    with open(config_path) as json_file:
        config = json.load(json_file)
    adapter = MySQLAdapter(host=config['db_host'], user=config['db_user'],
                           password=config['db_password'], db=config['db_name'])
    opts = {
        'from': ts_start,
        'to': ts_end,
    }
    tidal_timeseries = get_forecast_timeseries(adapter, TIDAL_FORECAST_ID, opts)
    if len(tidal_timeseries) > 0:
        print('tidal_timeseries::', len(tidal_timeseries),
              tidal_timeseries[0], tidal_timeseries[-1])
    else:
        print('No data found for tidal timeseries: ', tidal_timeseries)
        # FIX: close the DB connection before exiting; the original leaked
        # the adapter on this path because sys.exit() skipped adapter.close().
        adapter.close()
        sys.exit(1)
    adapter.close()
    print('Open FLO2D OUTFLOW ::', outflow_file_path)
    lines = []
    print('Reading INIT TIDAL CONF...')
    with open(init_tidal_path) as init_tidal_conf_file:
        init_tidal_levels = init_tidal_conf_file.readlines()
    for init_tidal_level in init_tidal_levels:
        if len(init_tidal_level.split()):  # skip empty lines
            lines.append(init_tidal_level)
            # After the 'N' (node) line, emit the hour-0 record followed by
            # one 'S' record per tidal timeseries step.
            if init_tidal_level[0] == 'N':
                lines.append('{0} {1:{w}} {2:{w}}\n'.format(
                    'S', 0, 0, w=DAT_WIDTH))
                base_date_time = datetime.strptime(
                    ts_start, '%Y-%m-%d %H:%M:%S')
                for step in tidal_timeseries:
                    # step[0] is a datetime, step[1] the tidal level.
                    hours_so_far = (step[0] - base_date_time)
                    hours_so_far = 24 * hours_so_far.days + hours_so_far.seconds / (
                        60 * 60)
                    lines.append('{0} {1:{w}} {2:{w}{b}}\n'.format(
                        'S', int(hours_so_far), float(step[1]),
                        b='.2f', w=DAT_WIDTH))
    # FIX: context manager guarantees the file is closed even on error
    # (original used a bare open()/close() pair).
    with open(outflow_file_path, 'w') as outflow_file:
        outflow_file.writelines(lines)
    print('Finished writing OUTFLOW.DAT')
def create_hybrid_raincell(dir_path, run_date, run_time, forward, backward,
                           res_mins='60', model_prefix='wrf',
                           forecast_source='wrf0', run_name='Cloud-1'):
    """Create a FLO-2D RAINCELL.DAT in *dir_path* from observed rainfall.

    Builds Voronoi (Thiessen) polygons from the validated observation
    stations over the lower Kelani basin, then writes one rainfall value per
    basin grid point per timestep.

    :param dir_path: output directory for RAINCELL.DAT
    :param run_date: 'YYYY-MM-DD'
    :param run_time: 'HH:MM:SS'
    :param forward: forecast horizon in days
    :param backward: observation look-back in days
    :param res_mins: timestep resolution in minutes (string or int; default 60)
    :param model_prefix, forecast_source, run_name: accepted for interface
        compatibility; not used by this observed-only variant.
    """
    try:
        observed_adapter = None
        kelani_basin_points_file = get_resource_path(
            'extraction/local/kelani_basin_points_250m.txt')
        kelani_lower_basin_shp_file = get_resource_path(
            'extraction/shp/klb-wgs84/klb-wgs84.shp')
        # NOTE(review): reference_net_cdf is assigned but never used in this
        # variant (the mike-input variant uses it); kept for parity.
        reference_net_cdf = get_resource_path(
            'extraction/netcdf/wrf_wrfout_d03_2019-03-31_18_00_00_rf')
        config_path = os.path.join(os.getcwd(), 'config.json')
        with open(config_path) as json_file:
            config = json.load(json_file)
        if 'forecast_db_config' in config:
            forecast_db_config = config['forecast_db_config']
        if 'observed_db_config' in config:
            observed_db_config = config['observed_db_config']
        if 'klb_obs_stations' in config:
            obs_stations = copy.deepcopy(config['klb_obs_stations'])
        # FIX: res_mins defaults to the string '60' but was used below in
        # `60 * res_mins` arithmetic and a %d format, which raises TypeError.
        # Convert once up front, exactly as create_hybrid_mike_input does.
        res_mins = int(res_mins)
        print('[run_date, run_time] : ', [run_date, run_time])
        start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                        '%Y-%m-%d %H:%M:%S')
        start_ts_lk = start_ts_lk.strftime(
            '%Y-%m-%d_%H:00')  # e.g. '2018-05-24_08:00'
        duration_days = (int(backward), int(forward))
        obs_start = datetime.strptime(
            start_ts_lk, '%Y-%m-%d_%H:%M') - timedelta(days=duration_days[0])
        obs_end = datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M')
        forecast_end = datetime.strptime(
            start_ts_lk, '%Y-%m-%d_%H:%M') + timedelta(days=duration_days[1])
        print([obs_start, obs_end, forecast_end])
        fcst_duration_start = obs_end.strftime('%Y-%m-%d %H:%M:%S')
        # Observation window starts at midnight `backward` days before the
        # forecast start. FIX: use the already-int duration_days[0] instead of
        # the raw `backward` argument, which may be a string.
        obs_duration_start = (
            datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
            timedelta(days=duration_days[0])).strftime('%Y-%m-%d 00:00:00')
        print('obs_duration_start : ', obs_duration_start)
        print('fcst_duration_start : ', fcst_duration_start)
        observed_duration = int(
            (datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
             datetime.strptime(obs_duration_start, '%Y-%m-%d %H:%M:%S'))
            .total_seconds() / (60 * res_mins))
        print('observed_duration : ', observed_duration)
        raincell_file_path = os.path.join(dir_path, 'RAINCELL.DAT')
        if not os.path.isfile(raincell_file_path):
            # Columns of the points file: cell-id, lon, lat.
            points = np.genfromtxt(kelani_basin_points_file, delimiter=',')
            kel_lon_min = np.min(points, 0)[1]
            kel_lat_min = np.min(points, 0)[2]
            kel_lon_max = np.max(points, 0)[1]
            kel_lat_max = np.max(points, 0)[2]
            print('[kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max] : ',
                  [kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max])
            observed_adapter = MySQLAdapter(
                host=observed_db_config['host'],
                user=observed_db_config['user'],
                password=observed_db_config['password'],
                db=observed_db_config['db'])
            observed_precipitations = get_observed_precip(
                obs_stations, obs_duration_start, fcst_duration_start,
                observed_duration, observed_adapter, forecast_source='wrf0')
            observed_adapter.close()
            observed_adapter = None
            # Keep only stations that actually returned data.
            validated_obs_station = {}
            print('observed_precipitations.keys() : ',
                  observed_precipitations.keys())
            for station_name in obs_stations.keys():
                if station_name in observed_precipitations.keys():
                    validated_obs_station[station_name] = obs_stations[
                        station_name]
                else:
                    print('station_name : ', station_name)
            print('validated_obs_station : ', validated_obs_station)
            # Thiessen interpolation needs at least 3 valid stations.
            if len(validated_obs_station) >= 3 and bool(
                    observed_precipitations):
                thess_poly = get_voronoi_polygons(
                    validated_obs_station, kelani_lower_basin_shp_file,
                    add_total_area=False)
                print('thess_poly : ', thess_poly)
                # For each grid point, find which Thiessen polygon it falls in
                # (None when outside all polygons).
                point_thess_idx = []
                for point in points:
                    point_thess_idx.append(
                        is_inside_geo_df(thess_poly, lon=point[1],
                                         lat=point[2]))
                print('len(points)', len(points))
                print('len(point_thess_idx)', len(point_thess_idx))
                print('point_thess_idx', point_thess_idx)
                with open(raincell_file_path, 'w') as output_file:
                    output_file.write(
                        "%d %d %s %s\n" %
                        (res_mins, observed_duration, obs_duration_start,
                         fcst_duration_start))
                    print('range 1 : ',
                          int(24 * 60 * duration_days[0] / res_mins) + 1)
                    print('range 2 : ',
                          int(24 * 60 * duration_days[1] / res_mins) - 1)
                    # NOTE(review): the original writes `observed_duration - 5`
                    # timesteps; the -5 offset is unexplained — confirm intent.
                    for t in range(observed_duration - 5):
                        for i, point in enumerate(points):
                            rf = float(
                                observed_precipitations[
                                    point_thess_idx[i]].values[t]
                            ) if point_thess_idx[i] is not None else 0
                            output_file.write('%d %.1f\n' % (point[0], rf))
            else:
                print('----------------------------------------------')
                print('No observed data.')
                print('----------------------------------------------')
    except Exception as e:
        print('Raincell generation error.')
        traceback.print_exc()
        try:
            if observed_adapter is not None:
                observed_adapter.close()
        except Exception as ex:
            print(str(ex))
adapter, kub_observed_stations, obs_start, obs_end) obs_klb_mean_df = get_observed_klb_mean( adapter, klb_observed_stations, obs_start, obs_end) # print('obs_kub_mean_df : ', obs_kub_mean_df) # print('obs_klb_mean_df : ', obs_klb_mean_df) kub_mean_df = pd.concat( [obs_kub_mean_df, fcst_kub_mean_df]) klb_mean_df = pd.concat( [obs_klb_mean_df, fcst_klb_mean_df]) #print('kub_mean_df : ', kub_mean_df) #print('klb_mean_df : ', klb_mean_df) mean_df = pd.merge(kub_mean_df, klb_mean_df, on='time') print('mean_df : ', mean_df) fh = open(raincsv_file_path, 'w') csvWriter = csv.writer(fh, delimiter=',', quotechar='|') # Write Metadata https://publicwiki.deltares.nl/display/FEWSDOC/CSV csvWriter.writerow( ['Location Names', 'Awissawella', 'Colombo']) csvWriter.writerow( ['Location Ids', 'Awissawella', 'Colombo']) csvWriter.writerow(['Time', 'Rainfall', 'Rainfall']) fh.close() with open(raincsv_file_path, 'a') as f: mean_df.to_csv(f, header=False) adapter.close() except Exception as ex: adapter.close() print("Download required files|Exception: ", str(ex)) except Exception as e: print("Exception occurred: ", str(e))
# password=fcst_db_configs['password'], db=fcst_db_configs['db']) obs_start = ts_start_datetime.strftime('%Y-%m-%d %H:%M:%S') obs_end = run_datetime.strftime('%Y-%m-%d %H:%M:%S') print('[obs_start, obs_end] : ', [obs_start, obs_end]) fcst_start = obs_end fcst_end = ts_end_datetime.strftime('%Y-%m-%d %H:%M:%S') print('[fcst_start, fcst_end] : ', [fcst_start, fcst_end]) forecast_duration = int((datetime.strptime(fcst_end, '%Y-%m-%d %H:%M:%S') - datetime.strptime( fcst_start, '%Y-%m-%d %H:%M:%S')).total_seconds() / (60 * 60)) obs_kub_mean_df = get_observed_kub_mean(curw_adapter, kub_observed_stations, obs_start, obs_end) obs_klb_mean_df = get_observed_klb_mean(curw_adapter, klb_observed_stations, obs_start, obs_end) curw_adapter.close() # fcst_adapter.close() mean_df = pd.merge(obs_kub_mean_df, obs_klb_mean_df, on='time') print('mean_df : ', mean_df) fh = open(raincsv_file_path, 'w') csvWriter = csv.writer(fh, delimiter=',', quotechar='|') # Write Metadata https://publicwiki.deltares.nl/display/FEWSDOC/CSV csvWriter.writerow(['Location Names', 'Awissawella', 'Colombo']) csvWriter.writerow(['Location Ids', 'Awissawella', 'Colombo']) csvWriter.writerow(['Time', 'Rainfall', 'Rainfall']) fh.close() with open(raincsv_file_path, 'a') as f: mean_df.to_csv(f, header=False) fh = open(raincsv_file_path, 'a') csvWriter = csv.writer(fh, delimiter=',', quotechar='|')
def create_hybrid_raincell(dir_path, run_date, run_time, forward=3, backward=2):
    """Download the WRF netCDF for *run_date* and generate RAINCELL.DAT.

    Reads ./raincelldat/config.json for bucket/DB settings, downloads the
    previous day's netCDF from cloud storage if not already present, then
    delegates to create_raincell_file().

    NOTE(review): this file defines create_hybrid_raincell twice with
    different signatures; being later in the file, this definition shadows
    the earlier one — confirm which is intended.

    :param dir_path: WRF data directory; RAINCELL.DAT goes under
        <dir_path>/<run_date>/<run_time>/
    :param run_date: 'YYYY-MM-DD'
    :param run_time: 'HH:MM:SS'
    :param forward: forecast days (default 3)
    :param backward: observation look-back days (default 2)
    """
    try:
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                        '%Y-%m-%d %H:%M:%S')
        start_ts_lk = start_ts_lk.strftime(
            '%Y-%m-%d_%H:00')  # e.g. '2018-05-24_08:00'
        print("WrfTrigger start_ts_lk : ", start_ts_lk)
        duration_days = (int(backward), int(forward))
        print("WrfTrigger duration_days : ", duration_days)
        config_path = os.path.join(os.getcwd(), 'raincelldat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
        key_file = os.path.join(os.getcwd(), 'raincelldat', 'uwcc-admin.json')
        bucket_name = config_data["BUCKET_NAME"]
        initial_path_prefix = config_data["INITIAL_PATH_PREFIX"]
        net_cdf_file_format = config_data["NET_CDF_FILE"]
        wrf_data_dir = dir_path
        print("wrf_data_dir : ", wrf_data_dir)
        # The WRF output to use is the run started 24h before run_date.
        net_cdf_date = datetime.strptime(run_date, '%Y-%m-%d') - timedelta(
            hours=24)
        net_cdf_date = net_cdf_date.strftime("%Y-%m-%d")
        download_location = os.path.join(os.getcwd(), run_date)
        print("download_location : ", download_location)
        print("net_cdf_date : ", net_cdf_date)
        MYSQL_HOST = config_data['db_host']
        MYSQL_USER = config_data['db_user']
        MYSQL_DB = config_data['db_name']
        MYSQL_PASSWORD = config_data['db_password']
        klb_observed_stations = config_data['klb_obs_stations']
        klb_points = get_resource_path(
            'extraction/local/kelani_basin_points_250m.txt')
        adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER,
                               password=MYSQL_PASSWORD, db=MYSQL_DB)
        # Splice the date into the configured file-name template,
        # e.g. 'prefix-suffix' -> 'prefix_<date>_suffix'.
        name_list = net_cdf_file_format.split("-")
        net_cdf_file_name = name_list[0] + "_" + net_cdf_date + "_" + \
            name_list[1]
        try:
            net_cdf_file_path = os.path.join(download_location,
                                             net_cdf_file_name)
            print("net_cdf_file_path : ", net_cdf_file_path)
            if not os.path.isfile(net_cdf_file_path):
                download_netcdf(initial_path_prefix, download_location,
                                net_cdf_file_name, key_file, bucket_name)
            # Re-check: the download may have failed silently.
            if os.path.isfile(net_cdf_file_path):
                raincell_file_path = os.path.join(wrf_data_dir, run_date,
                                                  run_time, 'RAINCELL.DAT')
                if not os.path.isfile(raincell_file_path):
                    create_raincell_file(
                        run_time, adapter, net_cdf_file_path, start_ts_lk,
                        os.path.join(wrf_data_dir, run_date),
                        klb_observed_stations, klb_points, duration_days)
            adapter.close()
        except Exception as ex:
            adapter.close()
            print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
def create_inflow(dir_path, run_date, run_time):
    """Write the FLO-2D INFLOW file into *dir_path*.

    Reads ./inflowdat/config.json for DB credentials and formatting
    parameters, pulls the discharge timeseries starting at the run datetime,
    and writes header lines followed by hourly 'H' hydrograph records and
    the initial water-level lines.

    :param dir_path: output directory for the inflow file
    :param run_date: 'YYYY-MM-DD'
    :param run_time: 'HH:MM:SS'
    """
    try:
        # FLO-2D parameters
        IHOURDAILY = 0   # 0-hourly interval, 1-daily interval
        IDEPLT = 0       # 0 for Text mode; otherwise a cell number e.g. 8672
        IFC = 'C'        # floodplain 'F' or channel 'C'
        INOUTFC = 0      # 0-inflow, 1-outflow
        KHIN = 8655      # inflow node
        HYDCHAR = 'H'    # marks hydrograph time/discharge pair lines
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        startDateTime = datetime.strptime('%s %s' % (run_date, run_time),
                                          '%Y-%m-%d %H:%M:%S')
        print("startDateTime : ", startDateTime)
        config_path = os.path.join(os.getcwd(), 'inflowdat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
        output_dir = dir_path
        inflow_file = config_data["inflow_file"]
        DAT_WIDTH = config_data["DAT_WIDTH"]
        OBSERVED_WL_IDS = config_data["OBSERVED_WL_IDS"]
        MYSQL_HOST = config_data['db_host']
        MYSQL_USER = config_data['db_user']
        MYSQL_DB = config_data['db_name']
        MYSQL_PASSWORD = config_data['db_password']
        adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER,
                               password=MYSQL_PASSWORD, db=MYSQL_DB)
        try:
            hourly_inflow_file = os.path.join(output_dir, inflow_file)
            print("hourly_outflow_file : ", hourly_inflow_file)
            if not os.path.isfile(hourly_inflow_file):
                discharge_df = get_discharge_data(adapter, startDateTime)
                print('discharge_df', discharge_df)
                initial_water_levels = update_initial_water_levels(
                    OBSERVED_WL_IDS, adapter,
                    startDateTime.strftime("%Y-%m-%d %H:%M:%S"))
                # FIX: context manager instead of bare open()/close(), so the
                # file is closed even if formatting raises.
                with open(hourly_inflow_file, 'w') as f:
                    line1 = '{0} {1:{w}{b}}\n'.format(
                        IHOURDAILY, IDEPLT, b='d', w=DAT_WIDTH)
                    line2 = '{0} {1:{w}{b}} {2:{w}{b}}\n'.format(
                        IFC, INOUTFC, KHIN, b='d', w=DAT_WIDTH)
                    line3 = '{0} {1:{w}{b}} {2:{w}{b}}\n'.format(
                        HYDCHAR, 0.0, 0.0, b='.1f', w=DAT_WIDTH)
                    f.writelines([line1, line2, line3])
                    # One 'H' record per discharge row, hours numbered from 1.
                    lines = []
                    for hour, (time, row) in enumerate(
                            discharge_df.iterrows(), start=1):
                        lines.append('{0} {1:{w}{b}} {2:{w}{b}}\n'.format(
                            HYDCHAR, float(hour), float(row["value"]),
                            b='.1f', w=DAT_WIDTH))
                    lines.extend(initial_water_levels)
                    f.writelines(lines)
            adapter.close()
        except Exception as ex:
            adapter.close()
            print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
def create_hybrid_mike_input(dir_path, run_date, run_time, forward, backward):
    """Write mike_input.txt: a CSV of hourly rainfall per MIKE sub-catchment
    centroid, stitching an observed window onto a forecast window.

    Observed values come from station data interpolated via Thiessen
    (Voronoi) polygons; forecast values come from WRF stations interpolated
    the same way.

    :param dir_path: output directory for mike_input.txt
    :param run_date: 'YYYY-MM-DD'
    :param run_time: 'HH:MM:SS'
    :param forward: forecast days (only used for the 'range' debug prints;
        the forecast window itself is hard-coded to 3 days below)
    :param backward: look-back days (likewise; the observed window is
        hard-coded to 2 days below)
    """
    try:
        res_mins = '60'            # timestep resolution (minutes), converted to int below
        model_prefix = 'wrf'
        forecast_source = 'wrf0'
        run_name = 'Cloud-1'
        forecast_adapter = None
        observed_adapter = None
        kelani_basin_mike_points_file = get_resource_path(
            'extraction/local/metro_col_sub_catch_centroids.csv')
        kelani_basin_points_file = get_resource_path(
            'extraction/local/kelani_basin_points_250m.txt')
        kelani_lower_basin_shp_file = get_resource_path(
            'extraction/shp/klb-wgs84/klb-wgs84.shp')
        reference_net_cdf = get_resource_path(
            'extraction/netcdf/wrf_wrfout_d03_2019-03-31_18_00_00_rf')
        #config_path = os.path.join(os.getcwd(), 'raincelldat', 'config.json')
        config_path = os.path.join(os.getcwd(), 'config.json')
        with open(config_path) as json_file:
            config = json.load(json_file)
            if 'forecast_db_config' in config:
                forecast_db_config = config['forecast_db_config']
            if 'observed_db_config' in config:
                observed_db_config = config['observed_db_config']
            if 'klb_obs_stations' in config:
                obs_stations = copy.deepcopy(config['klb_obs_stations'])
        res_mins = int(res_mins)
        print('[run_date, run_time] : ', [run_date, run_time])
        start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                        '%Y-%m-%d %H:%M:%S')
        start_ts_lk = start_ts_lk.strftime(
            '%Y-%m-%d_%H:00')  # '2018-05-24_08:00'
        duration_days = (int(backward), int(forward))
        obs_start = datetime.strptime(
            start_ts_lk, '%Y-%m-%d_%H:%M') - timedelta(days=duration_days[0])
        obs_end = datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M')
        forecast_end = datetime.strptime(
            start_ts_lk, '%Y-%m-%d_%H:%M') + timedelta(days=duration_days[1])
        print([obs_start, obs_end, forecast_end])
        fcst_duration_start = obs_end.strftime('%Y-%m-%d %H:%M:%S')
        # NOTE(review): windows below are hard-coded to +3 / -2 days (midnight
        # aligned) rather than using duration_days — confirm this is intended.
        fcst_duration_end = (
            datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') +
            timedelta(days=3)).strftime('%Y-%m-%d 00:00:00')
        obs_duration_start = (
            datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
            timedelta(days=2)).strftime('%Y-%m-%d 00:00:00')
        print('obs_duration_start : ', obs_duration_start)
        print('fcst_duration_start : ', fcst_duration_start)
        print('fcst_duration_end : ', fcst_duration_end)
        # Number of timesteps in each window at res_mins resolution.
        observed_duration = int(
            (datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
             datetime.strptime(obs_duration_start, '%Y-%m-%d %H:%M:%S'))
            .total_seconds() / (60 * res_mins))
        forecast_duration = int(
            (datetime.strptime(fcst_duration_end, '%Y-%m-%d %H:%M:%S') -
             datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S'))
            .total_seconds() / (60 * res_mins))
        total_duration = int(
            (datetime.strptime(fcst_duration_end, '%Y-%m-%d %H:%M:%S') -
             datetime.strptime(obs_duration_start, '%Y-%m-%d %H:%M:%S'))
            .total_seconds() / (60 * res_mins))
        print('observed_duration : ', observed_duration)
        print('forecast_duration : ', forecast_duration)
        print('total_duration : ', total_duration)
        mike_input_file_path = os.path.join(dir_path, 'mike_input.txt')
        print('mike_input_file_path : ', mike_input_file_path)
        if not os.path.isfile(mike_input_file_path):
            # Basin grid points give the bounding box for WRF station lookup.
            points = np.genfromtxt(kelani_basin_points_file, delimiter=',')
            kel_lon_min = np.min(points, 0)[1]
            kel_lat_min = np.min(points, 0)[2]
            kel_lon_max = np.max(points, 0)[1]
            kel_lat_max = np.max(points, 0)[2]
            # Structured array: first field is the (bytes) catchment name,
            # hence the .decode() calls below.
            mike_points = np.genfromtxt(kelani_basin_mike_points_file,
                                        delimiter=',', names=True, dtype=None)
            print('mike_points : ', mike_points)
            print('mike_points : ', mike_points[0][0].decode())
            print('mike_points : ', mike_points[1][0].decode())
            print('mike_points : ', mike_points[2][0].decode())

            def _get_points_names(mike_points):
                # Extract catchment names from the structured array.
                mike_point_names = []
                for p in mike_points:
                    mike_point_names.append(p[0].decode())
                return mike_point_names

            #mike_point_names = get_centroid_names(kelani_basin_mike_points_file)
            mike_point_names = _get_points_names(mike_points)
            print('mike_point_names : ', mike_point_names)
            print('mike_point_names[0] : ', mike_point_names[0])
            print('mike_point_names[1] : ', mike_point_names[1])
            print('mike_point_names[2] : ', mike_point_names[2])
            print('mike_point_names[-1] : ', mike_point_names[-1])
            print('[kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max] : ',
                  [kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max])
            #"""
            #
            #min_lat, min_lon, max_lat, max_lon
            forecast_stations, station_points = get_forecast_stations_from_net_cdf(
                model_prefix, reference_net_cdf, kel_lat_min, kel_lon_min,
                kel_lat_max, kel_lon_max)
            print('forecast_stations length : ', len(forecast_stations))
            file_header = ','.join(mike_point_names)
            print('file_header : ', file_header)
            # Fetch the observed window, then immediately release the adapter.
            observed_adapter = MySQLAdapter(
                host=observed_db_config['host'],
                user=observed_db_config['user'],
                password=observed_db_config['password'],
                db=observed_db_config['db'])
            # print('obs_stations : ', obs_stations)
            observed_precipitations = get_observed_precip(
                obs_stations, obs_duration_start, fcst_duration_start,
                observed_duration, observed_adapter, forecast_source='wrf0')
            observed_adapter.close()
            observed_adapter = None
            # Keep only stations that actually returned observed data.
            validated_obs_station = {}
            # print('obs_stations.keys() : ', obs_stations.keys())
            # print('observed_precipitations.keys() : ', observed_precipitations.keys())
            for station_name in obs_stations.keys():
                if station_name in observed_precipitations.keys():
                    validated_obs_station[station_name] = obs_stations[
                        station_name]
                else:
                    print('invalid station_name : ', station_name)
            # if bool(observed_precipitations):
            if len(validated_obs_station) >= 1:
                thess_poly = get_voronoi_polygons(
                    validated_obs_station, kelani_lower_basin_shp_file,
                    add_total_area=False)
                # Fetch the forecast window, then release this adapter too.
                forecast_adapter = MySQLAdapter(
                    host=forecast_db_config['host'],
                    user=forecast_db_config['user'],
                    password=forecast_db_config['password'],
                    db=forecast_db_config['db'])
                forecast_precipitations = get_forecast_precipitation(
                    forecast_source, run_name, forecast_stations,
                    forecast_adapter, obs_end.strftime('%Y-%m-%d %H:%M:%S'),
                    forward_days=3)
                forecast_adapter.close()
                forecast_adapter = None
                if bool(forecast_precipitations):
                    # Map each MIKE centroid to its forecast Thiessen polygon
                    # (None when outside all polygons).
                    fcst_thess_poly = get_voronoi_polygons(
                        station_points, kelani_lower_basin_shp_file,
                        add_total_area=False)
                    fcst_point_thess_idx = []
                    for point in mike_points:
                        fcst_point_thess_idx.append(
                            is_inside_geo_df(fcst_thess_poly, lon=point[1],
                                             lat=point[2]))
                        pass
                    # print('fcst_point_thess_idx : ', fcst_point_thess_idx)
                    # create_dir_if_not_exists(dir_path)
                    # Same mapping against the observed-station polygons.
                    point_thess_idx = []
                    for point in mike_points:
                        point_thess_idx.append(
                            is_inside_geo_df(thess_poly, lon=point[1],
                                             lat=point[2]))
                        pass
                    print('len(mike_points)', len(mike_points))
                    print('len(point_thess_idx)', len(point_thess_idx))
                    print('len(fcst_point_thess_idx)', len(fcst_point_thess_idx))
                    print('point_thess_idx : ', point_thess_idx)
                    print('fcst_point_thess_idx : ', fcst_point_thess_idx)
                    print('mike_point_names : ', mike_point_names)
                    with open(mike_input_file_path, mode='w') as output_file:
                        output_writer = csv.writer(output_file, delimiter=',',
                                                   dialect='excel')
                        header = ['Times']
                        header.extend(mike_point_names)
                        output_writer.writerow(header)
                        print('range 1 : ',
                              int(24 * 60 * duration_days[0] / res_mins) + 1)
                        print('range 2 : ',
                              int(24 * 60 * duration_days[1] / res_mins) - 1)
                        # Observed rows: one per timestep, hourly from
                        # obs_duration_start; obs_duration_end tracks the last
                        # timestamp written so the forecast rows continue from it.
                        obs_duration_end = None
                        for t in range(observed_duration):
                            date_time = datetime.strptime(
                                obs_duration_start,
                                '%Y-%m-%d %H:%M:%S') + timedelta(hours=t)
                            obs_duration_end = date_time.strftime(
                                '%Y-%m-%d %H:%M:%S')
                            print(date_time.strftime('%Y-%m-%d %H:%M:%S'))
                            obs_rf_list = []
                            for i, point in enumerate(mike_points):
                                # 0 rainfall for centroids outside every polygon.
                                rf = float(
                                    observed_precipitations[
                                        point_thess_idx[i]].values[t]
                                ) if point_thess_idx[i] is not None else 0
                                obs_rf_list.append('%.6f' % rf)
                            row = [date_time.strftime('%Y-%m-%d %H:%M:%S')]
                            row.extend(obs_rf_list)
                            output_writer.writerow(row)
                        print(
                            'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
                        )
                        # Forecast rows continue one hour after the last
                        # observed row.
                        next_time_step = datetime.strptime(
                            obs_duration_end,
                            '%Y-%m-%d %H:%M:%S') + timedelta(hours=1)
                        for t in range(forecast_duration):
                            date_time = next_time_step + timedelta(hours=t)
                            print(date_time.strftime('%Y-%m-%d %H:%M:%S'))
                            fcst_rf_list = []
                            for i, point in enumerate(mike_points):
                                rf = float(forecast_precipitations[
                                    fcst_point_thess_idx[i]].values[t]
                                ) if fcst_point_thess_idx[
                                    i] is not None else 0
                                fcst_rf_list.append('%.6f' % rf)
                            row = [date_time.strftime('%Y-%m-%d %H:%M:%S')]
                            row.extend(fcst_rf_list)
                            output_writer.writerow(row)
                else:
                    print('----------------------------------------------')
                    print('No forecast data.')
                    print('----------------------------------------------')
            else:
                print('----------------------------------------------')
                print('No observed data.')
                print('Available station count: ', len(validated_obs_station))
                print('Proceed with forecast data.')
                print('----------------------------------------------')
            # """
    except Exception as e:
        print('Raincell generation error|Exception:', str(e))
        traceback.print_exc()
        # Best-effort cleanup of any adapter still open when the error hit.
        try:
            if forecast_adapter is not None:
                forecast_adapter.close()
            if observed_adapter is not None:
                observed_adapter.close()
        except Exception as ex:
            print(str(ex))
def create_outflow(dir_path, run_date, run_time, forward=3, backward=2):
    """Write FLO-2D OUTFLOW.DAT into *dir_path* for the given run datetime.

    Reads ./outflowdat/config.json for DB credentials, CONTROL_INTERVAL
    (minutes), DAT_WIDTH and TIDAL_FORECAST_ID, pulls the tidal forecast
    over [run datetime, run datetime + CONTROL_INTERVAL], and appends hourly
    'S' records after the 'N' line of ./outflowdat/INITTIDAL.CONF.

    :param dir_path: output directory for OUTFLOW.DAT
    :param run_date: 'YYYY-MM-DD'
    :param run_time: 'HH:MM:SS'
    :param forward, backward: accepted for interface compatibility; unused.

    Exits the process with status 1 when no tidal data is found.
    """
    try:
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        startDateTime = datetime.strptime('%s %s' % (run_date, run_time),
                                          '%Y-%m-%d %H:%M:%S')
        print("startDateTime : ", startDateTime)
        config_path = os.path.join(os.getcwd(), 'outflowdat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
        output_dir = dir_path
        inittidal_conf_path = os.path.join(os.getcwd(), 'outflowdat',
                                           'INITTIDAL.CONF')
        CONTROL_INTERVAL = config_data["CONTROL_INTERVAL"]
        DAT_WIDTH = config_data["DAT_WIDTH"]
        TIDAL_FORECAST_ID = config_data["TIDAL_FORECAST_ID"]
        MYSQL_HOST = config_data['db_host']
        MYSQL_USER = config_data['db_user']
        MYSQL_DB = config_data['db_name']
        MYSQL_PASSWORD = config_data['db_password']
        adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER,
                               password=MYSQL_PASSWORD, db=MYSQL_DB)
        try:
            hourly_outflow_file = os.path.join(output_dir, 'OUTFLOW.DAT')
            print("hourly_outflow_file : ", hourly_outflow_file)
            if not os.path.isfile(hourly_outflow_file):
                opts = {
                    # (original added a no-op `- timedelta(minutes=0)` here)
                    'from': startDateTime.strftime("%Y-%m-%d %H:%M:%S"),
                    'to': (startDateTime + timedelta(
                        minutes=CONTROL_INTERVAL)).strftime(
                            "%Y-%m-%d %H:%M:%S"),
                }
                tidal_timeseries = get_forecast_timeseries(
                    adapter, TIDAL_FORECAST_ID, opts)
                if len(tidal_timeseries) > 0:
                    print('tidal_timeseries::', len(tidal_timeseries),
                          tidal_timeseries[0], tidal_timeseries[-1])
                    lines = []
                    print('Reading INIT TIDAL CONF...')
                    with open(inittidal_conf_path) as init_tidal_conf_file:
                        init_tidal_levels = init_tidal_conf_file.readlines()
                    for init_tidal_level in init_tidal_levels:
                        if len(init_tidal_level.split()):  # skip empty lines
                            lines.append(init_tidal_level)
                            # After the 'N' (node) line, emit the hour-0
                            # record then one 'S' record per timeseries step.
                            if init_tidal_level[0] == 'N':
                                lines.append('{0} {1:{w}} {2:{w}}\n'.format(
                                    'S', 0, 0, w=DAT_WIDTH))
                                # Hours are counted from the top of the run hour.
                                base_date_time = startDateTime.replace(
                                    minute=0, second=0, microsecond=0)
                                for step in tidal_timeseries:
                                    hours_so_far = (step[0] - base_date_time)
                                    hours_so_far = 24 * hours_so_far.days + \
                                        hours_so_far.seconds / (60 * 60)
                                    lines.append(
                                        '{0} {1:{w}} {2:{w}{b}}\n'.format(
                                            'S', int(hours_so_far),
                                            float(step[1]),
                                            b='.2f', w=DAT_WIDTH))
                    # FIX: context manager instead of bare open()/close().
                    with open(hourly_outflow_file, 'w') as f:
                        f.writelines(lines)
                    print('Finished writing OUTFLOW.DAT')
                else:
                    print('No data found for tidal timeseries: ',
                          tidal_timeseries)
                    # FIX: close the adapter before exiting — SystemExit is
                    # not caught by `except Exception`, so the original
                    # leaked the connection on this path.
                    adapter.close()
                    sys.exit(1)
            adapter.close()
        except Exception as ex:
            adapter.close()
            print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
forecast_adapter = MySQLAdapter( host=forecast_db_config['host'], user=forecast_db_config['user'], password=forecast_db_config['password'], db=forecast_db_config['db']) # #min_lat, min_lon, max_lat, max_lon forecast_stations, station_points = get_forecast_stations_from_net_cdf( reference_net_cdf, kel_lat_min, kel_lon_min, kel_lat_max, kel_lon_max) print('forecast_stations length : ', len(forecast_stations)) forecast_precipitations = get_forecast_precipitation( forecast_stations, obs_end, forecast_end, forecast_adapter) # print('forecast_precipitations : ', forecast_precipitations) forecast_adapter.close() observed_adapter = MySQLAdapter( host=observed_db_config['host'], user=observed_db_config['user'], password=observed_db_config['password'], db=observed_db_config['db']) # observed_precipitations = get_observed_precip(obs_stations, obs_start, obs_end, duration_days, observed_adapter, forecast_source='wrf0') observed_precipitations = get_observed_precip( obs_stations, datetime.strptime(obs_start, '%Y-%m-%d %H:%M:%S'), datetime.strptime(obs_end, '%Y-%m-%d %H:%M:%S'), duration_days, observed_adapter, forecast_source='wrf0')