Example No. 1
def get_basin_init_discharge(init_date_time,
                             db_user,
                             db_pwd,
                             db_host,
                             db_name='curw_sim'):
    # Read the initial discharge at the Glencourse grid point from curw_sim.
    # print('get_basin_init_discharge|init_date_time : ', init_date_time)
    sim_adapter = CurwSimAdapter(db_user, db_pwd, db_host, db_name)
    value = sim_adapter.get_basin_discharge(init_date_time,
                                            grid_id='discharge_glencourse')
    # print('get_basin_init_discharge|value : ', value)
    return value
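A minimal call-site sketch for the function above. Everything here is a placeholder: the timestamp, credentials, and host are hypothetical, and the timestamp format is assumed to match the '%Y-%m-%d %H:%M:%S' strings used in the other examples.

# Hypothetical usage; credentials and timestamp are placeholders.
init_discharge = get_basin_init_discharge('2023-05-01 00:00:00',
                                          db_user='curw_user',
                                          db_pwd='secret',
                                          db_host='127.0.0.1')
print('Initial discharge at Glencourse : ', init_discharge)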
Example No. 2
def get_rain_files(file_name, ts_start, ts_end):
    # Build a FEWS-style rainfall CSV by weighting gauge rainfall over
    # sub-catchments with Thiessen-polygon area ratios.
    print('get_rain_files|[file_name, ts_start, ts_end] : ', [file_name, ts_start, ts_end])
    sim_adapter = CurwSimAdapter(MYSQL_USER, MYSQL_PASSWORD, MYSQL_HOST, MYSQL_DB)
    valid_gages = validate_gage_points(sim_adapter, ts_start, ts_end)
    print('valid_gages.keys() : ', valid_gages.keys())
    kub_points = get_valid_kub_points_from_meta_data(valid_gages)
    try:
        shape_file = 'kub-wgs84/kub-wgs84.shp'
        catchment_file = 'sub_catchments/sub_catchments.shp'
        thessian_df = get_thessian_polygon_from_gage_points(shape_file, kub_points)
        catchment_df = get_catchment_area(catchment_file)
        sub_ratios = calculate_intersection(thessian_df, catchment_df)
        print(sub_ratios)
        catchments_list = []
        catchments_rf_df_list = []
        for sub_dict in sub_ratios:
            ratio_list = sub_dict['ratios']
            sub_catchment_name = sub_dict['sub_catchment_name']
            # Seed the sub-catchment series with the first gauge, weighted by
            # its Thiessen ratio.
            gage_dict = ratio_list[0]
            gage_name = gage_dict['gage_name']
            sub_catchment_df = valid_gages[gage_name]
            ratio = gage_dict['ratio']
            if ratio > 0:
                sub_catchment_df.loc[:, 'value'] *= decimal.Decimal(ratio)
            ratio_list.remove(gage_dict)
            # Add the remaining gauges, each weighted by its own ratio.
            for gage_dict in ratio_list:
                gage_name = gage_dict['gage_name']
                time_series_df = valid_gages[gage_name]
                ratio = gage_dict['ratio']
                time_series_df.loc[:, 'value'] *= decimal.Decimal(ratio)
                sub_catchment_df['value'] = sub_catchment_df['value'] + time_series_df['value']
            if sub_catchment_df.size > 0:
                catchments_list.append(sub_catchment_name)
                catchments_rf_df_list.append(sub_catchment_df)
        df_merged = reduce(lambda left, right: pd.merge(left, right, on=['time'],
                                                        how='outer'), catchments_rf_df_list)
        print('df_merged : ', df_merged)
        df_merged.to_csv('df_merged.csv', header=False)
        file_handler = open(file_name, 'w')
        csvWriter = csv.writer(file_handler, delimiter=',', quotechar='|')
        # Write Metadata https://publicwiki.deltares.nl/display/FEWSDOC/CSV
        first_row = ['Location Names']
        first_row.extend(catchments_list)
        second_row = ['Location Ids']
        second_row.extend(catchments_list)
        third_row = ['Time']
        for i in range(len(catchments_list)):
            third_row.append('Rainfall')
        csvWriter.writerow(first_row)
        csvWriter.writerow(second_row)
        csvWriter.writerow(third_row)
        file_handler.close()
        df_merged.to_csv(file_name, mode='a', header=False)
    except Exception as e:
        print("get_rain_files|Exception|e : ", e)
Example No. 3
def get_hd_mean_rain(ts_start_str, ts_end_str, output_dir, model, pop_method,
                     allowed_error, exec_datetime, db_user, db_pwd, db_host,
                     db_name, catchment):
    sim_adapter = None
    try:
        print(
            'get_hd_mean_rain|[ts_start, ts_end, output_dir, model, pop_method, allowed_error, exec_datetime, catchment] : ',
            [
                ts_start_str, ts_end_str, output_dir, model, pop_method,
                allowed_error, exec_datetime, catchment
            ])
        sub_catchment_shape_file = os.path.join(
            RESOURCE_PATH, 'sub_catchments/sub_subcatchments.shp')
        if catchment == 'kub':
            shape_file = os.path.join(RESOURCE_PATH, 'kub-wgs84/kub-wgs84.shp')
        else:
            shape_file = os.path.join(RESOURCE_PATH, 'klb-wgs84/klb-wgs84.shp')
        sim_adapter = CurwSimAdapter(db_user, db_pwd, db_host, db_name)
        all_stations = sim_adapter.get_all_basin_stations()
        # [{'station': station, 'hash_id': hash_id, 'latitude': latitude, 'longitude': longitude}]
        print('get_basin_rain|all_stations : ', all_stations)
        ts_start = datetime.strptime(ts_start_str, '%Y-%m-%d %H:%M:%S')
        ts_end = datetime.strptime(ts_end_str, '%Y-%m-%d %H:%M:%S')
        ts_step = ts_start
        step_one = True
        output_file = os.path.join(output_dir, 'DailyRain.csv')
        # Walk the time window in one-hour steps, appending the
        # Thiessen-weighted mean rainfall per sub-catchment at each step.
        while ts_step < ts_end:
            next_ts_step = ts_step + timedelta(minutes=60)
            ts_start_str = ts_step.strftime('%Y-%m-%d %H:%M:%S')
            ts_end_str = next_ts_step.strftime('%Y-%m-%d %H:%M:%S')
            all_stations_tms = get_ts_for_start_end(sim_adapter, all_stations,
                                                    ts_start_str, ts_end_str,
                                                    allowed_error)
            zero_tms_df = create_df(ts_start_str, ts_end_str)
            calculate_hd_step_mean(shape_file, sub_catchment_shape_file,
                                   all_stations_tms, output_file, step_one,
                                   zero_tms_df)
            step_one = False
            ts_step = next_ts_step
        # Append a terminating row of zero rainfall at the end timestamp.
        file_handler = open(output_file, 'a')
        csvWriter = csv.writer(file_handler, delimiter=',', quotechar='|')
        csvWriter.writerow([ts_end, 0.0, 0.0, 0.0, 0.0, 0.0])
        file_handler.close()
        sim_adapter.close_connection()
    except Exception as e:
        if sim_adapter is not None:
            sim_adapter.close_connection()
        print('get_hd_mean_rain|Exception : ', str(e))
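A hedged call sketch for get_hd_mean_rain. Every value below is a placeholder; in particular the model and pop_method strings and the allowed_error figure are assumptions, not values taken from the source.

# Hypothetical invocation; credentials, paths and method names are placeholders.
get_hd_mean_rain(ts_start_str='2023-05-01 00:00:00',
                 ts_end_str='2023-05-02 00:00:00',
                 output_dir='/tmp/hd_rain',
                 model='hechms',
                 pop_method='MME',
                 allowed_error=0.5,
                 exec_datetime='2023-05-01 06:00:00',
                 db_user='curw_user',
                 db_pwd='secret',
                 db_host='127.0.0.1',
                 db_name='curw_sim',
                 catchment='kub')
# The hourly means are appended to <output_dir>/DailyRain.csv.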
Example No. 4
        elif opt in ("-f", "--forward"):
            forward = int(arg)
        elif opt in ("-b", "--backward"):
            backward = int(arg)
        elif opt in ("-m", "--mode"):
            mode = arg
    config_path = os.path.join('/home/uwcc-admin/mike21/mike_input/code',
                               'config.json')
    print('config_path : ', config_path)
    # Load the runtime configuration (output paths and curw_sim DB credentials).
    with open(config_path) as json_file:
        config = json.load(json_file)
        dir_path = config['dir_path']
        if 'curw_sim_db_config' in config:
            curw_sim_db_config = config['curw_sim_db_config']
        else:
            # Abort if the database configuration is missing.
            exit(2)
        db_adapter = CurwSimAdapter(curw_sim_db_config['user'],
                                    curw_sim_db_config['password'],
                                    curw_sim_db_config['host'],
                                    curw_sim_db_config['db'])
        output_path = os.path.join(dir_path, run_date, run_time)
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        create_hybrid_mike_input(mode, output_path, run_date, forward,
                                 backward)
        try:
            if db_adapter is not None:
                db_adapter.close_connection()
        except Exception as ex:
            print(str(ex))
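This excerpt starts inside an option-parsing loop. The -f/-b/-m branches and the later use of run_date and run_time suggest a getopt preamble roughly like the sketch below; the -d/-t option names and the overall option string are assumptions.

import getopt
import sys

# Assumed preamble for the excerpt above: parse the run date/time, the
# forward/backward day counts and the run mode from the command line.
try:
    opts, args = getopt.getopt(sys.argv[1:], 'd:t:f:b:m:',
                               ['date=', 'time=', 'forward=', 'backward=', 'mode='])
except getopt.GetoptError:
    sys.exit(2)
for opt, arg in opts:
    if opt in ('-d', '--date'):
        run_date = arg      # e.g. '2023-05-01' (hypothetical format)
    elif opt in ('-t', '--time'):
        run_time = arg      # e.g. '06:00:00' (hypothetical format)
    elif opt in ('-f', '--forward'):
        forward = int(arg)
    # ... continues with the -b and -m branches shown in the excerpt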
Example No. 5
        kub_basin_extent = config['KELANI_UPPER_BASIN_EXTENT']
        klb_basin_extent = config['KELANI_LOWER_BASIN_EXTENT']

        kelani_lower_basin_shp = get_resource_path(
            'extraction/shp/klb-wgs84/klb-wgs84.shp')
        kelani_upper_basin_shp = get_resource_path(
            'extraction/shp/kub-wgs84/kub-wgs84.shp')
        hourly_csv_file_dir = os.path.join(wrf_data_dir, run_date, run_time)
        create_dir_if_not_exists(hourly_csv_file_dir)
        raincsv_file_path = os.path.join(hourly_csv_file_dir, 'DailyRain.csv')
        if not os.path.isfile(raincsv_file_path):
            # DailyRain.csv does not exist yet; build the KUB/KLB mean rainfall
            # series from the curw_sim database.
            # mysql_user, mysql_password, mysql_host, mysql_db
            # print('sim_db_config : ', sim_db_config)
            sim_adapter = CurwSimAdapter(sim_db_config['user'],
                                         sim_db_config['password'],
                                         sim_db_config['host'],
                                         sim_db_config['db'])
            klb_ts = get_klb_mean(
                hourly_csv_file_dir, sim_adapter, klb_stations,
                ts_start_datetime.strftime('%Y-%m-%d %H:%M:%S'),
                ts_end_datetime.strftime('%Y-%m-%d %H:%M:%S'))
            kub_ts = get_kub_mean(
                hourly_csv_file_dir, sim_adapter, kub_stations,
                ts_start_datetime.strftime('%Y-%m-%d %H:%M:%S'),
                ts_end_datetime.strftime('%Y-%m-%d %H:%M:%S'))
            # print('klb_ts: ', klb_ts)
            # print('kub_ts: ', kub_ts)
            sim_adapter.close_connection()
            mean_df = pd.merge(kub_ts, klb_ts, on='time')
            # print('mean_df : ', mean_df)
            fh = open(raincsv_file_path, 'w')
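The excerpt breaks off right after the output file is opened. Judging by the metadata-header pattern in the other examples (Location Names / Location Ids / Time rows followed by the data), the continuation plausibly looks like the sketch below; it reuses fh, mean_df and raincsv_file_path from the excerpt, and the 'kub_mean'/'klb_mean' labels are assumptions.

import csv

# Plausible continuation (assumption), modelled on the other examples.
csv_writer = csv.writer(fh, delimiter=',', quotechar='|')
csv_writer.writerow(['Location Names', 'kub_mean', 'klb_mean'])
csv_writer.writerow(['Location Ids', 'kub_mean', 'klb_mean'])
csv_writer.writerow(['Time', 'Rainfall', 'Rainfall'])
fh.close()
# Append the merged KUB/KLB mean rainfall below the metadata rows.
mean_df.to_csv(raincsv_file_path, mode='a', header=False)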
Example No. 6
def get_mean_rain(ts_start,
                  ts_end,
                  output_dir,
                  model,
                  pop_method,
                  allowed_error,
                  exec_datetime,
                  db_user,
                  db_pwd,
                  db_host,
                  db_name='curw_sim',
                  catchment='kub'):
    try:
        print(
            '[ts_start, ts_end, output_dir, model, pop_method, allowed_error, exec_datetime] : ',
            [
                ts_start, ts_end, output_dir, model, pop_method, allowed_error,
                exec_datetime
            ])
        sim_adapter = CurwSimAdapter(db_user, db_pwd, db_host, db_name)
        if catchment == 'kub':
            # shape_file = res_mgr.get_resource_path('resources/kub-wgs84/kub-wgs84.shp')
            shape_file = os.path.join(RESOURCE_PATH, 'kub-wgs84/kub-wgs84.shp')
        else:
            shape_file = os.path.join(RESOURCE_PATH, 'klb-wgs84/klb-wgs84.shp')
            # shape_file = res_mgr.get_resource_path('resources/klb-wgs84/klb-wgs84.shp')
        # {station1:{'hash_id': hash_id1, 'latitude': latitude1, 'longitude': longitude1, 'timeseries': timeseries1}}
        available_stations = sim_adapter.get_basin_available_stations_timeseries(
            shape_file, ts_start, ts_end, model, pop_method, allowed_error,
            exec_datetime)
        # {'id' --> [lon, lat]}
        gauge_points = {}
        for station, info in available_stations.items():
            print('Final available station : ', station)
            gauge_points[station] = [
                '%.6f' % info['longitude'],
                '%.6f' % info['latitude']
            ]
        print('gauge_points : ', gauge_points)
        print('output_dir : ', output_dir)
        gauge_points_thessian = get_thessian_polygon_from_gage_points(
            output_dir, shape_file, gauge_points)
        print('gauge_points_thessian : ', gauge_points_thessian)
        #shape_file = res_mgr.get_resource_path(os.path.join(RESOURCE_PATH, 'sub_catchments/sub_catchments.shp'))
        shape_file = os.path.join(RESOURCE_PATH,
                                  'sub_catchments/sub_subcatchments.shp')
        catchment_df = gpd.GeoDataFrame.from_file(shape_file)
        sub_ratios = calculate_intersection(gauge_points_thessian,
                                            catchment_df)
        print('sub_ratios : ', sub_ratios)
        catchment_rain = []
        catchment_name_list = []
        for sub_ratio in sub_ratios:
            catchment_name = sub_ratio['sub_catchment_name']
            catchment_ts_list = []
            ratios = sub_ratio['ratios']
            for gauge_ratio in ratios:
                # e.g. {'gage_name': 'Dickoya', 'ratio': 0.9878}
                gauge_name = gauge_ratio['gage_name']
                ratio = Decimal(gauge_ratio['ratio'])
                gauge_ts = available_stations[gauge_name]['timeseries']
                gauge_ts.to_csv(
                    os.path.join(
                        output_dir,
                        '{}_{}_rain.csv'.format(catchment_name, gauge_name)))
                # Weight the gauge rainfall by its Thiessen ratio.
                modified_gauge_ts = gauge_ts.copy()
                modified_gauge_ts['value'] = modified_gauge_ts['value'] * ratio
                modified_gauge_ts.to_csv(
                    os.path.join(
                        output_dir,
                        '{}_{}_ratio_rain.csv'.format(catchment_name,
                                                      gauge_name)))
                catchment_ts_list.append(modified_gauge_ts)
            # Sum the weighted gauge series into one sub-catchment column.
            total_rain = reduce(lambda x, y: x.add(y, fill_value=0),
                                catchment_ts_list)
            total_rain.rename(columns={'value': catchment_name}, inplace=True)
            catchment_name_list.append(catchment_name)
            catchment_rain.append(total_rain)
        if len(catchment_rain) >= 1:
            mean_rain = catchment_rain[0].join(catchment_rain[1:])
            output_file = os.path.join(output_dir, 'DailyRain.csv')
            # mean_rain.to_csv(output_file, header=False)
            file_handler = open(output_file, 'w')
            csvWriter = csv.writer(file_handler, delimiter=',', quotechar='|')
            # Write Metadata https://publicwiki.deltares.nl/display/FEWSDOC/CSV
            first_row = ['Location Names']
            first_row.extend(catchment_name_list)
            second_row = ['Location Ids']
            second_row.extend(catchment_name_list)
            third_row = ['Time']
            for i in range(len(catchment_name_list)):
                third_row.append('Rainfall')
            csvWriter.writerow(first_row)
            csvWriter.writerow(second_row)
            csvWriter.writerow(third_row)
            file_handler.close()
            mean_rain.to_csv(output_file, mode='a', header=False)
        sim_adapter.close_connection()
    except Exception as e:
        print("get_mean_rain|Exception|e : ", e)