Example 1
def prepare_inflow_150(inflow_file_path, start, end, discharge_id, curw_sim_pool):

    try:

        # Extract discharge series
        TS = DisTS(pool=curw_sim_pool)
        discharge_ts = TS.get_timeseries(id_=discharge_id, start_date=start, end_date=end)

        inflow = []

        # Static header lines of the inflow file (channel inflow element and initial hydrograph point)
        inflow.append('0           41550')
        inflow.append('C               0           41550')
        inflow.append('H               0               0')

        timeseries = discharge_ts
        # 'H' lines: elapsed time in hours since the series start, paired with the discharge value
        for i in range(1, len(timeseries)):
            time_col = ('%.1f' % ((timeseries[i][0] - timeseries[0][0]).total_seconds() / 3600)).rjust(16)
            value_col = ('%.1f' % timeseries[i][1]).rjust(16)
            inflow.append('H' + time_col + value_col)

        write_to_file(inflow_file_path, data=inflow)

    except Exception as e:
        traceback.print_exc()
    finally:
        destroy_Pool(curw_sim_pool)
        print("Inflow generated")
Example 2
def prepare_inflow(inflow_file_path, start, end, discharge_id, wl_id,
                   curw_sim_pool):

    obs_wl = None
    connection = None
    curw_obs_pool = None

    try:

        curw_obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                                 user=con_params.CURW_OBS_USERNAME,
                                 password=con_params.CURW_OBS_PASSWORD,
                                 port=con_params.CURW_OBS_PORT,
                                 db=con_params.CURW_OBS_DATABASE)

        connection = curw_obs_pool.connection()

        # Extract waterlevel
        with connection.cursor() as cursor1:
            obs_end = datetime.strptime(
                start, COMMON_DATE_TIME_FORMAT) + timedelta(hours=10)
            cursor1.callproc('getWL', (wl_id, start, obs_end))
            result = cursor1.fetchone()
            obs_wl = result.get('value') if result is not None else None

        if obs_wl is None:
            obs_wl = 0.5

        # Extract discharge series
        TS = DisTS(pool=curw_sim_pool)
        discharge_ts = TS.get_timeseries(id_=discharge_id,
                                         start_date=start,
                                         end_date=end)

        inflow = []

        # Static header lines of the inflow file (channel inflow element and initial hydrograph point)
        inflow.append('0               0')
        inflow.append('C               0            8655')
        inflow.append('H               0               0')

        timeseries = discharge_ts
        # 'H' lines: elapsed time in hours since the series start, paired with the discharge value
        for i in range(1, len(timeseries)):
            time_col = ('%.1f' % ((timeseries[i][0] - timeseries[0][0]).total_seconds() / 3600)).rjust(16)
            value_col = ('%.1f' % timeseries[i][1]).rjust(16)
            inflow.append('H' + time_col + value_col)

        # 'R' lines: grid elements initialised with the observed / fixed water levels
        inflow.append('R            2265{}'.format((str(obs_wl)).rjust(16)))
        inflow.append('R            3559             6.6')

        write_to_file(inflow_file_path, data=inflow)

    except Exception as e:
        traceback.print_exc()
    finally:
        if connection is not None:
            connection.close()
        if curw_obs_pool is not None:
            destroy_Pool(curw_obs_pool)
        destroy_Pool(curw_sim_pool)
        print("Inflow generated")
Example 3
def update_discharge_from_hechms(curw_sim_pool, curw_fcst_pool, flo2d_model,
                                 method, start_time, end_time, sim_tag):
    try:
        TS = DTimeseries(pool=curw_sim_pool)

        # [station_name,latitude,longitude,target,model,version,sim_tag,station]
        extract_stations = read_csv(
            os.path.join(ROOT_DIR,
                         'grids/discharge_stations/flo2d_stations.csv'))

        for i in range(len(extract_stations)):
            station_name = extract_stations[i][0]
            latitude = extract_stations[i][1]
            longitude = extract_stations[i][2]
            target_model = extract_stations[i][3]

            if target_model == flo2d_model:
                if station_name in ("glencourse",):
                    meta_data = {
                        'latitude': float('%.6f' % float(latitude)),
                        'longitude': float('%.6f' % float(longitude)),
                        'model': target_model,
                        'method': method,
                        'grid_id': 'discharge_{}'.format(station_name)
                    }

                    tms_id = TS.get_timeseries_id_if_exists(
                        meta_data=meta_data)

                    if tms_id is None:
                        tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                        meta_data['id'] = tms_id
                        TS.insert_run(meta_data=meta_data)

                    processed_discharge_ts = process_fcst_ts_from_hechms_outputs(
                        curw_fcst_pool=curw_fcst_pool,
                        extract_stations=extract_stations,
                        i=i,
                        start=start_time,
                        end=end_time,
                        sim_tag=sim_tag)

                    if processed_discharge_ts is not None and len(processed_discharge_ts) > 0:
                        TS.insert_data(timeseries=processed_discharge_ts,
                                       tms_id=tms_id,
                                       upsert=True)

                else:
                    continue  # skip the current iteration

    except Exception as e:
        traceback.print_exc()
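
Several snippets rely on a read_csv helper and on the flo2d_stations.csv layout noted in the comments ([station_name, latitude, longitude, target, model, version, sim_tag, station]). A minimal sketch, assuming the helper returns the file as a list of string rows, is:

import csv

# Assumed behaviour only; the real helper may skip a header row or cast values.
def read_csv(file_path):
    with open(file_path, 'r') as f:
        return [row for row in csv.reader(f) if row]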
Example 4
        dis_id = read_attribute_from_config_file('dis_id', config)

        if start_time is None:
            start_time = (datetime.now() - timedelta(days=3)).strftime('%Y-%m-%d 00:00:00')
        else:
            check_time_format(time=start_time)

        if end_time is None:
            end_time = (datetime.now() + timedelta(days=2)).strftime('%Y-%m-%d 00:00:00')
        else:
            check_time_format(time=end_time)

        pool = get_Pool(host=con_params.CURW_SIM_HOST, port=con_params.CURW_SIM_PORT, user=con_params.CURW_SIM_USERNAME,
                        password=con_params.CURW_SIM_PASSWORD,
                        db=con_params.CURW_SIM_DATABASE)
        TS = Timeseries(pool)

        latest_fgt = TS.get_obs_end(dis_id)

        if output_dir is None:
            output_dir = os.path.join(OUTPUT_DIRECTORY, (datetime.utcnow() + timedelta(hours=5, minutes=30)).strftime('%Y-%m-%d_%H-00-00'))
        if file_name is None:
            file_name = 'mike_dis.txt'  #.format(latest_fgt.strftime('%Y-%m-%d_%H-%M-00'))

        mike_dis_file_path = os.path.join(output_dir, file_name)

        if not os.path.isfile(mike_dis_file_path):
            makedir_if_not_exist_given_filepath(mike_dis_file_path)
            print("{} start preparing mike discharge input".format(datetime.now()))
            mike_discharge = prepare_mike_dis_input(TS=TS, start=start_time, end=end_time, dis_id=dis_id)
            mike_discharge.to_csv(mike_dis_file_path, header=False, index=True)
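
makedir_if_not_exist_given_filepath is referenced above but not defined on this page. A minimal sketch, assuming it only has to create the parent directory of the target file, is:

import os

# Assumed behaviour only: create the parent directory of file_path if it is missing.
def makedir_if_not_exist_given_filepath(file_path):
    parent_dir = os.path.dirname(file_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)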
Example 5
            [round_to_nearest_hour(discharge_ts[k][0]), '%.3f' % float(discharge_ts[k][1])])

    return processed_dis_ts


if __name__=="__main__":

    try:

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST, user=CURW_SIM_USERNAME, password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT, db=CURW_SIM_DATABASE)

        curw_fcst_pool = get_Pool(host=CURW_FCST_HOST, user=CURW_FCST_USERNAME, password=CURW_FCST_PASSWORD,
                                  port=CURW_FCST_PORT, db=CURW_FCST_DATABASE)

        TS = Timeseries(pool=curw_sim_pool)

        # [station_name,latitude,longitude,target,model,version,sim_tag,station]
        extract_stations = read_csv('grids/discharge_stations/flo2d_stations.csv')

        for i in range(len(extract_stations)):
            station_name = extract_stations[i][0]
            latitude = extract_stations[i][1]
            longitude = extract_stations[i][2]
            target_model = extract_stations[i][3]
            method = extract_stations[i][4]

            meta_data = {
                'latitude': float('%.6f' % float(latitude)),
                'longitude': float('%.6f' % float(longitude)),
                'model': target_model, 'method': method,
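
This snippet (cut off above) calls a round_to_nearest_hour helper that is not shown. A minimal sketch, assuming the timestamps are datetime objects and that the helper rounds to the closest full hour, is:

from datetime import timedelta

# Assumed behaviour only; the project's helper may always round down instead.
def round_to_nearest_hour(timestamp):
    if timestamp.minute >= 30:
        timestamp += timedelta(hours=1)
    return timestamp.replace(minute=0, second=0, microsecond=0)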
Example 6
def update_discharge_obs(curw_sim_pool, flo2d_model, method, timestep,
                         start_time, end_time):
    try:

        discharge_TS = DTimeseries(pool=curw_sim_pool)
        waterlevel_TS = WLTimeseries(pool=curw_sim_pool)

        # [station_name,latitude,longitude,target]
        extract_stations = read_csv(
            'grids/discharge_stations/flo2d_stations.csv')
        # keys: target_model, value: [latitude, longitude, station_name]
        extract_stations_dict = {}

        for obs_index in range(len(extract_stations)):
            extract_stations_dict[extract_stations[obs_index][3]] = [
                extract_stations[obs_index][1], extract_stations[obs_index][2],
                extract_stations[obs_index][0]
            ]

        station_name = extract_stations_dict.get(flo2d_model)[2]
        meta_data = {
            'latitude': float('%.6f' % float(extract_stations_dict.get(flo2d_model)[0])),
            'longitude': float('%.6f' % float(extract_stations_dict.get(flo2d_model)[1])),
            'model': flo2d_model,
            'method': method,
            'grid_id': 'discharge_{}'.format(station_name)
        }

        wl_meta_data = {
            'latitude': float('%.6f' % float(extract_stations_dict.get(flo2d_model)[0])),
            'longitude': float('%.6f' % float(extract_stations_dict.get(flo2d_model)[1])),
            'model': flo2d_model,
            'method': method,
            'grid_id': 'waterlevel_{}'.format(station_name)
        }

        tms_id = discharge_TS.get_timeseries_id_if_exists(meta_data=meta_data)
        wl_tms_id = waterlevel_TS.get_timeseries_id_if_exists(
            meta_data=wl_meta_data)

        if wl_tms_id is None:
            print("Warning!!! {} waterlevel timeseries doesn't exist.".format(
                station_name))
            exit(1)

        timeseries = []

        if tms_id is None:
            tms_id = discharge_TS.generate_timeseries_id(meta_data=meta_data)
            meta_data['id'] = tms_id
            discharge_TS.insert_run(meta_data=meta_data)

        wl_timeseries = waterlevel_TS.get_timeseries(id_=wl_tms_id,
                                                     start_date=start_time,
                                                     end_date=end_time)

        estimated_discharge_ts = []

        if station_name == 'hanwella':
            estimated_discharge_ts = calculate_hanwella_discharge(
                wl_timeseries)
        elif station_name == 'glencourse':
            estimated_discharge_ts = calculate_glencourse_discharge(
                wl_timeseries)

        if estimated_discharge_ts is not None and len(estimated_discharge_ts) > 0:
            discharge_TS.insert_data(timeseries=estimated_discharge_ts,
                                     tms_id=tms_id,
                                     upsert=True)

    except Exception as e:
        traceback.print_exc()
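
calculate_hanwella_discharge and calculate_glencourse_discharge convert an observed water-level series into discharge, as Example 7 suggests. A generic sketch of that conversion is shown below; the power-law form and the coefficients are placeholders, not the actual rating curves used for Hanwella or Glencourse.

# Hypothetical rating-curve conversion: Q = a * h^b above gauge zero.
# Coefficients are placeholders, not station-specific values.
def calculate_discharge_from_rating_curve(wl_ts, a=1.0, b=2.0, gauge_zero=0.0):
    discharge_ts = []
    for i in range(len(wl_ts)):
        stage = float(wl_ts[i][1]) - gauge_zero
        discharge = a * (stage ** b) if stage > 0 else 0.0
        discharge_ts.append([wl_ts[i][0], '%.3f' % discharge])  # same [time, value] shape as above
    return discharge_ts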
Example 7
        discharge_ts.append([glencourse_wl_ts[i][0], '%.3f' % discharge])

    return discharge_ts


if __name__ == "__main__":

    try:

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST,
                                 user=CURW_SIM_USERNAME,
                                 password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        discharge_TS = DTimeseries(pool=curw_sim_pool)
        waterlevel_TS = WLTimeseries(pool=curw_sim_pool)

        # [station_name,latitude,longitude,target]
        extract_stations = read_csv(
            'grids/discharge_stations/flo2d_stations.csv')
        # extract_stations_dict = { }  # keys: station_name , value: [latitude, longitude, target_model]

        # for obs_index in range(len(extract_stations)):
        #     extract_stations_dict[extract_stations[obs_index][0]] = [extract_stations[obs_index][1],
        #                                                              extract_stations[obs_index][2],
        #                                                              extract_stations[obs_index][3],
        #                                                              extract_stations[obs_index][4]]

        for i in range(len(extract_stations)):
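
The snippet breaks off inside the station loop, and the elided body is left as-is above. Purely as an illustration (not the project's code), such a driver could call update_discharge_obs from Example 6 once per station row; every argument value below is a placeholder.

# Hypothetical illustration only; method, timestep, and the time window are placeholders.
for i in range(len(extract_stations)):
    target_model = extract_stations[i][3]
    update_discharge_obs(curw_sim_pool=curw_sim_pool, flo2d_model=target_model,
                         method='MME', timestep=60,
                         start_time='2020-01-01 00:00:00', end_time='2020-01-03 00:00:00')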