Example #1
def prepare_inflow(inflow_file_path, start, end, discharge_id, wl_id):

    obs_wl = None

    try:

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST, user=CURW_SIM_USERNAME, password=CURW_SIM_PASSWORD, port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        curw_obs_pool = get_Pool(host=CURW_OBS_HOST, user=CURW_OBS_USERNAME, password=CURW_OBS_PASSWORD, port=CURW_OBS_PORT,
                                 db=CURW_OBS_DATABASE)

        connection = curw_obs_pool.connection()

        # Extract the observed water level
        with connection.cursor() as cursor1:
            obs_end = datetime.strptime(start, COMMON_DATE_TIME_FORMAT) + timedelta(hours=10)
            cursor1.callproc('getWL', (wl_id, start, obs_end))
            result = cursor1.fetchone()
            obs_wl = result.get('value') if result is not None else None

        if obs_wl is None:
            obs_wl = 0.5

        TS = DisTS(pool=curw_sim_pool)
        discharge_ts = TS.get_timeseries(id_=discharge_id, start_date=start, end_date=end)

        inflow = []
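        # INFLOW.DAT-style records (interpretation inferred from usage): 'C' names the
        # channel inflow element, 'H' rows are (time in hours, discharge) pairs, and
        # 'R' rows appear to set reservoir elements with an initial water surface level.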

        inflow.append('0               0')
        inflow.append('C               0            8655')
        inflow.append('H               0               0')

        timeseries = discharge_ts
        for i in range(1, len(timeseries)):
            time_col = (str('%.1f' % (((timeseries[i][0] - timeseries[0][0]).total_seconds())/3600))).rjust(16)
            value_col = (str('%.1f' % (timeseries[i][1]))).rjust(16)
            inflow.append('H' + time_col + value_col)

        inflow.append('R            2265{}'.format((str(obs_wl)).rjust(16)))
        inflow.append('R            3559             6.6')

        write_to_file(inflow_file_path, data=inflow)

    except Exception:
        traceback.print_exc()
    finally:
        connection.close()
        destroy_Pool(curw_obs_pool)
        destroy_Pool(curw_sim_pool)
        print("Inflow generated")
Example #2
def prepare_inflow_150(inflow_file_path, start, end, discharge_id):
    try:
        curw_sim_pool = get_Pool(host=CURW_SIM_HOST, user=CURW_SIM_USERNAME, password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        # Extract discharge series
        TS = DisTS(pool=curw_sim_pool)
        discharge_ts = TS.get_timeseries(id_=discharge_id, start_date=start, end_date=end)

        inflow = []

        inflow.append('0           41550')
        inflow.append('C               0           41550')
        inflow.append('H               0               0')

        timeseries = discharge_ts
        for i in range(1, len(timeseries)):
            time_col = (str('%.1f' % (((timeseries[i][0] - timeseries[0][0]).total_seconds()) / 3600))).rjust(16)
            value_col = (str('%.1f' % (timeseries[i][1]))).rjust(16)
            inflow.append('H' + time_col + value_col)

        write_to_file(inflow_file_path, data=inflow)

    except Exception:
        traceback.print_exc()
    finally:
        destroy_Pool(curw_sim_pool)
        print("Inflow generated")
def prepare_mike_dis_input(start, end, tide_id):

    try:

        pool = get_Pool(host=con_params.CURW_SIM_HOST,
                        port=con_params.CURW_SIM_PORT,
                        user=con_params.CURW_SIM_USERNAME,
                        password=con_params.CURW_SIM_PASSWORD,
                        db=con_params.CURW_SIM_DATABASE)
        TS = Timeseries(pool)
        ts = TS.get_timeseries(id_=tide_id, start_date=start, end_date=end)
        ts.insert(0, ['time', 'value'])
        ts_df = list_of_lists_to_df_first_row_as_columns(ts)
        ts_df['value'] = ts_df['value'].astype('float64')

        tide_ts_df = pd.DataFrame()
        tide_ts_df['time'] = pd.date_range(start=start, end=end, freq='15min')

        tide_ts_df = pd.merge(tide_ts_df, ts_df, how="left", on='time')

        tide_ts_df.set_index('time', inplace=True)

        processed_tide_ts_df = replace_negative_99999_with_nan(tide_ts_df)

        if np.isnan(processed_tide_ts_df.iloc[-1, 0]):
            processed_tide_ts_df.iloc[-1, 0] = 0

        processed_tide_ts_df = processed_tide_ts_df.dropna()

        return processed_tide_ts_df

    except Exception:
        traceback.print_exc()
    finally:
        destroy_Pool(pool)
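
prepare_mike_dis_input returns a pandas DataFrame indexed by time; a hypothetical caller could serialize it for MIKE (the dates, tide id, and output path below are assumptions):

tide_df = prepare_mike_dis_input(start='2019-05-24 00:00:00',
                                 end='2019-05-26 00:00:00',
                                 tide_id='<tide_hash_id>')
if tide_df is not None:
    tide_df.to_csv('mike_tide.txt')  # placeholder output path
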
Example #4
def extract_active_curw_obs_rainfall_stations(start_time, end_time):
    """
        Extract currently active (active within last week) rainfall obs stations
        :return:
        """
    # Connect to the database
    pool = get_Pool(host=con_params.CURW_OBS_HOST, port=con_params.CURW_OBS_PORT, user=con_params.CURW_OBS_USERNAME,
                    password=con_params.CURW_OBS_PASSWORD, db=con_params.CURW_OBS_DATABASE)

    obs_stations = [['hash_id', 'station_id', 'station_name', 'latitude', 'longitude']]

    connection = pool.connection()

    try:

        with connection.cursor() as cursor1:
            cursor1.callproc('getActiveRfStationsAtGivenTime', (start_time, end_time))
            results = cursor1.fetchall()

            for result in results:
                obs_stations.append([result.get('hash_id'), result.get('station_id'), result.get('station_name'),
                                     result.get('latitude'), result.get('longitude')])

        # Write to csv file
        create_csv(os.path.join(ROOT_DIR, 'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv'), obs_stations)

    except Exception as ex:
        traceback.print_exc()
    finally:
        connection.close()
        destroy_Pool(pool)
Example #5
def import_old_data():
    # Connect to the database
    pool = get_Pool(host=CURW_FCST_HOST,
                    user=CURW_FCST_USERNAME,
                    password=CURW_FCST_PASSWORD,
                    port=CURW_FCST_PORT,
                    db=CURW_FCST_DATABASE)

    connection = pool.connection()

    curw_fcst_new_to_old_hash_id_mapping = read_csv(
        "curw_fcst_new_to_old_hash_id_mapping.csv")

    TS = Timeseries(pool=pool)

    try:

        for hash_index in range(len(curw_fcst_new_to_old_hash_id_mapping)):
            print("##### Hash index: ", hash_index, " #####")
            fgt_list = []
            # Extract fgts
            with connection.cursor() as cursor1:
                sql_statement = "select distinct `fgt` from `data_v3` where `id`=%s order by `fgt` desc;"
                cursor1.execute(
                    sql_statement,
                    curw_fcst_new_to_old_hash_id_mapping[hash_index][1])
                fgts = cursor1.fetchall()
                for fgt in fgts:
                    fgt_list.append(fgt.get('fgt'))

            for fgt in fgt_list:
                timeseries = []
                with connection.cursor() as cursor2:
                    sql_statement = "select * from `data_v3` where `id`=%s and `fgt`=%s;"
                    cursor2.execute(
                        sql_statement,
                        (curw_fcst_new_to_old_hash_id_mapping[hash_index][1],
                         fgt))
                    results = cursor2.fetchall()
                    for result in results:
                        timeseries.append([
                            curw_fcst_new_to_old_hash_id_mapping[hash_index]
                            [0],
                            result.get('time'),
                            result.get('fgt'),
                            result.get('value')
                        ])

                TS.insert_data(timeseries=timeseries, upsert=True)
                TS.update_start_date(
                    id_=curw_fcst_new_to_old_hash_id_mapping[hash_index][0],
                    start_date=fgt)

    except Exception as ex:
        traceback.print_exc()
    finally:
        connection.close()
        destroy_Pool(pool=pool)
        print()
Example #6
def prepare_mike_rf_input(start, end, step):

    try:
        mike_obs_stations = read_csv(os.path.join(ROOT_DIRECTORY, 'inputs', 'params', 'mike_rainfall_stations.csv'))
        # [hash_id,station_id,station_name,latitude,longitude]

        station_dict = {}
        for i in range(len(mike_obs_stations)):
            # { station_id: [station_hash_id, station_name]
            station_dict[mike_obs_stations[i][1]] = [mike_obs_stations[i][0], mike_obs_stations[i][2]]

        ts_df = pd.DataFrame()
        ts_df['time'] = pd.date_range(start=start, end=end, freq='{}min'.format(step))

        obs_pool = get_Pool(host=con_params.CURW_OBS_HOST, port=con_params.CURW_OBS_PORT, user=con_params.CURW_OBS_USERNAME,
                        password=con_params.CURW_OBS_PASSWORD,
                        db=con_params.CURW_OBS_DATABASE)

        connection = obs_pool.connection()

        for obs_id in station_dict.keys():

            ts = extract_obs_rain_custom_min_intervals(connection=connection, id=station_dict.get(obs_id)[0],
                                                       time_step=step, start_time=start, end_time=end)
            ts.insert(0, ['time', obs_id])
            df = list_of_lists_to_df_first_row_as_columns(ts)
            df[obs_id] = df[obs_id].astype('float64')

            ts_df = pd.merge(ts_df, df, how="left", on='time')

        ts_df.set_index('time', inplace=True)

        mike_input = replace_negative_numbers_with_nan(ts_df)

        mike_input = mike_input.fillna('')
        mike_input = mike_input.round(1)

        for col in mike_input.columns:
            mike_input = mike_input.rename(columns={col: station_dict.get(col)[1]})

        return mike_input

    except Exception:
        traceback.print_exc()
    finally:
        connection.close()
        destroy_Pool(obs_pool)
Example #7
def prepare_outflow_250(outflow_file_path, start, end, tide_id):
    try:

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST,
                                 user=CURW_SIM_USERNAME,
                                 password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        TS = TideTS(pool=curw_sim_pool)
        tide_ts = TS.get_timeseries(id_=tide_id,
                                    start_date=start,
                                    end_date=end)

        tide_data = []
        timeseries = tide_ts
        for i in range(len(timeseries)):
            time_col = (str(
                '%.3f' %
                (((timeseries[i][0] - timeseries[0][0]).total_seconds()) /
                 3600))).rjust(16)
            value_col = (str('%.3f' % (timeseries[i][1]))).rjust(16)
            tide_data.append('S' + time_col + value_col)

        outflow = []
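        # OUTFLOW.DAT-style records (interpretation inferred from usage): 'K' rows list
        # outflow grid elements, an 'N' row opens a stage-time series for an element,
        # and the 'S' rows that follow are (time in hours, stage) pairs.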

        outflow.append('K              91')
        outflow.append('K             171')
        outflow.append('K             214')
        outflow.append('K             491')

        outflow.append('N             134               1')
        outflow.extend(tide_data)

        outflow.append('N             220               1')
        outflow.extend(tide_data)

        outflow.append('N             261               1')
        outflow.extend(tide_data)

        outflow.append('N             558               1')
        outflow.extend(tide_data)

        write_to_file(outflow_file_path, data=outflow)

        tail_file_path = os.path.join(os.getcwd(), 'outflow', 'tail_150.txt')
        with open(tail_file_path, "r") as tail_file:
            tail = tail_file.read()

        append_file_to_file(outflow_file_path, file_content=tail)

    except Exception:
        traceback.print_exc()
    finally:
        destroy_Pool(curw_sim_pool)
        print("Outflow generated")
def extract_active_curw_obs_rainfall_stations():
    """
        Extract currently active (active within last week) rainfall obs stations
        :return:
        """
    # Connect to the database
    pool = get_Pool(host=CURW_OBS_HOST,
                    port=CURW_OBS_PORT,
                    user=CURW_OBS_USERNAME,
                    password=CURW_OBS_PASSWORD,
                    db=CURW_OBS_DATABASE)

    obs_stations = [[
        'hash_id', 'station_id', 'station_name', 'latitude', 'longitude'
    ]]

    connection = pool.connection()

    try:

        with connection.cursor() as cursor1:
            cursor1.callproc(procname='getActiveRainfallObsStations')
            results = cursor1.fetchall()

            for result in results:
                # if (result.get('station_id') != 100083):
                obs_stations.append([
                    result.get('hash_id'),
                    result.get('station_id'),
                    result.get('station_name'),
                    result.get('latitude'),
                    result.get('longitude')
                ])

        # Write to csv file
        create_csv(
            'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv',
            obs_stations)

    except Exception as ex:
        traceback.print_exc()
    finally:
        connection.close()
        destroy_Pool(pool)
Example #9
def create_inflow(dir_path, run_date, run_time):

    try:
        config_path = os.path.join(os.getcwd(), 'inflowdat', 'config.json')
        config = json.loads(open(config_path).read())

        start_date_time = datetime.strptime('%s %s' % (run_date, run_time), '%Y-%m-%d %H:%M:%S')

        start = (datetime.strptime(start_date_time.strftime('%Y-%m-%d 00:00:00'), '%Y-%m-%d %H:%M:%S') - timedelta(days=2))\
            .strftime('%Y-%m-%d %H:%M:%S')
        end = (datetime.strptime(start, '%Y-%m-%d %H:%M:%S') + timedelta(days=5)).strftime('%Y-%m-%d %H:%M:%S')

        target_stations = read_attribute_from_config_file('station_names', config, True)

        output_dir = dir_path
        file_name = 'INFLOW.DAT'

        pool = get_Pool(host=FCST_HOST, port=FCST_PORT, user=FCST_USER, password=FCST_PASSWORD, db=FCST_DB)
        hechms_stations = get_hechms_stations(pool=pool)

        target_station_ids = []

        for i in range(len(target_stations)):
            target_station_ids.append(hechms_stations.get(target_stations[i])[0])

        print("target stations: ", target_station_ids)

        obs_wl = get_obs_waterlevel(station_id=OBS_WL_ID, start=start)
        if obs_wl is None:
            obs_wl = 0.5

        fcst_discharges = extract_fcst_discharge_ts(pool=pool, start=start, end=end, station_ids=target_station_ids)

        for id in target_station_ids:
            # file = '{}_{}'.format(id, file_name)
            file_path = os.path.join(output_dir, file_name)
            prepare_inflow(inflow_file_path=file_path, fcst_discharge_ts=fcst_discharges.get(id), obs_wl=obs_wl)

    except Exception as e:
        traceback.print_exc()
    finally:
        destroy_Pool(pool)
Example #10
        out_file_path = os.path.join(output_dir, output_file_name)

        if not os.path.exists(out_file_path):
            msg = 'no file :: {}'.format(out_file_path)
            logger.warning(msg)
            print(msg)
            exit(1)

        fgt = get_file_last_modified_time(out_file_path)
        print("fgt, ", fgt)

        timeseries = read_csv(out_file_path)

        pool = get_Pool(host=CURW_FCST_HOST,
                        port=CURW_FCST_PORT,
                        db=CURW_FCST_DATABASE,
                        user=CURW_FCST_USERNAME,
                        password=CURW_FCST_PASSWORD)

        hechms_stations = get_hechms_stations(pool=pool)

        station_id = hechms_stations.get(station_name)[0]
        lat = str(hechms_stations.get(station_name)[1])
        lon = str(hechms_stations.get(station_name)[2])

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)
Example #11
        if 'start_date' in config and (config['start_date'] != ""):
            run_date_str = config['start_date']
            # fgt = (datetime.strptime(run_date_str, '%Y-%m-%d') + timedelta(days=1)) \
            #     .strftime('%Y-%m-%d 23:45:00')
        else:
            run_date_str = (datetime.now() -
                            timedelta(days=1)).strftime('%Y-%m-%d')
            # fgt = datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S')

        daily_dir = 'STATIONS_{}'.format(run_date_str)

        output_dir = os.path.join(wrf_dir, daily_dir)

        pool = get_Pool(host=host,
                        port=port,
                        user=user,
                        password=password,
                        db=db)

        wrf_v3_stations = get_wrf_stations(pool)

        # # Retrieve db version.
        # conn = pool.get_conn()
        # with conn.cursor() as cursor:
        #     cursor.execute("SELECT VERSION()")
        #     data = cursor.fetchone()
        #     logger.info("Database version : %s " % data)
        # if conn is not None:
        #     pool.release(conn)

        variable_id = get_variable_id(pool=pool, variable=variable)
Example #12
def extract_distrubuted_hechms_outputs(target_model, db_user, db_pwd, db_host,
                                       db_name, out_file_path, run_date,
                                       run_time):
    """
    Config.json
    {
      "output_file_name": "DailyDischarge.csv",
      "output_dir": "",
      "run_date": "2019-05-24",
      "run_time": "00:00:00",
      "utc_offset": "",
      "sim_tag": "hourly_run",
      "model": "HECHMS",
      "version": "single",
      "unit": "m3/s",
      "unit_type": "Instantaneous",
      "variable": "Discharge",
      "station_name": "Hanwella"
    }
    """
    try:

        with open('/home/curw/git/distributed_hechms/uploads/config.json') as config_file:
            config = json.loads(config_file.read())

        # output related details: run_date and run_time are taken directly from the arguments

        utc_offset = read_attribute_from_config_file('utc_offset', config,
                                                     False)
        if utc_offset is None:
            utc_offset = ''

        # sim tag
        sim_tag = read_attribute_from_config_file('sim_tag', config, True)
        print("extract_distrubuted_hechms_outputs|sim_tag : ", sim_tag)
        # source details
        model = read_attribute_from_config_file('model', config, True)
        print("extract_distrubuted_hechms_outputs|model : ", model)

        version_config = read_attribute_from_config_file(
            'version_config', config, True)
        print("extract_distrubuted_hechms_outputs|version_config : ",
              version_config)

        version = version_config[target_model]
        print("extract_distrubuted_hechms_outputs|version : ", version)

        # unit details
        unit = read_attribute_from_config_file('unit', config, True)
        print("extract_distrubuted_hechms_outputs|unit : ", unit)
        unit_type = UnitType.getType(
            read_attribute_from_config_file('unit_type', config, True))
        print("extract_distrubuted_hechms_outputs|unit_type : ", unit_type)

        # variable details
        variable = read_attribute_from_config_file('variable', config, True)
        print("extract_distrubuted_hechms_outputs|variable : ", variable)

        # station details
        station_name = read_attribute_from_config_file('station_name', config,
                                                       True)
        print("extract_distrubuted_hechms_outputs|station_name : ",
              station_name)

        if not os.path.exists(out_file_path):
            msg = 'no file :: {}'.format(out_file_path)
            logger.warning(msg)
            print(msg)
            exit(1)

        fgt = get_file_last_modified_time(out_file_path)
        print("extract_distrubuted_hechms_outputs|fgt : ", fgt)

        timeseries = read_csv(out_file_path)

        pool = get_Pool(host=db_host,
                        port=3306,
                        db=db_name,
                        user=db_user,
                        password=db_pwd)

        hechms_stations = get_hechms_stations(pool=pool)
        print("extract_distrubuted_hechms_outputs|hechms_stations : ",
              hechms_stations)

        station_id = hechms_stations.get(station_name)[0]
        lat = str(hechms_stations.get(station_name)[1])
        lon = str(hechms_stations.get(station_name)[2])
        print("extract_distrubuted_hechms_outputs|[station_id, lat, lon] : ",
              [station_id, lat, lon])

        source_id = get_source_id(pool=pool, model=model, version=version)
        print("extract_distrubuted_hechms_outputs|source_id : ", source_id)

        variable_id = get_variable_id(pool=pool, variable=variable)
        print("extract_distrubuted_hechms_outputs|variable_id : ", variable_id)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)
        print("extract_distrubuted_hechms_outputs|unit_id : ", unit_id)

        tms_meta = {
            'sim_tag': sim_tag,
            'model': model,
            'version': version,
            'variable': variable,
            'unit': unit,
            'unit_type': unit_type.value,
            'latitude': lat,
            'longitude': lon,
            'station_id': station_id,
            'source_id': source_id,
            'variable_id': variable_id,
            'unit_id': unit_id
        }

        print("extract_distrubuted_hechms_outputs|tms_meta : ", tms_meta)
        utcOffset = getUTCOffset(utc_offset, default=True)

        if utcOffset != timedelta():
            tms_meta['utcOffset'] = utcOffset

        # Push timeseries to database
        save_forecast_timeseries_to_db(pool=pool,
                                       timeseries=timeseries,
                                       run_date=run_date,
                                       run_time=run_time,
                                       tms_meta=tms_meta,
                                       fgt=fgt)
        return {'Result': 'Success'}
    except Exception as e:
        logger.error('JSON config data loading error.')
        print('JSON config data loading error.')
        traceback.print_exc()
        return {'Result': 'Fail'}
Example #13
def insert_curw_obs_runs():

    hash_mapping = [['old_hash_id', 'new_hash_id']]

    try:
        # pool = get_Pool(host=CURW_OBS_HOST, port=CURW_OBS_PORT, user=CURW_OBS_USERNAME, password=CURW_OBS_PASSWORD,
        #         db=CURW_OBS_DATABASE)

        pool = get_Pool(host=HOST,
                        port=PORT,
                        user=USERNAME,
                        password=PASSWORD,
                        db=DATABASE)

        curw_old_obs_entries = read_csv('all_curw_obs.csv')

        for old_index in range(len(curw_old_obs_entries)):

            meta_data = {}

            old_hash_id = curw_old_obs_entries[old_index][0]
            run_name = curw_old_obs_entries[old_index][1]
            station_name = curw_old_obs_entries[old_index][4]
            latitude = curw_old_obs_entries[old_index][5]
            longitude = curw_old_obs_entries[old_index][6]
            description = curw_old_obs_entries[old_index][7]
            variable = curw_old_obs_entries[old_index][8]
            unit = curw_old_obs_entries[old_index][9]
            unit_type = curw_old_obs_entries[old_index][10]

            meta_data['run_name'] = run_name

            meta_data['variable'] = variable
            meta_data['unit'] = unit
            meta_data['unit_type'] = unit_type

            meta_data['latitude'] = latitude
            meta_data['longitude'] = longitude

            if variable == "WaterLevel":
                station_type = StationEnum.CUrW_WaterLevelGauge
            else:
                station_type = StationEnum.CUrW_WeatherStation

            meta_data['station_type'] = StationEnum.getTypeString(station_type)

            unit_id = get_unit_id(pool=pool,
                                  unit=unit,
                                  unit_type=UnitType.getType(unit_type))

            if unit_id is None:
                add_unit(pool=pool,
                         unit=unit,
                         unit_type=UnitType.getType(unit_type))
                unit_id = get_unit_id(pool=pool,
                                      unit=unit,
                                      unit_type=UnitType.getType(unit_type))

            variable_id = get_variable_id(pool=pool, variable=variable)

            if variable_id is None:
                add_variable(pool=pool, variable=variable)
                variable_id = get_variable_id(pool=pool, variable=variable)

            station_id = get_station_id(pool=pool,
                                        latitude=latitude,
                                        longitude=longitude,
                                        station_type=station_type)

            if station_id is None:
                add_station(pool=pool,
                            name=station_name,
                            latitude=latitude,
                            longitude=longitude,
                            station_type=station_type,
                            description=description)
                station_id = get_station_id(pool=pool,
                                            latitude=latitude,
                                            longitude=longitude,
                                            station_type=station_type)

            TS = Timeseries(pool=pool)

            tms_id = TS.get_timeseries_id_if_exists(meta_data=meta_data)

            meta_data['station_id'] = station_id
            meta_data['variable_id'] = variable_id
            meta_data['unit_id'] = unit_id

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['tms_id'] = tms_id
                TS.insert_run(run_meta=meta_data)

            hash_mapping.append([old_hash_id, tms_id])

        create_csv(file_name='curw_to_curw_obs_hash_id_mapping.csv',
                   data=hash_mapping)

    except Exception:
        traceback.print_exc()
        print(
            "Exception occurred while inserting run entries to curw_obs run table and making hash mapping"
        )
    finally:
        destroy_Pool(pool=pool)
Example #14
        config = json.loads(open('MME_config.json').read())

        start = read_attribute_from_config_file('start_time', config, True)
        end = read_attribute_from_config_file('end_time', config, True)

        target_stations = read_attribute_from_config_file(
            'station_names', config, True)

        output_dir = read_attribute_from_config_file('output_dir', config,
                                                     True)
        file_name = read_attribute_from_config_file('output_file_name', config,
                                                    True)

        pool = get_Pool(host=FCST_HOST,
                        port=FCST_PORT,
                        user=FCST_USER,
                        password=FCST_PASSWORD,
                        db=FCST_DB)
        hechms_stations = get_hechms_stations(pool=pool)

        target_station_ids = []

        for i in range(len(target_stations)):
            target_station_ids.append(
                hechms_stations.get(target_stations[i])[0])

        print("target stations: ", target_station_ids)

        obs_wl = get_obs_waterlevel(station_id=OBS_WL_ID, start=start)
        if obs_wl is None:
            obs_wl = 0.5
Example #15
def prepare_mike_rf_input(start, end):

    try:
        mike_obs_stations = read_csv(
            os.path.join(ROOT_DIRECTORY, 'inputs', 'params',
                         'mike_rainfall_stations.csv'))
        selected_obs_ids = []
        for station in mike_obs_stations:
            selected_obs_ids.append(str(station[1]))
        # [hash_id,station_id,station_name,latitude,longitude]

        #### process station based hybrid timeseries ####
        hybrid_ts_df = pd.DataFrame()
        hybrid_ts_df['time'] = pd.date_range(start=start, end=end, freq='5min')

        pool = get_Pool(host=con_params.CURW_SIM_HOST,
                        port=con_params.CURW_SIM_PORT,
                        user=con_params.CURW_SIM_USERNAME,
                        password=con_params.CURW_SIM_PASSWORD,
                        db=con_params.CURW_SIM_DATABASE)

        obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                            port=con_params.CURW_OBS_PORT,
                            user=con_params.CURW_OBS_USERNAME,
                            password=con_params.CURW_OBS_PASSWORD,
                            db=con_params.CURW_OBS_DATABASE)

        TS = Timeseries(pool)

        obs_id_hash_id_mapping = get_all_obs_rain_hashids_from_curw_sim(pool)
        obs_stations = extract_active_curw_obs_rainfall_stations(
            curw_obs_pool=obs_pool, start_time=start, end_time=end)
        obs_obs_mapping = find_nearest_obs_stations_for_mike_rainfall_stations(
            mike_obs_stations=mike_obs_stations, obs_stations=obs_stations)

        for obs_id in selected_obs_ids:

            if obs_id in obs_id_hash_id_mapping.keys():
                # taking data from curw_sim database (data prepared based on active stations for hechms)
                ts = TS.get_timeseries(id_=obs_id_hash_id_mapping.get(obs_id),
                                       start_date=start,
                                       end_date=end)
            else:
                ts = []

            ts.insert(0, ['time', obs_id])
            ts_df = list_of_lists_to_df_first_row_as_columns(ts)
            ts_df[obs_id] = ts_df[obs_id].astype('float64')

            hybrid_ts_df = pd.merge(hybrid_ts_df, ts_df, how="left", on='time')

        hybrid_ts_df.set_index('time', inplace=True)
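        # accumulate the 5-min station series into 15-min totals stamped at the
        # right edge of each interval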
        hybrid_ts_df = hybrid_ts_df.resample('15min',
                                             label='right',
                                             closed='right').sum()

        mike_input = replace_negative_numbers_with_nan(hybrid_ts_df)
        for col in mike_input.columns:
            if len(obs_obs_mapping[col]) > 0:
                print(col, obs_obs_mapping[col][0])
                mike_input[col] = mike_input[col].fillna(
                    mike_input[obs_obs_mapping[col][0]])
            if len(obs_obs_mapping[col]) > 1:
                print(col, obs_obs_mapping[col][1])
                mike_input[col] = mike_input[col].fillna(
                    mike_input[obs_obs_mapping[col][1]])

        mike_input = replace_nan_with_row_average(mike_input)

        mike_input = mike_input.round(1)

        station_name_dict = {}
        for i in range(len(mike_obs_stations)):
            station_name_dict[str(
                mike_obs_stations[i][1])] = mike_obs_stations[i][2]
        for col in mike_input.columns:
            mike_input = mike_input.rename(
                columns={col: station_name_dict.get(col)})

        return mike_input

    except Exception:
        traceback.print_exc()
    finally:
        destroy_Pool(pool)
        destroy_Pool(obs_pool)
Example #16
        hechms_stations = read_csv('hechms_stations.csv')

        # pool = get_Pool(host=CURW_FCST_HOST, port=CURW_FCST_PORT, user=CURW_FCST_USERNAME, password=CURW_FCST_PASSWORD,
        #         db=CURW_FCST_DATABASE)

        # ########
        # # test
        # ########

        USERNAME = "******"
        PASSWORD = "******"
        HOST = "127.0.0.1"
        PORT = 3306
        DATABASE = "curw_fcst"

        pool = get_Pool(host=HOST, port=PORT, user=USERNAME, password=PASSWORD, db=DATABASE)

        add_source(pool=pool, model=model, version=version, parameters=None)
        add_variable(pool=pool, variable=variable)
        add_unit(pool=pool, unit=unit, unit_type=unit_type)

        # add hechms output stations

        for i in range(len(hechms_stations)):
            station_name = hechms_stations[i][0]
            lat = hechms_stations[i][1]
            lon = hechms_stations[i][2]
            add_station(pool=pool, name=station_name, latitude="%.6f" % float(lat), longitude="%.6f" % float(lon),
                    station_type=StationEnum.HECHMS, description="hecHMS output station")

        destroy_Pool(pool=pool)
Example #17
def prepare_mike_rf_input(start, end, coefficients):

    try:

        #### process station based hybrid timeseries ####
        distinct_obs_ids = coefficients.curw_obs_id.unique()
        hybrid_ts_df = pd.DataFrame()
        hybrid_ts_df['time'] = pd.date_range(start=start, end=end, freq='5min')

        pool = get_Pool(host=con_params.CURW_SIM_HOST,
                        port=con_params.CURW_SIM_PORT,
                        user=con_params.CURW_SIM_USERNAME,
                        password=con_params.CURW_SIM_PASSWORD,
                        db=con_params.CURW_SIM_DATABASE)

        TS = Timeseries(pool)

        obs_id_hash_id_mapping = get_all_obs_rain_hashids_from_curw_sim(pool)

        for obs_id in distinct_obs_ids:
            # taking data from curw_sim database (data prepared based on active stations for hechms)
            ts = TS.get_timeseries(id_=obs_id_hash_id_mapping.get(str(obs_id)),
                                   start_date=start,
                                   end_date=end)
            ts.insert(0, ['time', obs_id])
            ts_df = list_of_lists_to_df_first_row_as_columns(ts)
            ts_df[obs_id] = ts_df[obs_id].astype('float64')

            hybrid_ts_df = pd.merge(hybrid_ts_df, ts_df, how="left", on='time')

        hybrid_ts_df.set_index('time', inplace=True)
        hybrid_ts_df = hybrid_ts_df.resample('15min',
                                             label='right',
                                             closed='right').sum()

        hybrid_ts_df = replace_negative_numbers_with_nan(hybrid_ts_df)
        hybrid_ts_df = replace_nan_with_row_average(hybrid_ts_df)

        #### process mike input ####

        distinct_names = coefficients.name.unique()
        mike_input = pd.DataFrame()
        mike_input_initialized = False
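        # each catchment series below is the coefficient-weighted sum of its
        # contributing obs station series (one weight per station, from `coefficients`)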

        for name in distinct_names:
            catchment_coefficients = coefficients[coefficients.name == name]
            # print(catchment_coefficients)
            catchment = pd.DataFrame()
            catchment_initialized = False
            for index, row in catchment_coefficients.iterrows():
                # print(index, row['curw_obs_id'], row['coefficient'])
                if not catchment_initialized:
                    catchment = (hybrid_ts_df[row['curw_obs_id']] *
                                 row['coefficient']).to_frame(
                                     name=row['curw_obs_id'])
                    catchment_initialized = True
                else:
                    new = (hybrid_ts_df[row['curw_obs_id']] *
                           row['coefficient']).to_frame(
                               name=row['curw_obs_id'])
                    catchment = pd.merge(catchment, new, how="left", on='time')

            if not mike_input_initialized:
                mike_input[name] = catchment.sum(axis=1)
                mike_input_initialized = True
            else:
                mike_input = pd.merge(
                    mike_input, (catchment.sum(axis=1)).to_frame(name=name),
                    how="left",
                    on='time')

        mike_input = mike_input.round(1)
        return mike_input

    except Exception:
        traceback.print_exc()
    finally:
        destroy_Pool(pool)
Example #18
        file_name = read_attribute_from_config_file('output_file_name', config)

        dis_id = read_attribute_from_config_file('dis_id', config)

        if start_time is None:
            start_time = (datetime.now() - timedelta(days=3)).strftime('%Y-%m-%d 00:00:00')
        else:
            check_time_format(time=start_time)

        if end_time is None:
            end_time = (datetime.now() + timedelta(days=2)).strftime('%Y-%m-%d 00:00:00')
        else:
            check_time_format(time=end_time)

        pool = get_Pool(host=con_params.CURW_SIM_HOST, port=con_params.CURW_SIM_PORT, user=con_params.CURW_SIM_USERNAME,
                        password=con_params.CURW_SIM_PASSWORD,
                        db=con_params.CURW_SIM_DATABASE)
        TS = Timeseries(pool)

        latest_fgt = TS.get_obs_end(dis_id)

        if output_dir is None:
            output_dir = os.path.join(OUTPUT_DIRECTORY, (datetime.utcnow() + timedelta(hours=5, minutes=30)).strftime('%Y-%m-%d_%H-00-00'))
        if file_name is None:
            file_name = 'mike_dis.txt'  #.format(latest_fgt.strftime('%Y-%m-%d_%H-%M-00'))

        mike_dis_file_path = os.path.join(output_dir, file_name)

        if not os.path.isfile(mike_dis_file_path):
            makedir_if_not_exist_given_filepath(mike_dis_file_path)
            print("{} start preparing mike discharge input".format(datetime.now()))
Example #19
            start_time = (datetime.now() -
                          timedelta(days=3)).strftime('%Y-%m-%d 23:00:00')
        else:
            check_time_format(time=start_time, model=flo2d_model)

        if end_time is None:
            end_time = (datetime.now() +
                        timedelta(days=1)).strftime('%Y-%m-%d 23:00:00')
        else:
            check_time_format(time=end_time, model=flo2d_model)

        timestep = 60

        curw_sim_pool = get_Pool(host=con_params.CURW_SIM_HOST,
                                 user=con_params.CURW_SIM_USERNAME,
                                 password=con_params.CURW_SIM_PASSWORD,
                                 port=con_params.CURW_SIM_PORT,
                                 db=con_params.CURW_SIM_DATABASE)

        curw_fcst_pool = get_Pool(host=con_params.CURW_FCST_HOST,
                                  user=con_params.CURW_FCST_USERNAME,
                                  password=con_params.CURW_FCST_PASSWORD,
                                  port=con_params.CURW_FCST_PORT,
                                  db=con_params.CURW_FCST_DATABASE)

        print("{} : ####### Insert hechms discharge series for {}.".format(
            datetime.now(), flo2d_model))
        update_discharge_from_hechms(curw_sim_pool=curw_sim_pool,
                                     curw_fcst_pool=curw_fcst_pool,
                                     flo2d_model=flo2d_model,
                                     method=method,
Example #20
def prepare_rfields(root_dir,
                    start_time,
                    end_time,
                    target_model,
                    interpolation_method="MME"):
    """
    Create rfields for flo2d
    :param root_dir: rfield root directory
    :param start_time:
    :param end_time:
    :param target_model: FLO2D model (e.g. flo2d_250, flo2d_150)
    :param interpolation_method: value interpolation method (e.g. "MME")
    :return:
    """

    end_time = datetime.strptime(end_time, COMMON_DATE_TIME_FORMAT)
    start_time = datetime.strptime(start_time, COMMON_DATE_TIME_FORMAT)

    if end_time < start_time:
        exit(1)

    length = 0
    if target_model == "flo2d_250":
        length = 9348
        timestep = 5
    elif target_model == "flo2d_150":
        length = 41767
        timestep = 15

    pool = get_Pool(host=CURW_SIM_HOST,
                    port=CURW_SIM_PORT,
                    user=CURW_SIM_USERNAME,
                    db=CURW_SIM_DATABASE,
                    password=CURW_SIM_PASSWORD)

    connection = pool.connection()

    try:
        timestamp = start_time
        while timestamp <= end_time:
            raincell = []
            # Extract raincell from db
            with connection.cursor() as cursor1:
                cursor1.callproc(
                    'prepare_flo2d_raincell',
                    (target_model, interpolation_method, timestamp))
                results = cursor1.fetchall()
                for result in results:
                    raincell.append(str(result.get('value')))

            day = timestamp.date()

            try:
                os.makedirs("{}/{}".format(root_dir, day))
            except FileExistsError:
                # directory already exists
                pass

            if len(raincell) == length:
                write_to_file(
                    "{}/{}/{}_{}.txt".format(
                        root_dir, day, interpolation_method,
                        timestamp.strftime('%Y-%m-%d_%H-%M')), raincell)

            timestamp = timestamp + timedelta(minutes=timestep)

    except Exception as ex:
        traceback.print_exc()
    finally:
        connection.close()
        print("{} {} generation process completed".format(
            datetime.now(), target_model))
Example #21
        # variable details
        variable = read_attribute_from_config_file('variable', config, True)

        # station details
        station_name = read_attribute_from_config_file('station_name', config,
                                                       True)

        out_file_path = os.path.join(output_dir, run_date, run_time,
                                     output_file_name)

        timeseries = read_csv(out_file_path)

        pool = get_Pool(host=db_config['host'],
                        port=db_config['port'],
                        user=db_config['user'],
                        password=db_config['password'],
                        db=db_config['db'])

        hechms_stations = get_hechms_stations(pool=pool)

        station_id = hechms_stations.get(station_name)[0]
        lat = str(hechms_stations.get(station_name)[1])
        lon = str(hechms_stations.get(station_name)[2])

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)
Example #22
def update_rainfall_obs(flo2d_model, method, grid_interpolation, timestep,
                        start_time, end_time):
    """
    Update rainfall observations for flo2d models
    :param flo2d_model: flo2d model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep: output timeseries timestep
    :return:
    """

    obs_start = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')

    try:

        # Connect to the database
        curw_obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                                 user=con_params.CURW_OBS_USERNAME,
                                 password=con_params.CURW_OBS_PASSWORD,
                                 port=con_params.CURW_OBS_PORT,
                                 db=con_params.CURW_OBS_DATABASE)

        curw_obs_connection = curw_obs_pool.connection()

        curw_sim_pool = get_Pool(host=con_params.CURW_SIM_HOST,
                                 user=con_params.CURW_SIM_USERNAME,
                                 password=con_params.CURW_SIM_PASSWORD,
                                 port=con_params.CURW_SIM_PORT,
                                 db=con_params.CURW_SIM_DATABASE)

        TS = Sim_Timeseries(pool=curw_sim_pool)

        # [hash_id, station_id, station_name, latitude, longitude]
        # active_obs_stations = read_csv(os.path.join(ROOT_DIR,'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv'))
        active_obs_stations = extract_active_curw_obs_rainfall_stations(
            start_time=start_time, end_time=end_time)[1:]
        flo2d_grids = read_csv(
            os.path.join(ROOT_DIR, 'grids/flo2d/{}m.csv'.format(
                flo2d_model)))  # [Grid_ ID, X(longitude), Y(latitude)]

        stations_dict_for_obs = {}  # keys: obs station id , value: hash id

        for obs_index in range(len(active_obs_stations)):
            stations_dict_for_obs[active_obs_stations[obs_index]
                                  [1]] = active_obs_stations[obs_index][0]

        # flo2d_obs_mapping = get_flo2d_cells_to_obs_grid_mappings(pool=curw_sim_pool, grid_interpolation=grid_interpolation, flo2d_model=flo2d_model)
        flo2d_obs_mapping = find_nearest_obs_stations_for_flo2d_stations(
            flo2d_stations_csv=os.path.join(
                ROOT_DIR, 'grids/flo2d/{}m.csv'.format(flo2d_model)),
            obs_stations=active_obs_stations,
            flo2d_model=flo2d_model)

        for flo2d_index in range(len(flo2d_grids)):
            lat = flo2d_grids[flo2d_index][2]
            lon = flo2d_grids[flo2d_index][1]
            cell_id = flo2d_grids[flo2d_index][0]
            meta_data = {
                'latitude': float('%.6f' % float(lat)),
                'longitude': float('%.6f' % float(lon)),
                'model': flo2d_model,
                'method': method,
                'grid_id': '{}_{}_{}'.format(flo2d_model, grid_interpolation, (str(cell_id)).zfill(10))
            }

            tms_id = TS.get_timeseries_id(grid_id=meta_data.get('grid_id'),
                                          method=meta_data.get('method'))

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            print("grid_id:", cell_id)
            print("grid map:", flo2d_obs_mapping.get(cell_id))
            obs1_station_id = flo2d_obs_mapping.get(cell_id)[0]
            obs2_station_id = flo2d_obs_mapping.get(cell_id)[1]
            obs3_station_id = flo2d_obs_mapping.get(cell_id)[2]
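
            # the mapping yields up to three nearest obs stations per flo2d cell;
            # a station id equal to str(-1) marks a missing neighbour (see the checks below)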

            obs_timeseries = []

            if timestep == 5:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_5_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id,
                        end_time=end_time)

                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_5_min_ts(newly_extracted_timeseries=ts,
                                             expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_5_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id,
                            end_time=end_time)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_5_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_5_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id,
                                end_time=end_time)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_5_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])
            elif timestep == 15:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_15_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id,
                        end_time=end_time)

                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_15_min_ts(newly_extracted_timeseries=ts,
                                              expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_15_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id,
                            end_time=end_time)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_15_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_15_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id,
                                end_time=end_time)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_15_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])

            for i in range(len(obs_timeseries)):
                if obs_timeseries[i][1] == -99999:
                    obs_timeseries[i][1] = 0

            print("### obs timeseries length ###", len(obs_timeseries))
            if obs_timeseries is not None and len(
                    obs_timeseries) > 0 and obs_timeseries[-1][0] != end_time:
                obs_timeseries.append(
                    [datetime.strptime(end_time, DATE_TIME_FORMAT), 0])

            final_ts = process_continuous_ts(original_ts=obs_timeseries,
                                             expected_start=datetime.strptime(
                                                 start_time, DATE_TIME_FORMAT),
                                             filling_value=0,
                                             timestep=timestep)

            if final_ts is not None and len(final_ts) > 0:
                TS.insert_data(timeseries=final_ts, tms_id=tms_id, upsert=True)
                TS.update_latest_obs(id_=tms_id, obs_end=(final_ts[-1][1]))

    except Exception as e:
        traceback.print_exc()
        logger.error(
            "Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        curw_obs_connection.close()
        destroy_Pool(pool=curw_sim_pool)
        destroy_Pool(pool=curw_obs_pool)
        logger.info("Process finished")
                ])

        tide_ts = append_ts(original_ts=tide_ts, new_ts=ts)

    avg_tide_ts = average_timeseries(tide_ts)

    return avg_tide_ts


if __name__ == "__main__":

    try:

        curw_obs_pool = get_Pool(host=CURW_OBS_HOST,
                                 user=CURW_OBS_USERNAME,
                                 password=CURW_OBS_PASSWORD,
                                 port=CURW_OBS_PORT,
                                 db=CURW_OBS_DATABASE)

        connection = curw_obs_pool.connection()

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST,
                                 user=CURW_SIM_USERNAME,
                                 password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        # [station_name,latitude,longitude,target]
        extract_stations = read_csv('grids/tide_stations/extract_stations.csv')
        extract_stations_dict = {
        }  # keys: station_name , value: [latitude, longitude, target_model]
Example #24
def map_curw_id(result):

    try:
        # create the pool up front so both branches (and the finally clause) can use it
        pool = get_Pool(host=CURW_OBS_HOST,
                        port=CURW_OBS_PORT,
                        user=CURW_OBS_USERNAME,
                        password=CURW_OBS_PASSWORD,
                        db=CURW_OBS_DATABASE)

        # if curw_id does not exist in the table, retrieve metadata to generate a curw_obs hash id
        curw_id = check_id(result)
        if curw_id is None:
            print("The event id does not exist in the id_mapping table")
            # generate a new obs hash id; for that, get the metadata
            meta_data = get_meta_data(result)
            #print("*****************")
            #print(meta_data)
            dict1 = meta_data[0]
            dict2 = meta_data[1]
            dict3 = meta_data[2]
            dict4 = meta_data[3]

            run_name = dict1['run_name']
            start_date = dict1['start_date']
            station_name = dict2['station_name']
            latitude = dict2['latitude']
            longitude = dict2['longitude']
            description = dict2['description']
            variable = dict3['variable']
            unit = dict4['unit']
            unit_type = dict4['unit-type']

            obs_hash_id = generate_curw_obs_hash_id(
                pool,
                variable=variable,
                unit=unit,
                unit_type=unit_type,
                latitude=latitude,
                longitude=longitude,
                run_name=run_name,
                station_name=station_name,
                description=description,
                start_date=start_date.strftime(COMMON_DATE_FORMAT))

            # map the newly generated obs_hash_id to the curw_id
            insert_id_rows(result, obs_hash_id)

            # extract the timeseries and insert it into the obs db
            timeseries = extract_timeseries(obs_hash_id)
            insert_timeseries(pool=pool,
                              timeseries=timeseries,
                              tms_id=obs_hash_id,
                              end_date=timeseries[-1][0])
        else:
            # the mapping already exists; fetch the obs hash id relevant to the curw_id
            obs_hash = get_obs_hash(result)

            # extract the timeseries and insert it into the obs db
            timeseries = extract_timeseries(obs_hash)
            insert_timeseries(pool=pool,
                              timeseries=timeseries,
                              tms_id=obs_hash,
                              end_date=timeseries[-1][0])

    except Exception as e:
        traceback.print_exc()
    finally:
        if pool is not None:
            destroy_Pool(pool=pool)
        print("Process finished")
Exemple #25
0
def extract_distrubuted_hechms_outputs(output_file_name, output_dir, run_date,
                                       run_time):
    """
    Config.json 
    {
      "output_file_name": "DailyDischarge.csv",
      "output_dir": "",

      "run_date": "2019-05-24",
      "run_time": "00:00:00",
      "utc_offset": "",

      "sim_tag": "hourly_run",

      "model": "HECHMS",
      "version": "single",

      "unit": "m3/s",
      "unit_type": "Instantaneous",

      "variable": "Discharge",

      "station_name": "Hanwella"
    }

    """
    try:

        config = json.loads(open('config.json').read())

        # output file details come directly from the function arguments;
        # everything else is read from config.json

        utc_offset = read_attribute_from_config_file('utc_offset', config,
                                                     False)
        if utc_offset is None:
            utc_offset = ''

        # sim tag
        sim_tag = read_attribute_from_config_file('sim_tag', config, True)

        # source details
        model = read_attribute_from_config_file('model', config, True)
        version = read_attribute_from_config_file('version', config, True)

        # unit details
        unit = read_attribute_from_config_file('unit', config, True)
        unit_type = UnitType.getType(
            read_attribute_from_config_file('unit_type', config, True))

        # variable details
        variable = read_attribute_from_config_file('variable', config, True)

        # station details
        station_name = read_attribute_from_config_file('station_name', config,
                                                       True)

        out_file_path = os.path.join(output_dir, output_file_name)

        if not os.path.exists(out_file_path):
            msg = 'no file :: {}'.format(out_file_path)
            logger.warning(msg)
            print(msg)
            exit(1)

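        # use the output file's last modified time as the forecast generated time (fgt)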
        fgt = get_file_last_modified_time(out_file_path)
        print("fgt, ", fgt)

        timeseries = read_csv(out_file_path)

        pool = get_Pool(host=CURW_FCST_HOST,
                        port=CURW_FCST_PORT,
                        db=CURW_FCST_DATABASE,
                        user=CURW_FCST_USERNAME,
                        password=CURW_FCST_PASSWORD)

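        # hechms_stations maps station_name -> [station_id, latitude, longitude]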
        hechms_stations = get_hechms_stations(pool=pool)

        station_id = hechms_stations.get(station_name)[0]
        lat = str(hechms_stations.get(station_name)[1])
        lon = str(hechms_stations.get(station_name)[2])

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)

        tms_meta = {
            'sim_tag': sim_tag,
            'model': model,
            'version': version,
            'variable': variable,
            'unit': unit,
            'unit_type': unit_type.value,
            'latitude': lat,
            'longitude': lon,
            'station_id': station_id,
            'source_id': source_id,
            'variable_id': variable_id,
            'unit_id': unit_id
        }

        utcOffset = getUTCOffset(utc_offset, default=True)

        if utcOffset != timedelta():
            tms_meta['utcOffset'] = utcOffset

        # Push timeseries to database
        save_forecast_timeseries_to_db(pool=pool,
                                       timeseries=timeseries,
                                       run_date=run_date,
                                       run_time=run_time,
                                       tms_meta=tms_meta,
                                       fgt=fgt)

    except Exception as e:
        logger.error('Exception occurred while extracting HecHMS outputs.')
        print('Exception occurred while extracting HecHMS outputs.')
        traceback.print_exc()
        traceback.print_exc()
    finally:
        logger.info("Process finished.")
        print("Process finished.")
Exemple #26
0
def upload_waterlevels(dir_path, ts_start_date, ts_start_time, run_date, run_time):
    """
    Config.json
    {
      "HYCHAN_OUT_FILE": "HYCHAN.OUT",
      "TIMDEP_FILE": "TIMDEP.OUT",
      "output_dir": "",

      "run_date": "2019-05-24",
      "run_time": "",
      "ts_start_date": "",
      "ts_start_time": "",
      "utc_offset": "",

      "sim_tag": "",

      "model": "WRF",
      "version": "v3",

      "unit": "mm",
      "unit_type": "Accumulative",

      "variable": "Precipitation"
    }

    """
    try:
        config_path = os.path.join(os.getcwd(), 'extract', 'config_curw_fcst.json')
        config = json.loads(open(config_path).read())

        # flo2D related details
        HYCHAN_OUT_FILE = read_attribute_from_config_file('HYCHAN_OUT_FILE', config, True)
        TIMDEP_FILE = read_attribute_from_config_file('TIMDEP_FILE', config, True)
        output_dir = dir_path

        utc_offset = read_attribute_from_config_file('utc_offset', config, False)
        if utc_offset is None:
            utc_offset = ''

        # sim tag
        sim_tag = read_attribute_from_config_file('sim_tag', config, True)

        # source details
        model = read_attribute_from_config_file('model', config, True)
        version = read_attribute_from_config_file('version', config, True)

        # unit details
        unit = read_attribute_from_config_file('unit', config, True)
        unit_type = UnitType.getType(read_attribute_from_config_file('unit_type', config, True))

        # variable details
        variable = read_attribute_from_config_file('variable', config, True)

        hychan_out_file_path = os.path.join(output_dir, HYCHAN_OUT_FILE)
        timdep_file_path = os.path.join(output_dir, TIMDEP_FILE)

        pool = get_Pool(host=CURW_FCST_HOST, port=CURW_FCST_PORT, db=CURW_FCST_DATABASE, user=CURW_FCST_USERNAME,
                        password=CURW_FCST_PASSWORD)

        # pool = get_Pool(host=HOST, port=PORT, user=USERNAME, password=PASSWORD, db=DATABASE)

        flo2d_model_name = '{}_{}'.format(model, version)

        flo2d_source = json.loads(get_source_parameters(pool=pool, model=model, version=version))

        print("############### source ############", flo2d_source)

        flo2d_stations = get_flo2d_output_stations(pool=pool, flo2d_model=StationEnum.getType(flo2d_model_name))

        print("############### 1st occurrence ############", flo2d_stations)

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)

        tms_meta = {
            'sim_tag': sim_tag,
            'model': model,
            'version': version,
            'variable': variable,
            'unit': unit,
            'unit_type': unit_type.value,
            'source_id': source_id,
            'variable_id': variable_id,
            'unit_id': unit_id
        }

        CHANNEL_CELL_MAP = flo2d_source["CHANNEL_CELL_MAP"]

        FLOOD_PLAIN_CELL_MAP = flo2d_source["FLOOD_PLAIN_CELL_MAP"]

        ELEMENT_NUMBERS = CHANNEL_CELL_MAP.keys()
        FLOOD_ELEMENT_NUMBERS = FLOOD_PLAIN_CELL_MAP.keys()
        SERIES_LENGTH = 0
        MISSING_VALUE = -999

        utcOffset = getUTCOffset(utc_offset, default=True)

        print('Extract Water Level Result of FLO2D on', run_date, '@', run_time, 'with Base time of', ts_start_date,
              '@', ts_start_time)

        # Check that HYCHAN.OUT exists; bail out early instead of crashing when it is opened below
        if not os.path.exists(hychan_out_file_path):
            msg = 'Unable to find file : {}'.format(hychan_out_file_path)
            logger.warning(msg)
            print(msg)
            return

        #####################################
        # Calculate the size of time series #
        #####################################
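        # first pass over HYCHAN.OUT: count the rows of one hydrograph block to
        # learn the series length, since a block has no explicit terminator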
        bufsize = 65536
        with open(hychan_out_file_path) as infile:
            isWaterLevelLines = False
            isCounting = False
            countSeriesSize = 0  # HACK: When it comes to the end of file, unable to detect end of time series
            while True:
                lines = infile.readlines(bufsize)
                if not lines or SERIES_LENGTH:
                    break
                for line in lines:
                    if line.startswith('CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                        isWaterLevelLines = True
                    elif isWaterLevelLines:
                        cols = line.split()
                        if len(cols) > 0 and cols[0].replace('.', '', 1).isdigit():
                            countSeriesSize += 1
                            isCounting = True
                        elif isWaterLevelLines and isCounting:
                            SERIES_LENGTH = countSeriesSize
                            break

        print('Series Length is :', SERIES_LENGTH)
        bufsize = 65536
        #################################################################
        # Extract Channel Water Level elevations from HYCHAN.OUT file   #
        #################################################################
        print('Extract Channel Water Level Result of FLO2D (HYCHAN.OUT) on', run_date, '@', run_time,
              'with Base time of',
              ts_start_date, '@', ts_start_time)
        with open(hychan_out_file_path) as infile:
            isWaterLevelLines = False
            isSeriesComplete = False
            waterLevelLines = []
            seriesSize = 0  # HACK: When it comes to the end of file, unable to detect end of time series
            while True:
                lines = infile.readlines(bufsize)
                if not lines:
                    break
                for line in lines:
                    if line.startswith('CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                        seriesSize = 0
                        elementNo = line.split()[5]

                        if elementNo in ELEMENT_NUMBERS:
                            isWaterLevelLines = True
                            waterLevelLines.append(line)
                        else:
                            isWaterLevelLines = False

                    elif isWaterLevelLines:
                        cols = line.split()
                        if len(cols) > 0 and isfloat(cols[0]):
                            seriesSize += 1
                            waterLevelLines.append(line)

                            if seriesSize == SERIES_LENGTH:
                                isSeriesComplete = True

                    if isSeriesComplete:
                        baseTime = datetime.strptime('%s %s' % (ts_start_date, ts_start_time), '%Y-%m-%d %H:%M:%S')
                        timeseries = []
                        elementNo = waterLevelLines[0].split()[5]
                        # print('Extracted Cell No', elementNo, CHANNEL_CELL_MAP[elementNo])
                        for ts in waterLevelLines[1:]:
                            v = ts.split()
                            if len(v) < 1:
                                continue
                            # Get flood level (Elevation)
                            value = v[1]
                            # Get flood depth (Depth)
                            # value = v[2]
                            if not isfloat(value) or value == 'NaN':
                                continue  # skip missing or NaN values
                            timeStep = float(v[0])
                            currentStepTime = baseTime + timedelta(hours=timeStep)
                            dateAndTime = currentStepTime.strftime("%Y-%m-%d %H:%M:%S")
                            timeseries.append([dateAndTime, value])

                        # Save Forecast values into Database
                        opts = {
                            'elementNo': elementNo,
                            'tms_meta': tms_meta
                        }
                        # print('>>>>>', opts)
                        if utcOffset != timedelta():
                            opts['utcOffset'] = utcOffset

                        # Push timeseries to database
                        save_forecast_timeseries_to_db(pool=pool, timeseries=timeseries,
                                                       run_date=run_date, run_time=run_time, opts=opts,
                                                       flo2d_stations=flo2d_stations)

                        isWaterLevelLines = False
                        isSeriesComplete = False
                        waterLevelLines = []
                # -- END for loop
            # -- END while loop

        #################################################################
        # Extract Flood Plain water elevations from TIMEDEP.OUT file    #
        #################################################################

        if not os.path.exists(timdep_file_path):
            msg = 'Unable to find file : {}'.format(timdep_file_path)
            logger.warning(msg)
            print(msg)
            return

        print('Extract Flood Plain Water Level Result of FLO2D (TIMEDEP.OUT) on', run_date, '@', run_time,
              'with Base time of', ts_start_date,
              '@', ts_start_time)

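        # TIMDEP.OUT is organised in blocks: a single-column model-time line
        # followed by rows of cell water levels; hitting the next time line
        # flushes the previous block into the per-element series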
        with open(timdep_file_path) as infile:
            waterLevelLines = []
            # build with a dict comprehension so every element gets its own list;
            # dict.fromkeys(keys, []) would share one list object across all keys
            waterLevelSeriesDict = {elementNo: [] for elementNo in FLOOD_ELEMENT_NUMBERS}
            while True:
                lines = infile.readlines(bufsize)
                if not lines:
                    break
                for line in lines:
                    if len(line.split()) == 1:
                        if len(waterLevelLines) > 0:
                            waterLevels = get_water_level_of_channels(waterLevelLines, FLOOD_ELEMENT_NUMBERS)

                            # Get Time stamp Ref:http://stackoverflow.com/a/13685221/1461060
                            # print('waterLevelLines[0].split() : ', waterLevelLines[0].split())
                            ModelTime = float(waterLevelLines[0].split()[0])
                            baseTime = datetime.strptime('%s %s' % (ts_start_date, ts_start_time), '%Y-%m-%d %H:%M:%S')
                            currentStepTime = baseTime + timedelta(hours=ModelTime)
                            dateAndTime = currentStepTime.strftime("%Y-%m-%d %H:%M:%S")

                            for elementNo in FLOOD_ELEMENT_NUMBERS:
                                tmpTS = waterLevelSeriesDict[elementNo][:]
                                if elementNo in waterLevels:
                                    tmpTS.append([dateAndTime, waterLevels[elementNo]])
                                else:
                                    tmpTS.append([dateAndTime, MISSING_VALUE])
                                waterLevelSeriesDict[elementNo] = tmpTS

                            waterLevelLines = []
                    waterLevelLines.append(line)

            # print('len(FLOOD_ELEMENT_NUMBERS) : ', len(FLOOD_ELEMENT_NUMBERS))
            for elementNo in FLOOD_ELEMENT_NUMBERS:

                # Save Forecast values into Database
                opts = {
                    'elementNo': elementNo,
                    'tms_meta': tms_meta
                }
                if utcOffset != timedelta():
                    opts['utcOffset'] = utcOffset

                # Push timeseries to database
                save_forecast_timeseries_to_db(pool=pool, timeseries=waterLevelSeriesDict[elementNo],
                                               run_date=run_date, run_time=run_time, opts=opts,
                                               flo2d_stations=flo2d_stations)

    except Exception as e:
        logger.error('Exception occurred while extracting FLO2D water levels.')
        print('Exception occurred while extracting FLO2D water levels.')
        traceback.print_exc()
    finally:
        logger.info("Process finished.")
        print("Process finished.")
Exemple #27
0
            else:
                return None
    except Exception as exception:
        error_message = "Retrieving template failed."
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()


set_db_config_file_path(os.path.join(ROOT_DIRECTORY, 'db_adapter_config.json'))

pool = get_Pool(host=con_params.CURW_FCST_HOST,
                port=con_params.CURW_FCST_PORT,
                db=con_params.CURW_FCST_DATABASE,
                user=con_params.CURW_FCST_USERNAME,
                password=con_params.CURW_FCST_PASSWORD)

source_id = 1
variable_id = 1
sim_tag = "test"
fgt = "2020-04-20 00:00:00"
run_info = {"test": "test1"}
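# the template archive is read as raw bytes so it can be stored as a BLOB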
template_path = "D:\\flo2d_output\\flo2d_150\\2020-03-16\\02-00-08\\template.tar.gz"
template = convertToBinaryData(template_path)
insert_run_metadata(pool=pool,
                    source_id=source_id,
                    variable_id=variable_id,
                    sim_tag=sim_tag,
                    fgt=fgt,
Exemple #28
0
def update_rainfall_obs(target_model, method, timestep, start_time, end_time):
    """
    Update rainfall observations for flo2d models
    :param model: target model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep:
    :return:
    """
    obs_start = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')

    curw_obs_pool = curw_sim_pool = curw_obs_connection = None

    try:

        # Connect to the database
        curw_obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                                 user=con_params.CURW_OBS_USERNAME,
                                 password=con_params.CURW_OBS_PASSWORD,
                                 port=con_params.CURW_OBS_PORT,
                                 db=con_params.CURW_OBS_DATABASE)

        curw_obs_connection = curw_obs_pool.connection()

        curw_sim_pool = get_Pool(host=con_params.CURW_SIM_HOST,
                                 user=con_params.CURW_SIM_USERNAME,
                                 password=con_params.CURW_SIM_PASSWORD,
                                 port=con_params.CURW_SIM_PORT,
                                 db=con_params.CURW_SIM_DATABASE)

        TS = Timeseries(pool=curw_sim_pool)

        # [hash_id, station_id, station_name, latitude, longitude]
        active_obs_stations = extract_active_curw_obs_rainfall_stations(
            start_time=start_time, end_time=end_time)[1:]
        obs_stations_dict = {}  # keys: obs station id, value: [hash id, name, latitude, longitude]

        for hash_id, station_id, station_name, latitude, longitude in active_obs_stations:
            obs_stations_dict[station_id] = [hash_id, station_name, latitude, longitude]

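        # register (or reuse) one curw_sim timeseries per active obs station;
        # the grid_id encodes the obs station id and station name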
        for obs_id in obs_stations_dict.keys():
            meta_data = {
                'latitude': float('%.6f' % float(obs_stations_dict.get(obs_id)[2])),
                'longitude': float('%.6f' % float(obs_stations_dict.get(obs_id)[3])),
                'model': target_model,
                'method': method,
                'grid_id': 'rainfall_{}_{}'.format(obs_id, obs_stations_dict.get(obs_id)[1])
            }

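            # reuse the timeseries id for this station/model/method if it
            # already exists; otherwise generate one and register the run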
            tms_id = TS.get_timeseries_id_if_exists(meta_data=meta_data)

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            TS.update_grid_id(id_=tms_id, grid_id=meta_data['grid_id'])

            obs_hash_id = obs_stations_dict.get(obs_id)[0]

            obs_timeseries = []

            if timestep == 5:
                ts = extract_obs_rain_5_min_ts(connection=curw_obs_connection,
                                               start_time=obs_start,
                                               end_time=end_time,
                                               id=obs_hash_id)
                if ts is not None and len(ts) > 1:
                    obs_timeseries.extend(
                        process_5_min_ts(newly_extracted_timeseries=ts,
                                         expected_start=obs_start)[1:])
                    # obs_start = ts[-1][0]
            elif timestep == 15:
                ts = extract_obs_rain_15_min_ts(connection=curw_obs_connection,
                                                start_time=obs_start,
                                                end_time=end_time,
                                                id=obs_hash_id)
                if ts is not None and len(ts) > 1:
                    obs_timeseries.extend(
                        process_15_min_ts(newly_extracted_timeseries=ts,
                                          expected_start=obs_start)[1:])
                    # obs_start = ts[-1][0]

            # for i in range(len(obs_timeseries)):
            #     if obs_timeseries[i][1] == -99999:
            #         obs_timeseries[i][1] = 0

            if obs_timeseries is not None and len(obs_timeseries) > 0:
                TS.replace_data(timeseries=obs_timeseries, tms_id=tms_id)

    except Exception as e:
        traceback.print_exc()
        logger.error(
            "Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        if curw_obs_connection is not None:
            curw_obs_connection.close()
        if curw_sim_pool is not None:
            destroy_Pool(pool=curw_sim_pool)
        if curw_obs_pool is not None:
            destroy_Pool(pool=curw_obs_pool)
import traceback
from db_adapter.base import get_Pool, destroy_Pool
from db_adapter.curw_sim.grids import add_obs_to_d03_grid_mappings_for_rainfall, get_obs_to_d03_grid_mappings_for_rainfall, \
    add_flo2d_raincell_grid_mappings, get_flo2d_cells_to_obs_grid_mappings, get_flo2d_cells_to_wrf_grid_mappings, \
    add_flo2d_initial_conditions, get_flo2d_initial_conditions, \
    GridInterpolationEnum
from db_adapter.constants import CURW_SIM_HOST, CURW_SIM_PORT, CURW_SIM_USERNAME, CURW_SIM_PASSWORD, CURW_SIM_DATABASE

print(" Add obs to wrf_d03 grid mappings")

try:

    pool = get_Pool(host=CURW_SIM_HOST,
                    port=CURW_SIM_PORT,
                    user=CURW_SIM_USERNAME,
                    password=CURW_SIM_PASSWORD,
                    db=CURW_SIM_DATABASE)

    # grid_interpolation_method = GridInterpolationEnum.getAbbreviation(GridInterpolationEnum.MDPA)

    # print(" Add flo2d 250 grid mappings")
    # add_flo2d_raincell_grid_mappings(pool=pool, flo2d_model='flo2d_250', grid_interpolation=grid_interpolation_method)
    # print("{} flo2d 250 grids added".format(len(get_flo2d_cells_to_wrf_grid_mappings(pool=pool, flo2d_model='flo2d_250', grid_interpolation=grid_interpolation_method).keys())))
    # print("{} flo2d 250 grids added".format(len(get_flo2d_cells_to_obs_grid_mappings(pool=pool, flo2d_model='flo2d_250', grid_interpolation=grid_interpolation_method).keys())))
    #
    #
    # print(" Add flo2d 150 grid mappings")
    # add_flo2d_raincell_grid_mappings(pool=pool, flo2d_model='flo2d_150', grid_interpolation=grid_interpolation_method)
    # print("{} flo2d 150 grids added".format(len(get_flo2d_cells_to_wrf_grid_mappings(pool=pool, flo2d_model='flo2d_150', grid_interpolation=grid_interpolation_method).keys())))
    # print("{} flo2d 150 grids added".format(len(get_flo2d_cells_to_obs_grid_mappings(pool=pool, flo2d_model='flo2d_150', grid_interpolation=grid_interpolation_method).keys())))
    #
Exemple #30
0
def prepare_chan(chan_file_path, start, flo2d_model):

    flo2d_version = flo2d_model.split('_')[1]

    try:

        curw_sim_pool = get_Pool(host=con_params.CURW_SIM_HOST,
                                 user=con_params.CURW_SIM_USERNAME,
                                 password=con_params.CURW_SIM_PASSWORD,
                                 port=con_params.CURW_SIM_PORT,
                                 db=con_params.CURW_SIM_DATABASE)

        curw_obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                                 user=con_params.CURW_OBS_USERNAME,
                                 password=con_params.CURW_OBS_PASSWORD,
                                 port=con_params.CURW_OBS_PORT,
                                 db=con_params.CURW_OBS_DATABASE)
        obs_connection = curw_obs_pool.connection()

        # retrieve initial conditions from database
        initial_conditions = get_flo2d_initial_conditions(
            pool=curw_sim_pool, flo2d_model=flo2d_model)
        print(initial_conditions)

        # chan head
        with open(os.path.join(ROOT_DIRECTORY, "input", "chan",
                               "chan_{}_head.dat".format(flo2d_version)), "r") as head_file:
            head = head_file.read()
        write_file_to_file(chan_file_path, file_content=head)

        # chan body
        chan_processed_body = []

        body_file_name = os.path.join(ROOT_DIRECTORY, "input", "chan",
                                      "chan_{}_body.dat".format(flo2d_version))
        with open(body_file_name, "r") as body_file:
            chan_body = [line.rstrip('\n') for line in body_file]

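        # body lines come in upstream/downstream pairs; each pair shares one
        # grid_id whose observed water level (if any) overrides the defaults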
        i = 0
        while i < len(chan_body):
            up_strm = chan_body[i].split()[0]
            up_strm_default = chan_body[i].split()[1]
            dwn_strm = chan_body[i + 1].split()[0]
            dwn_strm_default = chan_body[i + 1].split()[1]
            grid_id = "{}_{}_{}".format(flo2d_model, up_strm, dwn_strm)
            print(grid_id)
            wl_id = initial_conditions.get(grid_id)[2]
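            # look up an observed water level within the first two hours
            # after `start` to use as the initial stage for this pair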
            offset = (datetime.strptime(start, DATE_TIME_FORMAT) +
                      timedelta(hours=2)).strftime(DATE_TIME_FORMAT)
            water_level = getWL(connection=obs_connection,
                                wl_id=wl_id,
                                start_date=start,
                                end_date=offset)
            if water_level is None:
                chan_processed_body.append("{}{}".format(
                    up_strm.ljust(6), (str(up_strm_default)).rjust(6)))
                chan_processed_body.append("{}{}".format(
                    dwn_strm.ljust(6), (str(dwn_strm_default)).rjust(6)))
            else:
                chan_processed_body.append("{}{}".format(
                    up_strm.ljust(6), (str(water_level)).rjust(6)))
                chan_processed_body.append("{}{}".format(
                    dwn_strm.ljust(6), (str(water_level)).rjust(6)))
            i += 2

        append_to_file(chan_file_path, data=chan_processed_body)

        # chan tail
        with open(os.path.join(ROOT_DIRECTORY, "input", "chan",
                               "chan_{}_tail.dat".format(flo2d_version)), "r") as tail_file:
            tail = tail_file.read()
        append_file_to_file(chan_file_path, file_content=tail)

    except Exception as e:
        traceback.print_exc()
    finally:
        destroy_Pool(curw_sim_pool)
        destroy_Pool(curw_obs_pool)
        print("Chan generated")