Example #1
def add_unit(pool, unit, unit_type):
    """
    Insert units into the database
    :param pool: database connection pool
    :param unit: string
    :param unit_type: UnitType enum value, one of {Accumulative, Instantaneous, Mean}
    :return: True if the unit has been added to the "Unit" table of the database, else False
    """

    connection = pool.connection()
    try:
        if get_unit_id(pool=pool, unit=unit, unit_type=unit_type) is None:
            with connection.cursor() as cursor:
                sql_statement = "INSERT INTO `unit` (`unit`, `type`) VALUES ( %s, %s)"
                row_count = cursor.execute(sql_statement,
                                           (unit, unit_type.value))
                connection.commit()
                return row_count > 0
        else:
            logger.info(
                "Unit with unit={}, unit_type={} already exists in the database"
                .format(unit, unit_type))
            return False
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of unit: unit={}, unit_type={} failed".format(
            unit, unit_type)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
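
A hedged usage sketch for add_unit; get_Pool/destroy_Pool and the UnitType enum come from the surrounding db_adapter package, but the module paths below are assumptions, and the connection details are placeholders.

# Usage sketch; module paths and connection details are assumptions.
from db_adapter.base import get_Pool, destroy_Pool          # assumed module path
from db_adapter.curw_fcst.unit import UnitType              # assumed module path

pool = get_Pool(host="127.0.0.1", port=3306, user="user", password="password", db="curw_fcst")
try:
    if add_unit(pool=pool, unit="mm", unit_type=UnitType.Accumulative):
        print("unit added")
    else:
        print("unit already exists or insert failed")
finally:
    destroy_Pool(pool)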
Example #2
def add_source(pool, model, version, parameters=None):
    """
    Insert sources into the database
    :param pool: database connection pool
    :param model: string
    :param version: string
    :param parameters: JSON
    :return: True if the source has been added to the "Source" table of the database, else False
    """

    connection = pool.connection()
    try:
        if get_source_id(pool=pool, model=model, version=version) is None:
            with connection.cursor() as cursor:
                sql_statement = "INSERT INTO `source` (`model`, `version`, `parameters`) VALUES ( %s, %s, %s)"
                row_count = cursor.execute(
                    sql_statement, (model, version, json.dumps(parameters)))
                connection.commit()
                return row_count > 0
        else:
            logger.info(
                "Source with model={} and version={} already exists in the database"
                .format(model, version))
            return False
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of source: model={}, version={} and parameters={} failed".format(
            model, version, parameters)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
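
Since parameters is serialized with json.dumps, any JSON-encodable dict can be stored; an illustrative call (the model, version, and parameter values below are made up):

# Illustrative call; parameter values are made up for the example.
wrf_params = {"namelist": "default", "gfs_data_hour": "18"}
add_source(pool=pool, model="WRF_A", version="v4.0", parameters=wrf_params)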
Example #3
def delete_variable(pool, variable):
    """
    Delete variable from Variable table, given variable name
    :param pool: database connection pool
    :param variable: string
    :return: True if the deletion was successful, else False
    """

    connection = pool.connection()
    try:

        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `variable` WHERE `variable`=%s"
            row_count = cursor.execute(sql_statement, variable)
            connection.commit()
            if row_count > 0:
                return True
            else:
                logger.info(
                    "There's no record of variable in the database with variable={}"
                    .format(variable))
                return False
    except Exception as exception:
        connection.rollback()
        error_message = "Deleting variable with variable={} failed.".format(
            variable)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
Example #4
def delete_unit(pool, unit, unit_type):
    """
    Delete unit from Unit table, given unit and unit_type
    :param pool: database connection pool
    :param unit: string
    :param unit_type: UnitType enum value, one of {Accumulative, Instantaneous, Mean}
    :return: True if the deletion was successful, else False
    """

    connection = pool.connection()
    try:

        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `unit` WHERE `unit`=%s and `type`=%s"
            row_count = cursor.execute(sql_statement, (unit, unit_type.value))
            connection.commit()
            if row_count > 0:
                return True
            else:
                logger.info(
                    "There's no record of unit in the database with unit={} and unit_type={}"
                    .format(unit, unit_type))
                return False
    except Exception as exception:
        connection.rollback()
        error_message = "Deleting unit with unit={} and unit_type={} failed.".format(
            unit, unit_type)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
Example #5
def delete_source(pool, model, version):
    """
    Delete source from Source table, given model and version
    :param pool: database connection pool
    :param model: str
    :param version: str
    :return: True if the deletion was successful, else False
    """

    connection = pool.connection()
    try:

        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `source` WHERE `model`=%s and `version`=%s"
            row_count = cursor.execute(sql_statement, (model, version))
            connection.commit()
            if row_count > 0:
                return True
            else:
                logger.info(
                    "There's no record of source in the database with model={} and version={}"
                    .format(model, version))
                return False
    except Exception as exception:
        connection.rollback()
        error_message = "Deleting source with model={} and version={} failed.".format(
            model, version)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
Example #6
def add_variable(pool, variable):
    """
    Insert variables into the database
    :param pool: database connection pool
    :param variable: string
    :return: True if the variable has been added to the "Variable" table of the database, else False
    """

    connection = pool.connection()
    try:
        if get_variable_id(pool=pool, variable=variable) is None:
            with connection.cursor() as cursor:
                sql_statement = "INSERT INTO `variable` (`variable`) VALUES ( %s)"
                row_count = cursor.execute(sql_statement, variable)
                connection.commit()
                return row_count > 0
        else:
            logger.info(
                "Variable with variable={} already exists in the database".
                format(variable))
            return False
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of variable: variable={} failed".format(
            variable)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
Example #7
def delete_source_by_id(pool, id_):
    """
    Delete source from Source table by id
    :param pool: database connection pool
    :param id_: id of the source
    :return: True if the deletion was successful, else False
    """

    connection = pool.connection()
    try:

        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `source` WHERE `id`=%s"
            row_count = cursor.execute(sql_statement, id_)
            connection.commit()
            if row_count > 0:
                return True
            else:
                logger.info(
                    "There's no record of source in the database with the source id {}"
                    .format(id_))
                return False
    except Exception as exception:
        connection.rollback()
        error_message = "Deleting source with id {} failed.".format(id_)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
Example #8
def add_station(pool, name, latitude, longitude, description, station_type):
    """
    Insert stations into the database

    Station ids are assigned in ranges as below;
    - 1 xx xxx - CUrW (stationId: curw_<SOMETHING>)
    - 2 xx xxx - Megapolis (stationId: megapolis_<SOMETHING>)
    - 3 xx xxx - Government (stationId: gov_<SOMETHING>. May follow as gov_irr_<SOMETHING>)
    - 4 xx xxx - Public (stationId: pub_<SOMETHING>)
    - 8 xx xxx - Satellite (stationId: sat_<SOMETHING>)

    Simulation model station ids range above 1,000,000 as below;
    - 1 1xx xxx - WRF (stationId: [;<prefix>_]wrf_<SOMETHING>)
    - 1 2xx xxx - FLO2D (stationId: [;<prefix>_]flo2d_<SOMETHING>)
    - 1 3xx xxx - MIKE (stationId: [;<prefix>_]mike_<SOMETHING>)

    :param pool: database connection pool
    :param name: string
    :param latitude: double
    :param longitude: double
    :param description: string
    :param station_type: StationEnum value that defines the station type,
    such as StationEnum.CUrW
    :return: True if the station is added into the 'Station' table, else False
    """
    initial_value = station_type.value
    range_ = StationEnum.getRange(station_type)

    connection = pool.connection()
    try:
        if get_station_id(pool=pool, latitude=latitude, longitude=longitude, station_type=station_type) is None:

            with connection.cursor() as cursor1:
                sql_statement = "SELECT `id` FROM `station` WHERE `id` BETWEEN %s and %s ORDER BY `id` DESC"
                row_count = cursor1.execute(sql_statement, (initial_value, initial_value + range_ - 1))
                if row_count > 0:
                    station_id = cursor1.fetchone()['id'] + 1
                else:
                    station_id = initial_value

            with connection.cursor() as cursor2:
                sql_statement = "INSERT INTO `station` (`id`, `name`, `latitude`, `longitude`, `description`) " \
                                "VALUES ( %s, %s, %s, %s, %s)"
                row_count = cursor2.execute(sql_statement, (station_id, name, latitude, longitude, description))
                connection.commit()
                return True if row_count > 0 else False
        else:
            logger.info("Station with latitude={} longitude={} and station_type={} already exists in the database"
                .format(latitude, longitude, station_type))
            return False
    except Exception as exception:
        connection.rollback()
        error_message = "Insertion of station: name={}, latitude={}, longitude={}, description={}, " \
                        "and station_type={} failed.".format(name, latitude, longitude, description, station_type)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
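
The SELECT above allocates the next free id inside the station type's block. A worked example, assuming (hypothetically) that StationEnum.CUrW.value is 100000 and StationEnum.getRange returns 100000 for it:

# Worked example of the id allocation, under the assumption that
# StationEnum.CUrW.value == 100000 and getRange(StationEnum.CUrW) == 100000.
# The SELECT scans ids 100000..199999 in descending order:
#   - if the highest existing id is 100037, the new station gets 100038
#   - if the block is empty, the new station gets 100000 itself
add_station(pool=pool, name="Ibattara", latitude=6.890000, longitude=79.860000,
            description="CUrW rain gauge", station_type=StationEnum.CUrW)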
def create_test_schema_db():

    # connect to the MySQL engine
    engine = get_engine(DIALECT_MYSQL, DRIVER_PYMYSQL, HOST, PORT, DATABASE,
                        USERNAME, PASSWORD)

    # create the schema using classes defined
    CurwFcstBase.metadata.create_all(engine)

    logger.info("test_schema schema generated.")
def gen_all_d03_rfields_locally(source_names, version, sim_tag):
    """
       Generate d03 rfields for SL extent
       :param source_names: e.g.: WRF_A,WRF_C
       :param version: e.g.: v4.0
       :param sim_tag: e.g.: "evening_18hrs"
       :return:  True if successful, False otherwise
    """
    rfield_command_d03 = "sudo nice -n -15 nohup  /home/uwcc-admin/curw_rfield_extractor/gen_SL_d03_rfield.py -m {} -v {} -s {} 2>&1 " \
                         "/home/uwcc-admin/curw_rfield_extractor/rfield.log".format(source_names, version, sim_tag)

    logger.info("Generate {} d03 rfield files.".format(source_names))
    output = os.system(rfield_command_d03)
    # os.system returns the command's exit status; non-zero means failure
    return output == 0
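
os.system offers no fine-grained error handling; a sketch of the same call using subprocess instead (a different technique from the original, with the shell redirection replaced by an explicit log file handle):

# Alternative sketch using subprocess instead of os.system; not the original
# implementation, just a safer pattern for the same command.
import subprocess

def gen_all_d03_rfields_locally_subprocess(source_names, version, sim_tag):
    log_path = "/home/uwcc-admin/curw_rfield_extractor/rfield.log"
    cmd = ["sudo", "nice", "-n", "-15",
           "/home/uwcc-admin/curw_rfield_extractor/gen_SL_d03_rfield.py",
           "-m", source_names, "-v", version, "-s", sim_tag]
    with open(log_path, "a") as log_file:
        result = subprocess.run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
    return result.returncode == 0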
def extract_wrf_data(wrf_system, config_data, tms_meta):
    """ Extract the d03 RAINNC netcdf output of the given WRF system and load it into the database """
    logger.info(
        "######################################## {} #######################################"
        .format(wrf_system))
    for date in config_data['dates']:

        #     /wrf_nfs/wrf/4.0/18/A/2019-07-30/d03_RAINNC.nc

        output_dir = os.path.join(config_data['wrf_dir'],
                                  config_data['version'],
                                  config_data['gfs_data_hour'], wrf_system,
                                  date)
        rainnc_net_cdf_file = 'd03_RAINNC.nc'

        rainnc_net_cdf_file_path = os.path.join(output_dir,
                                                rainnc_net_cdf_file)

        try:
            source_name = "{}_{}".format(config_data['model'], wrf_system)
            source_id = get_source_id(pool=pool,
                                      model=source_name,
                                      version=tms_meta['version'])

            if source_id is None:
                add_source(pool=pool,
                           model=source_name,
                           version=tms_meta['version'])
                source_id = get_source_id(pool=pool,
                                          model=source_name,
                                          version=tms_meta['version'])

        except Exception:
            msg = "Exception occurred while loading source meta data for WRF_{} from database.".format(
                wrf_system)
            logger.error(msg)
            email_content[datetime.now().strftime(
                COMMON_DATE_TIME_FORMAT)] = msg
            return False

        tms_meta['model'] = source_name
        tms_meta['source_id'] = source_id

        return read_netcdf_file(
            pool=pool,
            rainnc_net_cdf_file_path=rainnc_net_cdf_file_path,
            tms_meta=tms_meta)
def gen_all_d03_rfields(source_names, version, sim_tag, rfield_host,
                        rfield_key, rfield_user):
    """
       Generate d03 rfields for SL extent
       :param source_names: e.g.: WRF_A,WRF_C
       :param version: e.g.: v4.0
       :param sim_tag: e.g.: "evening_18hrs"
       :param rfield_host:
       :param rfield_key:
       :param rfield_user:
       :return:  True if successful, False otherwise
    """
    rfield_command_d03 = "sudo nice -n -15 nohup  ./curw_rfield_extractor/gen_SL_d03_rfield.py -m {} -v {} -s {} 2>&1 " \
                         "./curw_rfield_extractor/rfield.log".format(source_names, version, sim_tag)

    logger.info("Generate {} d03 rfield files.".format(source_names))
    return run_remote_command(host=rfield_host,
                              key=rfield_key,
                              user=rfield_user,
                              command=rfield_command_d03)
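
run_remote_command itself is not shown in these examples; a minimal sketch of what it might look like with paramiko (signature inferred from the call above, implementation is an assumption):

# Hypothetical sketch of run_remote_command; signature inferred from the call
# above, implementation assumed.
import paramiko

def run_remote_command(host, key, user, command):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(hostname=host, username=user, key_filename=key)
        stdin, stdout, stderr = client.exec_command(command)
        # wait for the remote command to finish and check its exit status
        return stdout.channel.recv_exit_status() == 0
    finally:
        client.close()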
Example #13
def delete_station(pool, latitude, longitude, station_type):
    """
    Delete station from Station table
    :param pool: database connection pool
    :param latitude:
    :param longitude:
    :param station_type: StationEnum value that defines the station type,
    such as StationEnum.CUrW
    :return: True if the deletion was successful, else False
    """

    connection = pool.connection()
    try:
        initial_value = str(station_type.value)

        if len(initial_value) == 6:
            pattern = "{}_____".format(initial_value[0])
        elif len(initial_value) == 7:
            pattern = "{}{}_____".format(initial_value[0], initial_value[1])
        else:
            raise ValueError("Unexpected station_type value: {}".format(initial_value))

        with connection.cursor() as cursor:
            sql_statement = "DELETE FROM `station` WHERE `id` like %s and `latitude`=%s and `longitude`=%s and `station_type`=%s;"
            row_count = cursor.execute(sql_statement, (pattern, latitude, longitude, StationEnum.getTypeString(station_type)))
            connection.commit()
            if row_count > 0:
                return True
            else:
                logger.info("There's no record of station in the database with latitude={}, "
                            "longitude={}, and station_type{}".format(latitude, longitude, station_type))
                return False
    except Exception as exception:
        connection.rollback()
        error_message = "Deleting station with latitude={}, longitude={}, and station_type{} failed."\
            .format(latitude, longitude, station_type)
        logger.error(error_message)
        traceback.print_exc()
        raise exception
    finally:
        if connection is not None:
            connection.close()
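
The LIKE pattern above keeps only the station-type prefix of the id; a worked example, assuming (hypothetically) that StationEnum.CUrW.value is 100000:

# Worked example of the LIKE pattern, assuming StationEnum.CUrW.value == 100000
# (a hypothetical value): str(100000) has length 6, so the pattern becomes
# "1_____", which matches every id from 100000 to 199999, i.e. the CUrW block.
delete_station(pool=pool, latitude=6.890000, longitude=79.860000,
               station_type=StationEnum.CUrW)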
        kelani_basin_rfield_status = gen_kelani_basin_rfields_locally(
            source_names=source_list, version=version, sim_tag=sim_tag)

        if not kelani_basin_rfield_status:
            email_content[datetime.now().strftime(
                COMMON_DATE_TIME_FORMAT
            )] = "Kelani basin rfiled generation for {} failed".format(
                source_list)

        # d03_rfield_status = gen_all_d03_rfields(source_names=source_list, version=version, sim_tag=sim_tag,
        #                                         rfield_host=rfield_host, rfield_key=rfield_key, rfield_user=rfield_user)

        d03_rfield_status = gen_all_d03_rfields_locally(
            source_names=source_list, version=version, sim_tag=sim_tag)

        if not d03_rfield_status:
            email_content[datetime.now().strftime(
                COMMON_DATE_TIME_FORMAT
            )] = "SL d03 rfiled generation for {} failed".format(source_list)

    except Exception as e:
        msg = 'Multiprocessing error.'
        logger.error(msg)
        email_content[datetime.now().strftime(COMMON_DATE_TIME_FORMAT)] = msg
        traceback.print_exc()
    finally:
        mp_pool.close()
        destroy_Pool(pool)
        logger.info("Process finished.")
        logger.info("Email Content {}".format(json.dumps(email_content)))
Example #15
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from db_adapter.logger import logger


def get_engine(dialect, driver, host, port, db, user, password):
    """ Connecting to database """

    db_url = "%s+%s://%s:%s@%s:%d/%s" % (dialect, driver, user, password, host,
                                         port, db)
    return create_engine(db_url, echo=False)


def get_sessionmaker(engine):
    return sessionmaker(bind=engine)


# ---- declarative_base ----
# allows us to create classes that include directives to describe the actual
# database table they will be mapped to.

# CurwFcstBase class for all the schema model classes of "curw-fcst" database
logger.info("Declaring an orm mapping for curw_fcst database.")
CurwFcstBase = declarative_base()

# CurwObsBase class for all the schema model classes of "curw-obs" database
logger.info("Declaring an orm mapping for curw_obs database.")
CurwObsBase = declarative_base()

# CurwSimBase class for all the schema model classes of "curw-sim" database
logger.info("Declaring an orm mapping for curw_sim database.")
CurwSimBase = declarative_base()
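
A sketch of how a schema class would build on one of these bases; the table and columns below are illustrative placeholders, not the actual curw-fcst schema:

# Illustrative mapped class on CurwFcstBase; table and columns are placeholders.
from sqlalchemy import Column, Integer, String


class Unit(CurwFcstBase):
    __tablename__ = 'unit'

    id = Column(Integer, primary_key=True)
    unit = Column(String(10), nullable=False)
    type = Column(String(20), nullable=False)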
Example #16
        tms_id = TS.get_timeseries_id_if_exists(meta_data=meta_data)

        start_date = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d %H:00:00")

        filled_ts = []

        if tms_id is None:
            exit(0)
        else:
            end_date = TS.get_obs_end(id_=tms_id)
            original_ts = TS.get_timeseries(id_=tms_id, start_date=start_date, end_date=end_date)
            filled_ts = fill_ts_missing_entries(start=start_date, end=end_date, timeseries=original_ts,
                                                interpolation_method='linear', timestep=60)

        for i in range(len(filled_ts)):
            if filled_ts[i][1] < 0.2:
                filled_ts[i][1] = 0.2

        if filled_ts is not None and len(filled_ts) > 0:
            TS.insert_data(timeseries=filled_ts, tms_id=tms_id, upsert=True)
            # filled_ts rows are [time, value]; obs_end expects the timestamp
            TS.update_latest_obs(id_=tms_id, obs_end=filled_ts[-1][0])

    except Exception as e:
        traceback.print_exc()
        logger.error("Exception occurred.")
    finally:
        logger.info("Filled missing values")
        destroy_Pool(pool=curw_sim_pool)
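
fill_ts_missing_entries is not shown in these examples; a minimal pandas-based sketch under the signature used above (the implementation is an assumption):

# Hypothetical sketch of fill_ts_missing_entries; signature inferred from the
# call above, implementation assumed.
import pandas as pd

def fill_ts_missing_entries(start, end, timeseries, interpolation_method, timestep):
    df = pd.DataFrame(timeseries, columns=['time', 'value'])
    df['time'] = pd.to_datetime(df['time'])
    df = df.set_index('time')
    # reindex onto a complete grid of `timestep`-minute slots, then interpolate
    full_index = pd.date_range(start=start, end=end, freq='{}min'.format(timestep))
    df = df.reindex(full_index).interpolate(method=interpolation_method)
    return [[t.strftime('%Y-%m-%d %H:%M:%S'), v] for t, v in df['value'].items()]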


Example #17
        # ########

        # USERNAME = "******"
        # PASSWORD = "******"
        # HOST = "127.0.0.1"
        # PORT = 3306
        # DATABASE = "curw_fcst"

        # pool = get_Pool(host=HOST, port=PORT, user=USERNAME, password=PASSWORD, db=DATABASE)

        add_source(pool=pool, model=model, version=version, parameters=None)
        # add_variable(pool=pool, variable=variable)
        # add_unit(pool=pool, unit=unit, unit_type=unit_type)

        # add hechms output stations

        for i in range(len(hechms_stations)):
            station_name = hechms_stations[i][0]
            lat = hechms_stations[i][1]
            lon = hechms_stations[i][2]
            add_station(pool=pool, name=station_name, latitude="%.6f" % float(lat),
                        longitude="%.6f" % float(lon), station_type=StationEnum.HECHMS,
                        description="hecHMS output station")

    except Exception:
        logger.error("Initialization process failed.")
        traceback.print_exc()
    finally:
        logger.info("Initialization process finished.")
        destroy_Pool(pool=pool)

Example #18
def update_rainfall_from_file(curw_sim_pool,
                              flo2d_grid_polygon_map,
                              stations_dict,
                              rainfall_df,
                              flo2d_model,
                              method,
                              grid_interpolation,
                              timestep,
                              start_time=None,
                              end_time=None):
    """
    Update rainfall observations for flo2d models
    :param flo2d_model: flo2d model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep: output timeseries timestep
    :return:
    """

    # start = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')

    try:

        TS = Sim_Timeseries(pool=curw_sim_pool)

        # # [hash_id, station_id, station_name, latitude, longitude]
        # flo2d_grid_polygon_map :: [Grid_ ID, X(longitude), Y(latitude), matching_point]

        # stations_dict_for_obs = { }  # keys: obs station id , value: hash id

        for grid in flo2d_grid_polygon_map:
            lat = grid[2]
            lon = grid[1]
            cell_id = grid[0]
            meta_data = {
                'latitude': float('%.6f' % float(lat)),
                'longitude': float('%.6f' % float(lon)),
                'model': flo2d_model,
                'method': method,
                'grid_id': '{}_{}_{}'.format(flo2d_model, grid_interpolation,
                                             (str(cell_id)).zfill(10))
            }

            if len(grid) > 3:
                polygon = grid[3]

                poly_lat = stations_dict.get(polygon)[1]
                poly_lon = stations_dict.get(polygon)[0]

                processed_ts = rainfall_df.loc[
                    (rainfall_df['latitude'] == poly_lat)
                    & (rainfall_df['longitude'] == poly_lon)][[
                        'time', 'value'
                    ]].values.tolist()

            else:
                processed_ts = rainfall_df.groupby('time').mean().round(
                    3)['value'].reset_index().values.tolist()

            tms_id = TS.get_timeseries_id(grid_id=meta_data.get('grid_id'),
                                          method=meta_data.get('method'))

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            print("grid_id:", meta_data['grid_id'])
            print(processed_ts)

            # for i in range(len(obs_timeseries)):
            #     if obs_timeseries[i][1] == -99999:
            #         obs_timeseries[i][1] = 0

            if processed_ts is not None and len(processed_ts) > 0:
                TS.insert_data(timeseries=processed_ts,
                               tms_id=tms_id,
                               upsert=True)

    except Exception as e:
        traceback.print_exc()
        logger.error(
            "Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        destroy_Pool(pool=curw_sim_pool)
        logger.info("Process finished")
Example #19
def update_rainfall_obs(flo2d_model, method, grid_interpolation, timestep):
    """
    Update rainfall observations for flo2d models
    :param flo2d_model: flo2d model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep: output timeseries timestep
    :return:
    """

    now = datetime.now()
    OBS_START_STRING = (now - timedelta(days=10)).strftime('%Y-%m-%d %H:00:00')
    OBS_START = datetime.strptime(OBS_START_STRING, '%Y-%m-%d %H:%M:%S')

    try:

        # Connect to the database
        curw_obs_pool = get_Pool(host=CURW_OBS_HOST,
                                 user=CURW_OBS_USERNAME,
                                 password=CURW_OBS_PASSWORD,
                                 port=CURW_OBS_PORT,
                                 db=CURW_OBS_DATABASE)

        curw_obs_connection = curw_obs_pool.connection()

        curw_sim_pool = get_Pool(host=CURW_SIM_HOST,
                                 user=CURW_SIM_USERNAME,
                                 password=CURW_SIM_PASSWORD,
                                 port=CURW_SIM_PORT,
                                 db=CURW_SIM_DATABASE)

        # test ######
        # pool = get_Pool(host=HOST, user=USERNAME, password=PASSWORD, port=PORT, db=DATABASE)

        TS = Sim_Timeseries(pool=curw_sim_pool)

        # [hash_id, station_id, station_name, latitude, longitude]
        active_obs_stations = read_csv(
            'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv'
        )
        flo2d_grids = read_csv('grids/flo2d/{}m.csv'.format(
            flo2d_model))  # [Grid_ ID, X(longitude), Y(latitude)]

        stations_dict_for_obs = {}  # keys: obs station id , value: hash id

        for obs_index in range(len(active_obs_stations)):
            stations_dict_for_obs[active_obs_stations[obs_index]
                                  [1]] = active_obs_stations[obs_index][0]

        flo2d_obs_mapping = get_flo2d_cells_to_obs_grid_mappings(
            pool=curw_sim_pool,
            grid_interpolation=grid_interpolation,
            flo2d_model=flo2d_model)

        for flo2d_index in range(len(flo2d_grids)):
            obs_start = OBS_START
            lat = flo2d_grids[flo2d_index][2]
            lon = flo2d_grids[flo2d_index][1]
            cell_id = flo2d_grids[flo2d_index][0]
            meta_data = {
                'latitude': float('%.6f' % float(lat)),
                'longitude': float('%.6f' % float(lon)),
                'model': flo2d_model,
                'method': method,
                'grid_id': '{}_{}_{}'.format(flo2d_model, grid_interpolation,
                                             (str(cell_id)).zfill(10))
            }

            tms_id = TS.get_timeseries_id(grid_id=meta_data.get('grid_id'),
                                          method=meta_data.get('method'))

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            obs_end = TS.get_obs_end(id_=tms_id)

            if obs_end is not None:
                obs_start = obs_end - timedelta(hours=1)

            obs1_station_id = str(
                flo2d_obs_mapping.get(meta_data['grid_id'])[0])
            obs2_station_id = str(
                flo2d_obs_mapping.get(meta_data['grid_id'])[1])
            obs3_station_id = str(
                flo2d_obs_mapping.get(meta_data['grid_id'])[2])

            obs_timeseries = []

            if timestep == 5:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_5_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id)
                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_5_min_ts(newly_extracted_timeseries=ts,
                                             expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_5_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_5_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_5_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_5_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])
            elif timestep == 15:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_15_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id)
                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_15_min_ts(newly_extracted_timeseries=ts,
                                              expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_15_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_15_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_15_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_15_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])

            for i in range(len(obs_timeseries)):
                if obs_timeseries[i][1] == -99999:
                    obs_timeseries[i][1] = 0

            if obs_timeseries is not None and len(obs_timeseries) > 0:
                TS.insert_data(timeseries=obs_timeseries,
                               tms_id=tms_id,
                               upsert=True)
                # obs_timeseries rows are [time, value]; obs_end expects the timestamp
                TS.update_latest_obs(id_=tms_id,
                                     obs_end=obs_timeseries[-1][0])

    except Exception as e:
        traceback.print_exc()
        logger.error(
            "Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        curw_obs_connection.close()
        destroy_Pool(pool=curw_sim_pool)
        destroy_Pool(pool=curw_obs_pool)
        logger.info("Process finished")
Example #20
def update_rainfall_obs(flo2d_model, method, grid_interpolation, timestep,
                        start_time, end_time):
    """
    Update rainfall observations for flo2d models
    :param flo2d_model: flo2d model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep: output timeseries timestep
    :return:
    """

    obs_start = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')

    try:

        # Connect to the database
        curw_obs_pool = get_Pool(host=con_params.CURW_OBS_HOST,
                                 user=con_params.CURW_OBS_USERNAME,
                                 password=con_params.CURW_OBS_PASSWORD,
                                 port=con_params.CURW_OBS_PORT,
                                 db=con_params.CURW_OBS_DATABASE)

        curw_obs_connection = curw_obs_pool.connection()

        curw_sim_pool = get_Pool(host=con_params.CURW_SIM_HOST,
                                 user=con_params.CURW_SIM_USERNAME,
                                 password=con_params.CURW_SIM_PASSWORD,
                                 port=con_params.CURW_SIM_PORT,
                                 db=con_params.CURW_SIM_DATABASE)

        TS = Sim_Timeseries(pool=curw_sim_pool)

        # [hash_id, station_id, station_name, latitude, longitude]
        # active_obs_stations = read_csv(os.path.join(ROOT_DIR,'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv'))
        active_obs_stations = extract_active_curw_obs_rainfall_stations(
            start_time=start_time, end_time=end_time)[1:]
        flo2d_grids = read_csv(
            os.path.join(ROOT_DIR, 'grids/flo2d/{}m.csv'.format(
                flo2d_model)))  # [Grid_ ID, X(longitude), Y(latitude)]

        stations_dict_for_obs = {}  # keys: obs station id , value: hash id

        for obs_index in range(len(active_obs_stations)):
            stations_dict_for_obs[active_obs_stations[obs_index]
                                  [1]] = active_obs_stations[obs_index][0]

        # flo2d_obs_mapping = get_flo2d_cells_to_obs_grid_mappings(pool=curw_sim_pool, grid_interpolation=grid_interpolation, flo2d_model=flo2d_model)
        flo2d_obs_mapping = find_nearest_obs_stations_for_flo2d_stations(
            flo2d_stations_csv=os.path.join(
                ROOT_DIR, 'grids/flo2d/{}m.csv'.format(flo2d_model)),
            obs_stations=active_obs_stations,
            flo2d_model=flo2d_model)

        for flo2d_index in range(len(flo2d_grids)):
            lat = flo2d_grids[flo2d_index][2]
            lon = flo2d_grids[flo2d_index][1]
            cell_id = flo2d_grids[flo2d_index][0]
            meta_data = {
                'latitude': float('%.6f' % float(lat)),
                'longitude': float('%.6f' % float(lon)),
                'model': flo2d_model,
                'method': method,
                'grid_id': '{}_{}_{}'.format(flo2d_model, grid_interpolation,
                                             (str(cell_id)).zfill(10))
            }

            tms_id = TS.get_timeseries_id(grid_id=meta_data.get('grid_id'),
                                          method=meta_data.get('method'))

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            print("grid_id:", cell_id)
            print("grid map:", flo2d_obs_mapping.get(cell_id))
            obs1_station_id = flo2d_obs_mapping.get(cell_id)[0]
            obs2_station_id = flo2d_obs_mapping.get(cell_id)[1]
            obs3_station_id = flo2d_obs_mapping.get(cell_id)[2]

            obs_timeseries = []

            if timestep == 5:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_5_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id,
                        end_time=end_time)

                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_5_min_ts(newly_extracted_timeseries=ts,
                                             expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_5_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id,
                            end_time=end_time)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_5_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_5_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id,
                                end_time=end_time)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_5_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])
            elif timestep == 15:
                if obs1_station_id != str(-1):
                    obs1_hash_id = stations_dict_for_obs.get(obs1_station_id)

                    ts = extract_obs_rain_15_min_ts(
                        connection=curw_obs_connection,
                        start_time=obs_start,
                        id=obs1_hash_id,
                        end_time=end_time)

                    if ts is not None and len(ts) > 1:
                        obs_timeseries.extend(
                            process_15_min_ts(newly_extracted_timeseries=ts,
                                              expected_start=obs_start)[1:])
                        # obs_start = ts[-1][0]

                    if obs2_station_id != str(-1):
                        obs2_hash_id = stations_dict_for_obs.get(
                            obs2_station_id)

                        ts2 = extract_obs_rain_15_min_ts(
                            connection=curw_obs_connection,
                            start_time=obs_start,
                            id=obs2_hash_id,
                            end_time=end_time)
                        if ts2 is not None and len(ts2) > 1:
                            obs_timeseries = fill_missing_values(
                                newly_extracted_timeseries=ts2,
                                OBS_TS=obs_timeseries)
                            if obs_timeseries is not None and len(
                                    obs_timeseries) > 0:
                                expected_start = obs_timeseries[-1][0]
                            else:
                                expected_start = obs_start
                            obs_timeseries.extend(
                                process_15_min_ts(
                                    newly_extracted_timeseries=ts2,
                                    expected_start=expected_start)[1:])
                            # obs_start = ts2[-1][0]

                        if obs3_station_id != str(-1):
                            obs3_hash_id = stations_dict_for_obs.get(
                                obs3_station_id)

                            ts3 = extract_obs_rain_15_min_ts(
                                connection=curw_obs_connection,
                                start_time=obs_start,
                                id=obs3_hash_id,
                                end_time=end_time)
                            if ts3 is not None and len(ts3) > 1 and len(
                                    obs_timeseries) > 0:
                                obs_timeseries = fill_missing_values(
                                    newly_extracted_timeseries=ts3,
                                    OBS_TS=obs_timeseries)
                                if obs_timeseries is not None:
                                    expected_start = obs_timeseries[-1][0]
                                else:
                                    expected_start = obs_start
                                obs_timeseries.extend(
                                    process_15_min_ts(
                                        newly_extracted_timeseries=ts3,
                                        expected_start=expected_start)[1:])

            for i in range(len(obs_timeseries)):
                if obs_timeseries[i][1] == -99999:
                    obs_timeseries[i][1] = 0

            print("### obs timeseries length ###", len(obs_timeseries))
            if obs_timeseries is not None and len(
                    obs_timeseries) > 0 and obs_timeseries[-1][0] != end_time:
                obs_timeseries.append(
                    [datetime.strptime(end_time, DATE_TIME_FORMAT), 0])

            final_ts = process_continuous_ts(original_ts=obs_timeseries,
                                             expected_start=datetime.strptime(
                                                 start_time, DATE_TIME_FORMAT),
                                             filling_value=0,
                                             timestep=timestep)

            if final_ts is not None and len(final_ts) > 0:
                TS.insert_data(timeseries=final_ts, tms_id=tms_id, upsert=True)
                # final_ts rows are [time, value]; obs_end expects the timestamp
                TS.update_latest_obs(id_=tms_id, obs_end=final_ts[-1][0])

    except Exception as e:
        traceback.print_exc()
        logger.error(
            "Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        curw_obs_connection.close()
        destroy_Pool(pool=curw_sim_pool)
        destroy_Pool(pool=curw_obs_pool)
        logger.info("Process finished")
Example #21
def update_rainfall_obs(curw_obs_pool, curw_sim_pool, flo2d_model, method, grid_interpolation, timestep, start_time, end_time):

    """
    Update rainfall observations for flo2d models
    :param flo2d_model: flo2d model
    :param method: value interpolation method
    :param grid_interpolation: grid interpolation method
    :param timestep: output timeseries timestep
    :return:
    """

    # obs_start = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')

    try:

        curw_obs_connection = curw_obs_pool.connection()

        # [hash_id, station_id, station_name, latitude, longitude]
        # active_obs_stations = read_csv(os.path.join(ROOT_DIR,'grids/obs_stations/rainfall/curw_active_rainfall_obs_stations.csv'))
        active_obs_stations = extract_active_curw_obs_rainfall_stations(curw_obs_pool=curw_obs_pool, start_time=start_time, end_time=end_time)[1:]
        flo2d_grids = read_csv(os.path.join(ROOT_DIR,'grids/flo2d/{}m.csv'.format(flo2d_model)))  # [Grid_ ID, X(longitude), Y(latitude)]

        stations_dict_for_obs = { }  # keys: obs station id , value: hash id

        for obs_index in range(len(active_obs_stations)):
            stations_dict_for_obs[active_obs_stations[obs_index][1]] = active_obs_stations[obs_index][0]

        # flo2d_obs_mapping = get_flo2d_cells_to_obs_grid_mappings(pool=curw_sim_pool, grid_interpolation=grid_interpolation, flo2d_model=flo2d_model)
        flo2d_obs_mapping = find_nearest_obs_stations_for_flo2d_stations(
            flo2d_stations_csv=os.path.join(ROOT_DIR,'grids/flo2d/{}m.csv'.format(flo2d_model)),
            obs_stations=active_obs_stations, flo2d_model=flo2d_model)

        # retrieve observed timeseries
        obs_df = pd.DataFrame()
        obs_df['time'] = pd.date_range(start=start_time, end=end_time, freq='5min')

        for obs_id in stations_dict_for_obs.keys():
            ts = extract_obs_rain_5_min_ts(connection=curw_obs_connection, start_time=start_time,
                                           id=stations_dict_for_obs.get(obs_id), end_time=end_time)
            ts.insert(0, ['time', obs_id])
            ts_df = list_of_lists_to_df_first_row_as_columns(ts)
            ts_df[obs_id] = ts_df[obs_id].astype('float64')

            obs_df = pd.merge(obs_df, ts_df, how="left", on='time')

        obs_df.set_index('time', inplace=True)
        obs_df['0'] = 0
        if timestep == 15:
            obs_df = obs_df.resample('15min', label='right', closed='right').sum()

        TS = Sim_Timeseries(pool=curw_sim_pool)

        for flo2d_index in range(len(flo2d_grids)):
            lat = flo2d_grids[flo2d_index][2]
            lon = flo2d_grids[flo2d_index][1]
            cell_id = flo2d_grids[flo2d_index][0]
            meta_data = {
                    'latitude': float('%.6f' % float(lat)), 'longitude': float('%.6f' % float(lon)),
                    'model': flo2d_model, 'method': method,
                    'grid_id': '{}_{}_{}'.format(flo2d_model, grid_interpolation, (str(cell_id)).zfill(10))
                    }

            tms_id = TS.get_timeseries_id(grid_id=meta_data.get('grid_id'), method=meta_data.get('method'))

            if tms_id is None:
                tms_id = TS.generate_timeseries_id(meta_data=meta_data)
                meta_data['id'] = tms_id
                TS.insert_run(meta_data=meta_data)

            print(datetime.now().strftime(DATE_TIME_FORMAT))
            print("grid_id:", cell_id)
            obs_station_ids = flo2d_obs_mapping.get(cell_id)

            if len(obs_station_ids) == 1:
                obs_ts_df = obs_df[obs_station_ids[0]].to_frame(name='final')
            elif len(obs_station_ids) == 2:
                obs_ts_df = obs_df[obs_station_ids].copy()  # copy so fillna does not mutate obs_df
                obs_ts_df[obs_station_ids[0]] = obs_ts_df[obs_station_ids[0]].fillna(obs_ts_df[obs_station_ids[1]])
                obs_ts_df['final'] = obs_ts_df[obs_station_ids[0]]
            elif len(obs_station_ids) == 3:
                obs_ts_df = obs_df[obs_station_ids].copy()  # copy so fillna does not mutate obs_df
                obs_ts_df[obs_station_ids[1]] = obs_ts_df[obs_station_ids[1]].fillna(obs_ts_df[obs_station_ids[2]])
                obs_ts_df[obs_station_ids[0]] = obs_ts_df[obs_station_ids[0]].fillna(obs_ts_df[obs_station_ids[1]])
                obs_ts_df['final'] = obs_ts_df[obs_station_ids[0]]
            else:
                obs_ts_df = obs_df['0'].to_frame(name='final')

            final_ts_df = obs_ts_df['final'].reset_index()
            final_ts_df['time'] = final_ts_df['time'].dt.strftime(DATE_TIME_FORMAT)
            final_ts = final_ts_df.values.tolist()

            if final_ts is not None and len(final_ts) > 0:
                TS.replace_data(timeseries=final_ts, tms_id=tms_id)
                # final_ts rows are [time, value]; obs_end expects the timestamp
                TS.update_latest_obs(id_=tms_id, obs_end=final_ts[-1][0])

    except Exception as e:
        traceback.print_exc()
        logger.error("Exception occurred while updating obs rainfalls in curw_sim.")
    finally:
        curw_obs_connection.close()
        destroy_Pool(pool=curw_sim_pool)
        destroy_Pool(pool=curw_obs_pool)
        logger.info("Process finished")
Example #22
def extract_distrubuted_hechms_outputs(output_file_name, output_dir, run_date,
                                       run_time):
    """
    Config.json 
    {
      "output_file_name": "DailyDischarge.csv",
      "output_dir": "",

      "run_date": "2019-05-24",
      "run_time": "00:00:00",
      "utc_offset": "",

      "sim_tag": "hourly_run",

      "model": "HECHMS",
      "version": "single",

      "unit": "m3/s",
      "unit_type": "Instantaneous",

      "variable": "Discharge",

      "station_name": "Hanwella"
    }

    """
    try:

        with open('config.json') as config_file:
            config = json.loads(config_file.read())

        # output_file_name, output_dir, run_date, and run_time are taken
        # directly from the function arguments

        utc_offset = read_attribute_from_config_file('utc_offset', config,
                                                     False)
        if utc_offset is None:
            utc_offset = ''

        # sim tag
        sim_tag = read_attribute_from_config_file('sim_tag', config, True)

        # source details
        model = read_attribute_from_config_file('model', config, True)
        version = read_attribute_from_config_file('version', config, True)

        # unit details
        unit = read_attribute_from_config_file('unit', config, True)
        unit_type = UnitType.getType(
            read_attribute_from_config_file('unit_type', config, True))

        # variable details
        variable = read_attribute_from_config_file('variable', config, True)

        # station details
        station_name = read_attribute_from_config_file('station_name', config,
                                                       True)

        out_file_path = os.path.join(output_dir, output_file_name)

        if not os.path.exists(out_file_path):
            msg = 'no file :: {}'.format(out_file_path)
            logger.warning(msg)
            print(msg)
            exit(1)

        fgt = get_file_last_modified_time(out_file_path)
        print("fgt, ", fgt)

        timeseries = read_csv(out_file_path)

        pool = get_Pool(host=CURW_FCST_HOST,
                        port=CURW_FCST_PORT,
                        db=CURW_FCST_DATABASE,
                        user=CURW_FCST_USERNAME,
                        password=CURW_FCST_PASSWORD)

        hechms_stations = get_hechms_stations(pool=pool)

        station_id = hechms_stations.get(station_name)[0]
        lat = str(hechms_stations.get(station_name)[1])
        lon = str(hechms_stations.get(station_name)[2])

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)

        tms_meta = {
            'sim_tag': sim_tag,
            'model': model,
            'version': version,
            'variable': variable,
            'unit': unit,
            'unit_type': unit_type.value,
            'latitude': lat,
            'longitude': lon,
            'station_id': station_id,
            'source_id': source_id,
            'variable_id': variable_id,
            'unit_id': unit_id
        }

        utcOffset = getUTCOffset(utc_offset, default=True)

        if utcOffset != timedelta():
            tms_meta['utcOffset'] = utcOffset

        # Push timeseries to database
        save_forecast_timeseries_to_db(pool=pool,
                                       timeseries=timeseries,
                                       run_date=run_date,
                                       run_time=run_time,
                                       tms_meta=tms_meta,
                                       fgt=fgt)

    except Exception as e:
        logger.error('JSON config data loading error.')
        print('JSON config data loading error.')
        traceback.print_exc()
    finally:
        logger.info("Process finished.")
        print("Process finished.")
Example #23
                # Save Forecast values into Database
                opts = {'elementNo': elementNo, 'tms_meta': tms_meta}
                if utcOffset != timedelta():
                    opts['utcOffset'] = utcOffset

                # Push timeseries to database
                save_forecast_timeseries_to_db(
                    pool=pool,
                    timeseries=waterLevelSeriesDict[elementNo],
                    run_date=run_date,
                    run_time=run_time,
                    opts=opts,
                    flo2d_stations=flo2d_stations,
                    fgt=fgt)

        run_info = json.loads(
            open(
                os.path.join(os.path.dirname(hychan_out_file_path),
                             "run_meta.json")).read())
        insert_run_metadata(pool=pool,
                            source_id=source_id,
                            variable_id=variable_id,
                            sim_tag=sim_tag,
                            fgt=fgt,
                            metadata=run_info,
                            template_path=template_path)
    except Exception:
        logger.error('Run metadata upload failed.')
        traceback.print_exc()
    finally:
        logger.info("Process finished.")
        print("Process finished.")
Example #24
def upload_waterlevels(dir_path, ts_start_date, ts_start_time, run_date, run_time):
    """
    Config.json
    {
      "HYCHAN_OUT_FILE": "HYCHAN.OUT",
      "TIMDEP_FILE": "TIMDEP.OUT",
      "output_dir": "",

      "run_date": "2019-05-24",
      "run_time": "",
      "ts_start_date": "",
      "ts_start_time": "",
      "utc_offset": "",

      "sim_tag": "",

      "model": "WRF",
      "version": "v3",

      "unit": "mm",
      "unit_type": "Accumulative",

      "variable": "Precipitation"
    }

    """
    try:
        config_path = os.path.join(os.getcwd(), 'extract', 'config_curw_fcst.json')
        with open(config_path) as config_file:
            config = json.load(config_file)

        # flo2D related details
        HYCHAN_OUT_FILE = read_attribute_from_config_file('HYCHAN_OUT_FILE', config, True)
        TIMDEP_FILE = read_attribute_from_config_file('TIMDEP_FILE', config, True)
        output_dir = dir_path

        utc_offset = read_attribute_from_config_file('utc_offset', config, False)
        if utc_offset is None:
            utc_offset = ''

        # sim tag
        sim_tag = read_attribute_from_config_file('sim_tag', config, True)

        # source details
        model = read_attribute_from_config_file('model', config, True)
        version = read_attribute_from_config_file('version', config, True)

        # unit details
        unit = read_attribute_from_config_file('unit', config, True)
        unit_type = UnitType.getType(read_attribute_from_config_file('unit_type', config, True))

        # variable details
        variable = read_attribute_from_config_file('variable', config, True)

        hychan_out_file_path = os.path.join(output_dir, HYCHAN_OUT_FILE)
        timdep_file_path = os.path.join(output_dir, TIMDEP_FILE)

        pool = get_Pool(host=CURW_FCST_HOST, port=CURW_FCST_PORT, db=CURW_FCST_DATABASE, user=CURW_FCST_USERNAME,
                        password=CURW_FCST_PASSWORD)

        flo2d_model_name = '{}_{}'.format(model, version)

        flo2d_source = json.loads(get_source_parameters(pool=pool, model=model, version=version))

        print("############### source ############", flo2d_source)

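        # flo2d_stations presumably maps FLO2D output cell numbers to registered
        # station records; it is passed to save_forecast_timeseries_to_db below.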
        flo2d_stations = get_flo2d_output_stations(pool=pool, flo2d_model=StationEnum.getType(flo2d_model_name))

        print("############### 1st occurrence ############", flo2d_stations)

        source_id = get_source_id(pool=pool, model=model, version=version)

        variable_id = get_variable_id(pool=pool, variable=variable)

        unit_id = get_unit_id(pool=pool, unit=unit, unit_type=unit_type)

        tms_meta = {
            'sim_tag': sim_tag,
            'model': model,
            'version': version,
            'variable': variable,
            'unit': unit,
            'unit_type': unit_type.value,
            'source_id': source_id,
            'variable_id': variable_id,
            'unit_id': unit_id
        }

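        # The source parameters carry two cell maps: channel cells (reported in
        # HYCHAN.OUT) and flood-plain cells (reported in TIMDEP.OUT).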
        CHANNEL_CELL_MAP = flo2d_source["CHANNEL_CELL_MAP"]

        FLOOD_PLAIN_CELL_MAP = flo2d_source["FLOOD_PLAIN_CELL_MAP"]

        ELEMENT_NUMBERS = CHANNEL_CELL_MAP.keys()
        FLOOD_ELEMENT_NUMBERS = FLOOD_PLAIN_CELL_MAP.keys()
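        # SERIES_LENGTH is computed below by counting the first hydrograph block;
        # MISSING_VALUE marks time steps with no reading for a flood-plain cell.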
        SERIES_LENGTH = 0
        MISSING_VALUE = -999

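        # getUTCOffset presumably parses the configured offset string into a
        # timedelta, falling back to a default when the string is empty.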
        utcOffset = getUTCOffset(utc_offset, default=True)

        print('Extract Water Level Result of FLO2D on', run_date, '@', run_time, 'with Base time of', ts_start_date,
              '@', ts_start_time)

        # Check HYCHAN.OUT file exists
        if not os.path.exists(hychan_out_file_path):
            msg = 'Unable to find file: {}'.format(hychan_out_file_path)
            logger.warning(msg)
            print(msg)
            return

        #####################################
        # Calculate the size of time series #
        #####################################
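        # First pass: count the rows of the first CHANNEL HYDROGRAPH block to
        # learn how many time steps each series holds (the file carries no count).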
        bufsize = 65536
        with open(hychan_out_file_path) as infile:
            isWaterLevelLines = False
            isCounting = False
            countSeriesSize = 0  # HACK: the end of a series cannot be detected at EOF, so count the rows of the first series
            while True:
                lines = infile.readlines(bufsize)
                if not lines or SERIES_LENGTH:
                    break
                for line in lines:
                    if line.startswith('CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                        isWaterLevelLines = True
                    elif isWaterLevelLines:
                        cols = line.split()
                        if len(cols) > 0 and cols[0].replace('.', '', 1).isdigit():
                            countSeriesSize += 1
                            isCounting = True
                        elif isWaterLevelLines and isCounting:
                            SERIES_LENGTH = countSeriesSize
                            break

        print('Series Length is :', SERIES_LENGTH)
        #################################################################
        # Extract Channel Water Level elevations from HYCHAN.OUT file   #
        #################################################################
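        # Second pass: collect SERIES_LENGTH rows per mapped element and convert
        # each (model hour, elevation) pair into a timestamped value.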
        print('Extract Channel Water Level Result of FLO2D (HYCHAN.OUT) on', run_date, '@', run_time,
              'with Base time of',
              ts_start_date, '@', ts_start_time)
        with open(hychan_out_file_path) as infile:
            isWaterLevelLines = False
            isSeriesComplete = False
            waterLevelLines = []
            seriesSize = 0  # HACK: the end of a series cannot be detected at EOF, so rely on SERIES_LENGTH from the first pass
            while True:
                lines = infile.readlines(bufsize)
                if not lines:
                    break
                for line in lines:
                    if line.startswith('CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                        seriesSize = 0
                        elementNo = line.split()[5]

                        if elementNo in ELEMENT_NUMBERS:
                            isWaterLevelLines = True
                            waterLevelLines.append(line)
                        else:
                            isWaterLevelLines = False

                    elif isWaterLevelLines:
                        cols = line.split()
                        if len(cols) > 0 and isfloat(cols[0]):
                            seriesSize += 1
                            waterLevelLines.append(line)

                            if seriesSize == SERIES_LENGTH:
                                isSeriesComplete = True

                    if isSeriesComplete:
                        baseTime = datetime.strptime('%s %s' % (ts_start_date, ts_start_time), '%Y-%m-%d %H:%M:%S')
                        timeseries = []
                        elementNo = waterLevelLines[0].split()[5]
                        # print('Extracted Cell No', elementNo, CHANNEL_CELL_MAP[elementNo])
                        for ts in waterLevelLines[1:]:
                            v = ts.split()
                            if len(v) < 1:
                                continue
                            # Get flood level (Elevation)
                            value = v[1]
                            # Get flood depth (Depth)
                            # value = v[2]
                            if not isfloat(value) or value == 'NaN':
                                continue  # skip values that are missing or recorded as NaN
                            timeStep = float(v[0])
                            currentStepTime = baseTime + timedelta(hours=timeStep)
                            dateAndTime = currentStepTime.strftime("%Y-%m-%d %H:%M:%S")
                            timeseries.append([dateAndTime, value])

                        # Save Forecast values into Database
                        opts = {
                            'elementNo': elementNo,
                            'tms_meta': tms_meta
                        }
                        if utcOffset != timedelta():
                            opts['utcOffset'] = utcOffset

                        # Push timeseries to database
                        save_forecast_timeseries_to_db(pool=pool, timeseries=timeseries,
                                                       run_date=run_date, run_time=run_time, opts=opts,
                                                       flo2d_stations=flo2d_stations)

                        isWaterLevelLines = False
                        isSeriesComplete = False
                        waterLevelLines = []
                # -- END for loop
            # -- END while loop

        #################################################################
        # Extract Flood Plain water elevations from TIMEDEP.OUT file    #
        #################################################################

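        # TIMDEP.OUT interleaves single-column model-time lines with the cell
        # readings for that time step; each new time line flushes the block
        # gathered so far into the per-element series.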
        if not os.path.exists(timdep_file_path):
            msg = 'Unable to find file: {}'.format(timdep_file_path)
            logger.warning(msg)
            print(msg)
            return

        print('Extract Flood Plain Water Level Result of FLO2D (TIMEDEP.OUT) on', run_date, '@', run_time,
              'with Base time of', ts_start_date,
              '@', ts_start_time)

        with open(timdep_file_path) as infile:
            waterLevelLines = []
            # Build with a dict comprehension so each element gets its own list;
            # dict.fromkeys(..., []) would share one list object across all keys.
            waterLevelSeriesDict = {elementNo: [] for elementNo in FLOOD_ELEMENT_NUMBERS}
            while True:
                lines = infile.readlines(bufsize)
                if not lines:
                    break
                for line in lines:
                    if len(line.split()) == 1:
                        if len(waterLevelLines) > 0:
                            waterLevels = get_water_level_of_channels(waterLevelLines, FLOOD_ELEMENT_NUMBERS)

                            # Get Time stamp Ref:http://stackoverflow.com/a/13685221/1461060
                            ModelTime = float(waterLevelLines[0].split()[0])
                            baseTime = datetime.strptime('%s %s' % (ts_start_date, ts_start_time), '%Y-%m-%d %H:%M:%S')
                            currentStepTime = baseTime + timedelta(hours=ModelTime)
                            dateAndTime = currentStepTime.strftime("%Y-%m-%d %H:%M:%S")

                            for elementNo in FLOOD_ELEMENT_NUMBERS:
                                if elementNo in waterLevels:
                                    waterLevelSeriesDict[elementNo].append([dateAndTime, waterLevels[elementNo]])
                                else:
                                    waterLevelSeriesDict[elementNo].append([dateAndTime, MISSING_VALUE])

                            waterLevelLines = []
                    waterLevelLines.append(line)

            for elementNo in FLOOD_ELEMENT_NUMBERS:

                # Save Forecast values into Database
                opts = {
                    'elementNo': elementNo,
                    'tms_meta': tms_meta
                }
                if utcOffset != timedelta():
                    opts['utcOffset'] = utcOffset

                # Push timeseries to database
                save_forecast_timeseries_to_db(pool=pool, timeseries=waterLevelSeriesDict[elementNo],
                                               run_date=run_date, run_time=run_time, opts=opts,
                                               flo2d_stations=flo2d_stations)

    except Exception:
        msg = 'Config loading or water level extraction failed.'
        logger.error(msg)
        print(msg)
        traceback.print_exc()
    finally:
        logger.info("Process finished.")
        print("Process finished.")