Example 1
def create_outflow_old(dir_path, ts_start, ts_end):
    outflow_file_path = os.path.join(dir_path, 'OUTFLOW.DAT')
    init_tidal_path = os.path.join(os.getcwd(), 'outflow', 'INITTIDAL.CONF')
    config_path = os.path.join(os.getcwd(), 'outflow', 'config_old.json')
    print('create_outflow_old|outflow_file_path : ', outflow_file_path)
    print('create_outflow_old|init_tidal_path : ', init_tidal_path)
    print('create_outflow_old|config_path : ', config_path)
    with open(config_path) as json_file:
        config = json.load(json_file)
        adapter = MySQLAdapter(host=config['db_host'],
                               user=config['db_user'],
                               password=config['db_password'],
                               db=config['db_name'])
        opts = {
            'from': ts_start,
            'to': ts_end,
        }
        tidal_timeseries = get_forecast_timeseries(adapter, TIDAL_FORECAST_ID,
                                                   opts)
        if len(tidal_timeseries) > 0:
            print('tidal_timeseries::', len(tidal_timeseries),
                  tidal_timeseries[0], tidal_timeseries[-1])
        else:
            print('No data found for tidal timeseries: ', tidal_timeseries)
            adapter.close()  # release the DB connection before exiting
            sys.exit(1)
        adapter.close()
        print('Open FLO2D OUTFLOW ::', outflow_file_path)
        outflow_file = open(outflow_file_path, 'w')
        lines = []

        print('Reading INIT TIDAL CONF...')
        with open(init_tidal_path) as initTidalConfFile:
            initTidalLevels = initTidalConfFile.readlines()
            for initTidalLevel in initTidalLevels:
                if len(initTidalLevel.split()):  # Check if not empty line
                    lines.append(initTidalLevel)
                    if initTidalLevel[0] == 'N':
                        lines.append('{0} {1:{w}} {2:{w}}\n'.format(
                            'S', 0, 0, w=DAT_WIDTH))
                        base_date_time = datetime.strptime(
                            ts_start, '%Y-%m-%d %H:%M:%S')
                        for step in tidal_timeseries:
                            hours_so_far = (step[0] - base_date_time)
                            hours_so_far = 24 * hours_so_far.days + hours_so_far.seconds / (
                                60 * 60)
                            lines.append('{0} {1:{w}} {2:{w}{b}}\n'.format(
                                'S',
                                int(hours_so_far),
                                float(step[1]),
                                b='.2f',
                                w=DAT_WIDTH))
        outflow_file.writelines(lines)
        outflow_file.close()
        print('Finished writing OUTFLOW.DAT')
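A minimal invocation sketch; the directory and timestamps below are illustrative values, not from the source:

# Hypothetical call: writes OUTFLOW.DAT under /tmp/flo2d_run for the given window.
create_outflow_old('/tmp/flo2d_run', '2019-05-01 00:00:00', '2019-05-04 00:00:00')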
Example 2
    @classmethod  # required by unittest for setUpClass
    def setUpClass(cls):
        try:
            cls.root_dir = os.path.dirname(os.path.realpath(__file__))
            config = json.loads(open(pjoin(cls.root_dir, '../../config/CONFIG.json')).read())

            # Initialize Logger
            logging_config = json.loads(open(pjoin(cls.root_dir, '../../config/LOGGING_CONFIG.json')).read())
            logging.config.dictConfig(logging_config)
            cls.logger = logging.getLogger('MySQLAdapterTest')
            cls.logger.addHandler(logging.StreamHandler())
            cls.logger.info('setUpClass')

            MYSQL_HOST = "localhost"
            MYSQL_USER = "******"
            MYSQL_DB = "curw"
            MYSQL_PASSWORD = ""

            if 'MYSQL_HOST' in config:
                MYSQL_HOST = config['MYSQL_HOST']
            if 'MYSQL_USER' in config:
                MYSQL_USER = config['MYSQL_USER']
            if 'MYSQL_DB' in config:
                MYSQL_DB = config['MYSQL_DB']
            if 'MYSQL_PASSWORD' in config:
                MYSQL_PASSWORD = config['MYSQL_PASSWORD']

            cls.adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER, password=MYSQL_PASSWORD, db=MYSQL_DB)
            cls.eventIds = []
            cls.run_start_date = datetime.datetime(2018, 1, 2, 12, 0, 0)
            cls.run_end_date = datetime.datetime(2018, 1, 2, 16, 0, 0)
        except Exception:
            traceback.print_exc()
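A matching tearDownClass sketch (assumed, not shown in the source) to release the adapter opened above; MySQLAdapter.close() is used the same way elsewhere in this corpus:

    @classmethod
    def tearDownClass(cls):
        # Hypothetical cleanup: close the shared adapter created in setUpClass.
        if getattr(cls, 'adapter', None) is not None:
            cls.adapter.close()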
Example 3
def get_curw_adapter(mysql_config=None, mysql_config_path=None):
    if mysql_config_path is None:
        mysql_config_path = res_mgr.get_resource_path(
            'config/mysql_config.json')

    with open(mysql_config_path) as data_file:
        config = json.load(data_file)

    if mysql_config is not None and isinstance(mysql_config, dict):
        config.update(mysql_config)

    return MySQLAdapter(**config)
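Usage sketch: overriding a single key while keeping the rest of the packaged config (the host value is illustrative):

# Hypothetical call: merges {'host': ...} over config/mysql_config.json.
adapter = get_curw_adapter(mysql_config={'host': '127.0.0.1'})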
Example 4
def upload_waterlevels_curw(dir_path, ts_start_date, ts_start_time, run_date,
                            run_time):
    print(
        'upload_waterlevels_curw|[ts_start_date, ts_start_time, run_date, run_time] : ',
        [ts_start_date, ts_start_time, run_date, run_time])
    SERIES_LENGTH = 0
    MISSING_VALUE = -999

    try:
        config_path = os.path.join(os.getcwd(), 'extract', 'config.json')
        # print('config_path : ', config_path)
        utc_offset = ''
        with open(config_path) as json_file:
            config_data = json.load(json_file)
            output_dir = dir_path
            HYCHAN_OUT_FILE = config_data['hychan_out_file']
            TIMDEP_FILE = config_data['timdep_out_file']
            FLO2D_MODEL = config_data['flo2d_model']
            RUN_NAME = config_data['run_name']
            hychan_out_file_path = os.path.join(dir_path, HYCHAN_OUT_FILE)
            timdep_file_path = os.path.join(dir_path, TIMDEP_FILE)
            # print('hychan_out_file_path : ', hychan_out_file_path)
            # print('timdep_file_path : ', timdep_file_path)
            forceInsert = True
            MYSQL_HOST = config_data['db_host']
            MYSQL_USER = config_data['db_user']
            MYSQL_DB = config_data['db_name']
            MYSQL_PASSWORD = config_data['db_password']
            # if 'UTC_OFFSET' in config_data and len(
            #         config_data['UTC_OFFSET']):  # Use FLO2D Config file data, if available
            #     UTC_OFFSET = config_data['UTC_OFFSET']
            # if utc_offset:
            #     UTC_OFFSET = config_data
            utcOffset = getUTCOffset('', default=True)
            adapter = MySQLAdapter(host=MYSQL_HOST,
                                   user=MYSQL_USER,
                                   password=MYSQL_PASSWORD,
                                   db=MYSQL_DB)

            flo2d_source = adapter.get_source(name=FLO2D_MODEL)
            try:
                flo2d_source = json.loads(flo2d_source.get('parameters', "{}"))
                CHANNEL_CELL_MAP = {}
                if 'CHANNEL_CELL_MAP' in flo2d_source:
                    CHANNEL_CELL_MAP = flo2d_source['CHANNEL_CELL_MAP']
                FLOOD_PLAIN_CELL_MAP = {}
                if 'FLOOD_PLAIN_CELL_MAP' in flo2d_source:
                    FLOOD_PLAIN_CELL_MAP = flo2d_source['FLOOD_PLAIN_CELL_MAP']
                ELEMENT_NUMBERS = CHANNEL_CELL_MAP.keys()
                FLOOD_ELEMENT_NUMBERS = FLOOD_PLAIN_CELL_MAP.keys()
                # Calculate the size of time series
                bufsize = 65536
                with open(hychan_out_file_path) as infile:
                    isWaterLevelLines = False
                    isCounting = False
                        countSeriesSize = 0  # HACK: cannot detect the end of a series once the file ends
                    while True:
                        lines = infile.readlines(bufsize)
                        if not lines or SERIES_LENGTH:
                            break
                        for line in lines:
                            if line.startswith(
                                    'CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                                isWaterLevelLines = True
                            elif isWaterLevelLines:
                                cols = line.split()
                                if len(cols) > 0 and cols[0].replace(
                                        '.', '', 1).isdigit():
                                    countSeriesSize += 1
                                    isCounting = True
                                elif isWaterLevelLines and isCounting:
                                    SERIES_LENGTH = countSeriesSize
                                    break

                # print('Series Length is :', SERIES_LENGTH)
                bufsize = 65536
                #################################################################
                # Extract Channel Water Level elevations from HYCHAN.OUT file   #
                #################################################################
                with open(hychan_out_file_path) as infile:
                    isWaterLevelLines = False
                    isSeriesComplete = False
                    waterLevelLines = []
                    seriesSize = 0  # HACK: cannot detect the end of a series once the file ends
                    while True:
                        lines = infile.readlines(bufsize)
                        if not lines:
                            break
                        for line in lines:
                            if line.startswith(
                                    'CHANNEL HYDROGRAPH FOR ELEMENT NO:', 5):
                                seriesSize = 0
                                elementNo = line.split()[5]

                                if elementNo in ELEMENT_NUMBERS:
                                    isWaterLevelLines = True
                                    waterLevelLines.append(line)
                                else:
                                    isWaterLevelLines = False

                            elif isWaterLevelLines:
                                cols = line.split()
                                if len(cols) > 0 and isfloat(cols[0]):
                                    seriesSize += 1
                                    waterLevelLines.append(line)

                                    if seriesSize == SERIES_LENGTH:
                                        isSeriesComplete = True

                            if isSeriesComplete:
                                baseTime = datetime.strptime(
                                    '%s %s' % (ts_start_date, ts_start_time),
                                    '%Y-%m-%d %H:%M:%S')
                                timeseries = []
                                elementNo = waterLevelLines[0].split()[5]
                                # print('Extracted Cell No', elementNo, CHANNEL_CELL_MAP[elementNo])
                                for ts in waterLevelLines[1:]:
                                    v = ts.split()
                                    if len(v) < 1:
                                        continue
                                    # Get flood level (Elevation)
                                    value = v[1]
                                    # Get flood depth (Depth)
                                    # value = v[2]
                                    if not isfloat(value) or value == 'NaN':
                                        continue  # Skip missing or NaN values
                                    timeStep = float(v[0])
                                    currentStepTime = baseTime + timedelta(
                                        hours=timeStep)
                                    dateAndTime = currentStepTime.strftime(
                                        "%Y-%m-%d %H:%M:%S")
                                    timeseries.append([dateAndTime, value])
                                # Save Forecast values into Database
                                opts = {
                                    'forceInsert': forceInsert,
                                    'station': CHANNEL_CELL_MAP[elementNo],
                                    'run_name': RUN_NAME
                                }
                                # print('>>>>>', opts)
                                if utcOffset != timedelta():
                                    opts['utcOffset'] = utcOffset

                                # folder_path_ts = os.path.join(dir_path, 'time_series')
                                # if not os.path.exists(folder_path_ts):
                                #     try:
                                #         os.makedirs(folder_path_ts)
                                #     except OSError as e:
                                #         print(str(e))
                                # file_path = os.path.join(folder_path_ts, 'time_series_' + elementNo + '.txt')
                                # with open(file_path, 'w') as f:
                                #     for item in timeseries:
                                #         f.write("%s\n" % item)
                                save_forecast_timeseries(
                                    adapter, timeseries, run_date, run_time,
                                    opts)

                                isWaterLevelLines = False
                                isSeriesComplete = False
                                waterLevelLines = []
                        # -- END for loop
                    # -- END while loop

                #################################################################
                # Extract Flood Plain water elevations from BASE.OUT file       #
                #################################################################
                with open(timdep_file_path) as infile:
                    waterLevelLines = []
                    # One independent list per key; dict.fromkeys(keys, [])
                    # would make every key share a single list object.
                    waterLevelSeriesDict = {
                        elementNo: [] for elementNo in FLOOD_ELEMENT_NUMBERS}
                    while True:
                        lines = infile.readlines(bufsize)
                        if not lines:
                            break
                        for line in lines:
                            if len(line.split()) == 1:
                                if len(waterLevelLines) > 0:
                                    waterLevels = get_water_level_of_channels(
                                        waterLevelLines, FLOOD_ELEMENT_NUMBERS)
                                    # Get Time stamp Ref:http://stackoverflow.com/a/13685221/1461060
                                    # print(waterLevelLines[0].split())
                                    ModelTime = float(
                                        waterLevelLines[0].split()[0])
                                    baseTime = datetime.strptime(
                                        '%s %s' %
                                        (ts_start_date, ts_start_time),
                                        '%Y-%m-%d %H:%M:%S')
                                    currentStepTime = baseTime + timedelta(
                                        hours=ModelTime)
                                    dateAndTime = currentStepTime.strftime(
                                        "%Y-%m-%d %H:%M:%S")

                                    for elementNo in FLOOD_ELEMENT_NUMBERS:
                                        tmpTS = waterLevelSeriesDict[
                                            elementNo][:]
                                        if elementNo in waterLevels:
                                            tmpTS.append([
                                                dateAndTime,
                                                waterLevels[elementNo]
                                            ])
                                        else:
                                            tmpTS.append(
                                                [dateAndTime, MISSING_VALUE])
                                        waterLevelSeriesDict[elementNo] = tmpTS

                                    waterLevelLines = []
                            waterLevelLines.append(line)
                    for elementNo in FLOOD_ELEMENT_NUMBERS:
                        opts = {
                            'forceInsert': forceInsert,
                            'station': FLOOD_PLAIN_CELL_MAP[elementNo],
                            'run_name': RUN_NAME,
                            'source': FLO2D_MODEL
                        }
                        if utcOffset != timedelta():
                            opts['utcOffset'] = utcOffset
                        # folder_path_ts = os.path.join(dir_path, 'time_series')
                        # if not os.path.exists(folder_path_ts):
                        #     try:
                        #         os.makedirs(folder_path_ts)
                        #     except OSError as e:
                        #         print(str(e))
                        # file_path = os.path.join(folder_path_ts, 'time_series_'+elementNo+'.txt')
                        # with open(file_path, 'w') as f:
                        #     for item in waterLevelSeriesDict[elementNo]:
                        #         f.write("%s\n" % item)
                        save_forecast_timeseries(
                            adapter, waterLevelSeriesDict[elementNo], run_date,
                            run_time, opts)
                        # print('Extracted Cell No', elementNo, FLOOD_PLAIN_CELL_MAP[elementNo])
            except Exception as ex:
                print('water level extraction exception:', str(ex))
    except Exception as e:
        print('config reading exception:', str(e))
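Example 4 relies on an isfloat helper that is not shown; a minimal sketch of what it presumably does (note that float('NaN') parses, which is why the snippet checks for 'NaN' separately):

def isfloat(value):
    # True if value parses as a float (e.g. '1.25' or 'NaN'), else False.
    try:
        float(value)
        return True
    except (TypeError, ValueError):
        return False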
Example 5
            fcst_db_configs = config['fcst_db_configs']
        else:
            log.error('fcst_db_configs data not found')
        klb_observed_stations = config['klb_obs_stations']
        kub_observed_stations = config['kub_obs_stations']

        kub_basin_extent = config['KELANI_UPPER_BASIN_EXTENT']
        klb_basin_extent = config['KELANI_LOWER_BASIN_EXTENT']

        kelani_lower_basin_shp = get_resource_path('extraction/shp/klb-wgs84/klb-wgs84.shp')
        kelani_upper_basin_shp = get_resource_path('extraction/shp/kub-wgs84/kub-wgs84.shp')
        hourly_csv_file_dir = os.path.join(wrf_data_dir, run_date, run_time)
        create_dir_if_not_exists(hourly_csv_file_dir)
        raincsv_file_path = os.path.join(hourly_csv_file_dir, 'DailyRain.csv')
        if not os.path.isfile(raincsv_file_path):
            curw_adapter = MySQLAdapter(host=curw_db_config['host'], user=curw_db_config['user'],
                                        password=curw_db_config['password'], db=curw_db_config['db'])

            # fcst_adapter = MySQLAdapter(host=fcst_db_configs['host'], user=fcst_db_configs['user'],
            #                             password=fcst_db_configs['password'], db=fcst_db_configs['db'])

            obs_start = ts_start_datetime.strftime('%Y-%m-%d %H:%M:%S')
            obs_end = run_datetime.strftime('%Y-%m-%d %H:%M:%S')
            print('[obs_start, obs_end] : ', [obs_start, obs_end])

            fcst_start = obs_end
            fcst_end = ts_end_datetime.strftime('%Y-%m-%d %H:%M:%S')
            print('[fcst_start, fcst_end] : ', [fcst_start, fcst_end])
            forecast_duration = int((datetime.strptime(fcst_end, '%Y-%m-%d %H:%M:%S') - datetime.strptime(
                fcst_start, '%Y-%m-%d %H:%M:%S')).total_seconds() / (60 * 60))

            obs_kub_mean_df = get_observed_kub_mean(curw_adapter, kub_observed_stations, obs_start, obs_end)
Example 6
        print('WARNING: Force Insert enabled')

    WU_DATA = json.loads(open(WU_CONFIG).read())

    stations = WU_DATA['stations']

    metaData = {
        'station': 'Hanwella',
        'variable': 'Precipitation',
        'unit': 'mm',
        'type': 'Observed',
        'source': 'WeatherStation',
        'name': 'WUnderground',
    }
    adapter = MySQLAdapter(host=MYSQL_HOST,
                           user=MYSQL_USER,
                           password=MYSQL_PASSWORD,
                           db=MYSQL_DB)

    for station in stations:
        print('station:', station)
        WUndergroundMeta, *timeseries = getTimeseries(
            BASE_URL, station['stationId'], now)  # Unpack header row and data rows
        DateUTCIndex = WUndergroundMeta.index('DateUTC')

        if len(timeseries) < 1:
            print('INFO: Timeseries has no data on:',
                  now.strftime("%Y-%m-%d"), timeseries)
            continue
        print('Start Date :', timeseries[0][0])
        print('End Date :', timeseries[-1][0])
        startDateTime = datetime.datetime.strptime(timeseries[0][0],
Example 7
                         month=data_array[1],
                         day=data_array[2],
                         hour=data_array[3])
    return timestamp.strftime(Common_DateTime_Format)


def prepare_timeseries(df):
    timeseries = []
    for index, row in df.iterrows():
        timeseries.append([row['timestamp'], row['value']])
    return timeseries
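
# Hedged usage sketch for prepare_timeseries (illustrative data; pandas is
# imported as pd elsewhere in this module, as the code below assumes):
#   df = pd.DataFrame({'timestamp': ['2019-03-20 01:00:00'], 'value': [1.25]})
#   prepare_timeseries(df)  # -> [['2019-03-20 01:00:00', 1.25]]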


# Create database connections.
mysql_adapter = MySQLAdapter(host=DB_CONFIG['MYSQL_HOST'],
                             user=DB_CONFIG['MYSQL_USER'],
                             password=DB_CONFIG['MYSQL_PASSWORD'],
                             db=DB_CONFIG['MYSQL_DB'])

print(
    '\n##########################################################################################'
)
print("Extracting Observed data from Icharm on %s" %
      datetime.now().strftime(Common_DateTime_Format))

# Iterate over Icharm csv file download links and push data to DB
for station_id, url in Links.items():
    # Load the .csv to a Dataframe
    df = pd.read_csv(url,
                     header=None,
                     names=['year', 'month', 'day', 'hour', 'value'])
    df['timestamp'] = df[['year', 'month', 'day',
Example 8
        obs_end = '2019-03-20 00:00:00'
        forecast_end = '2019-03-22 23:00:00'

        points = np.genfromtxt(kelani_basin_points_file, delimiter=',')

        kel_lon_min = np.min(points, 0)[1]
        kel_lat_min = np.min(points, 0)[2]
        kel_lon_max = np.max(points, 0)[1]
        kel_lat_max = np.max(points, 0)[2]

        print('[kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max] : ',
              [kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max])

        forecast_adapter = MySQLAdapter(
            host=forecast_db_config['host'],
            user=forecast_db_config['user'],
            password=forecast_db_config['password'],
            db=forecast_db_config['db'])
        # Order: min_lat, min_lon, max_lat, max_lon
        forecast_stations, station_points = get_forecast_stations_from_net_cdf(
            reference_net_cdf, kel_lat_min, kel_lon_min, kel_lat_max,
            kel_lon_max)
        print('forecast_stations length : ', len(forecast_stations))

        forecast_precipitations = get_forecast_precipitation(
            forecast_stations, obs_end, forecast_end, forecast_adapter)
        # print('forecast_precipitations : ', forecast_precipitations)
        forecast_adapter.close()

        observed_adapter = MySQLAdapter(
            host=observed_db_config['host'],
Example 9
            CONFIG['WATER_DISCHARGE_DIR']) > 0:
        WATER_DISCHARGE_DIR = CONFIG['WATER_DISCHARGE_DIR']
    if 'OUTPUT_DIR' in CONFIG:
        OUTPUT_DIR = CONFIG['OUTPUT_DIR']

    if 'MYSQL_HOST' in CONFIG:
        MYSQL_HOST = CONFIG['MYSQL_HOST']
    if 'MYSQL_USER' in CONFIG:
        MYSQL_USER = CONFIG['MYSQL_USER']
    if 'MYSQL_DB' in CONFIG:
        MYSQL_DB = CONFIG['MYSQL_DB']
    if 'MYSQL_PASSWORD' in CONFIG:
        MYSQL_PASSWORD = CONFIG['MYSQL_PASSWORD']

    adapter = MySQLAdapter(host=MYSQL_HOST,
                           user=MYSQL_USER,
                           password=MYSQL_PASSWORD,
                           db=MYSQL_DB)
    # TODO: Pass source name as a parameter to script
    flo2d_source = adapter.get_source(name='FLO2D')
    try:
        flo2d_source = json.loads(flo2d_source.get('parameters', "{}"))
    except Exception as e:
        print(e)
        traceback.print_exc()
    CHANNEL_CELL_MAP = {}
    if 'CHANNEL_CELL_MAP' in flo2d_source:
        CHANNEL_CELL_MAP = flo2d_source['CHANNEL_CELL_MAP']
    FLOOD_PLAIN_CELL_MAP = {}
    if 'FLOOD_PLAIN_CELL_MAP' in flo2d_source:
        FLOOD_PLAIN_CELL_MAP = flo2d_source['FLOOD_PLAIN_CELL_MAP']
    """
Example 10
def create_inflow(dir_path, run_date, run_time):
    try:
        # run_date = datetime.now().strftime("%Y-%m-%d")
        # run_time = datetime.now().strftime("%H:00:00")
        # FLO-2D parameters
        IHOURDAILY = 0  # 0-hourly interval, 1-daily interval
        IDEPLT = 0  # Set to 0 on running with Text mode. Otherwise cell number e.g. 8672
        IFC = 'C'  # floodplain 'F' or a channel 'C'
        INOUTFC = 0  # 0-inflow, 1-outflow
        KHIN = 8655  # inflow nodes
        HYDCHAR = 'H'  # Denote line of inflow hydrograph time and discharge pairs
        # try:
        #     opts, args = getopt.getopt(sys.argv[1:], "hd:t:T:f:b:", [
        #         "help", "date=", "time=", "forward=", "backward=", "wrf-rf=", "wrf-kub=", "tag="
        #     ])
        # except getopt.GetoptError:
        #     sys.exit(2)
        # for opt, arg in opts:
        #     if opt in ("-h", "--help"):
        #         sys.exit()
        #     elif opt in ("-d", "--date"):
        #         run_date = arg # 2018-05-24
        #     elif opt in ("-t", "--time"):
        #         run_time = arg # 16:00:00
        #     elif opt in ("-f","--forward"):
        #         forward = arg
        #     elif opt in ("-b","--backward"):
        #         backward = arg
        #     elif opt in ("--wrf-rf"):
        #         RF_DIR_PATH = arg
        #     elif opt in ("--wrf-kub"):
        #         KUB_DIR_PATH = arg
        #     elif opt in ("-T", "--tag"):
        #         tag = arg
        #run_date = '2019-04-29'
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        # backward = 2
        # forward = 3
        startDateTime = datetime.strptime('%s %s' % (run_date, run_time), '%Y-%m-%d %H:%M:%S')
        print("startDateTime : ", startDateTime)
        config_path = os.path.join(os.getcwd(), 'inflowdat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
            output_dir = dir_path
            inflow_file = config_data["inflow_file"]

            # CONTROL_INTERVAL = config_data["CONTROL_INTERVAL"]
            # CSV_NUM_METADATA_LINES = config_data["CSV_NUM_METADATA_LINES"]
            DAT_WIDTH = config_data["DAT_WIDTH"]
            OBSERVED_WL_IDS = config_data["OBSERVED_WL_IDS"]

            MYSQL_HOST = config_data['db_host']
            MYSQL_USER = config_data['db_user']
            MYSQL_DB = config_data['db_name']
            MYSQL_PASSWORD = config_data['db_password']

            adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER, password=MYSQL_PASSWORD, db=MYSQL_DB)
            try:
                #hourly_inflow_file_dir = os.path.join(output_dir, run_date, run_time)
                hourly_inflow_file = os.path.join(output_dir, inflow_file)
                #create_dir_if_not_exists(hourly_inflow_file_dir)
                print("hourly_outflow_file : ", hourly_inflow_file)
                if not os.path.isfile(hourly_inflow_file):
                    discharge_df = get_discharge_data(adapter, startDateTime)
                    print('discharge_df', discharge_df)
                    initial_water_levels = update_initial_water_levels(OBSERVED_WL_IDS, adapter, startDateTime.strftime("%Y-%m-%d %H:%M:%S"))
                    f = open(hourly_inflow_file, 'w')
                    line1 = '{0} {1:{w}{b}}\n'.format(IHOURDAILY, IDEPLT, b='d', w=DAT_WIDTH)
                    line2 = '{0} {1:{w}{b}} {2:{w}{b}}\n'.format(IFC, INOUTFC, KHIN, b='d', w=DAT_WIDTH)
                    line3 = '{0} {1:{w}{b}} {2:{w}{b}}\n'.format(HYDCHAR, 0.0, 0.0, b='.1f', w=DAT_WIDTH)
                    f.writelines([line1, line2, line3])
                    lines = []
                    i = 1.0
                    for time, row in discharge_df.iterrows():
                        lines.append(
                            '{0} {1:{w}{b}} {2:{w}{b}}\n'.format(HYDCHAR, i, float(row["value"]), b='.1f', w=DAT_WIDTH))
                        i += 1.0
                    lines.extend(initial_water_levels)
                    f.writelines(lines)
                    f.close()
                adapter.close()
            except Exception as ex:
                adapter.close()
                print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
Example 11
def create_hybrid_raincell(dir_path,
                           run_date,
                           run_time,
                           forward=3,
                           backward=2):
    try:
        #60 120 2019-05-01 00:00:00 2019-05-05 23:00:00
        #60 120 2019-05-01 23:00:00 2019-05-06 23:00:00
        #60 120 2019-05-01 23:00:00 2019-05-06 23:00:00
        #60 120 2019-04-30 23:00:00 2019-05-05 23:00:00
        # run_date = datetime.now().strftime("%Y-%m-%d")
        # run_time = datetime.now().strftime("%H:00:00")
        # run_date = '2019-05-02'
        # run_time = '23:00:00'
        tag = ''
        # try:
        #     opts, args = getopt.getopt(sys.argv[1:], "hd:t:T:f:b:", [
        #         "help", "date=", "time=", "forward=", "backward=", "wrf-rf=", "wrf-kub=", "tag="
        #     ])
        # except getopt.GetoptError:
        #     usage()
        #     sys.exit(2)
        # for opt, arg in opts:
        #     if opt in ("-h", "--help"):
        #         usage()
        #         sys.exit()
        #     elif opt in ("-d", "--date"):
        #         run_date = arg # 2018-05-24
        #     elif opt in ("-t", "--time"):
        #         run_time = arg # 16:00:00
        #     elif opt in ("-f","--forward"):
        #         forward = arg
        #     elif opt in ("-b","--backward"):
        #         backward = arg
        #     elif opt in ("--wrf-rf"):
        #         RF_DIR_PATH = arg
        #     elif opt in ("--wrf-kub"):
        #         KUB_DIR_PATH = arg
        #     elif opt in ("-T", "--tag"):
        #         tag = arg
        #run_date = '2019-04-29'
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        # backward = 2
        # forward = 3
        start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                        '%Y-%m-%d %H:%M:%S')
        start_ts_lk = start_ts_lk.strftime(
            '%Y-%m-%d_%H:00')  # '2018-05-24_08:00'
        print("WrfTrigger start_ts_lk : ", start_ts_lk)
        duration_days = (int(backward), int(forward))
        print("WrfTrigger duration_days : ", duration_days)
        config_path = os.path.join(os.getcwd(), 'raincelldat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
            key_file = os.path.join(os.getcwd(), 'raincelldat',
                                    'uwcc-admin.json')
            bucket_name = config_data["BUCKET_NAME"]
            initial_path_prefix = config_data["INITIAL_PATH_PREFIX"]
            net_cdf_file_format = config_data["NET_CDF_FILE"]
            wrf_data_dir = dir_path
            print("wrf_data_dir : ", wrf_data_dir)
            net_cdf_date = datetime.strptime(run_date,
                                             '%Y-%m-%d') - timedelta(hours=24)
            net_cdf_date = net_cdf_date.strftime("%Y-%m-%d")
            download_location = os.path.join(os.getcwd(), run_date)
            print("download_location : ", download_location)
            print("net_cdf_date : ", net_cdf_date)

            MYSQL_HOST = config_data['db_host']
            MYSQL_USER = config_data['db_user']
            MYSQL_DB = config_data['db_name']
            MYSQL_PASSWORD = config_data['db_password']
            klb_observed_stations = config_data['klb_obs_stations']
            klb_points = get_resource_path(
                'extraction/local/kelani_basin_points_250m.txt')

            adapter = MySQLAdapter(host=MYSQL_HOST,
                                   user=MYSQL_USER,
                                   password=MYSQL_PASSWORD,
                                   db=MYSQL_DB)
            name_list = net_cdf_file_format.split("-")
            net_cdf_file_name = name_list[
                0] + "_" + net_cdf_date + "_" + name_list[1]
            try:
                net_cdf_file_path = os.path.join(download_location,
                                                 net_cdf_file_name)
                print("net_cdf_file_path : ", net_cdf_file_path)
                if not os.path.isfile(net_cdf_file_path):
                    download_netcdf(initial_path_prefix, download_location,
                                    net_cdf_file_name, key_file, bucket_name)
                if os.path.isfile(net_cdf_file_path):
                    raincell_file_path = os.path.join(wrf_data_dir, run_date,
                                                      run_time, 'RAINCELL.DAT')
                    if not os.path.isfile(raincell_file_path):
                        create_raincell_file(
                            run_time, adapter, net_cdf_file_path, start_ts_lk,
                            os.path.join(wrf_data_dir, run_date),
                            klb_observed_stations, klb_points, duration_days)
                adapter.close()
            except Exception as ex:
                adapter.close()
                print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
Example 12
def create_hybrid_mike_input(dir_path, run_date, run_time, forward, backward):
    try:
        res_mins = '60'
        model_prefix = 'wrf'
        forecast_source = 'wrf0'
        run_name = 'Cloud-1'
        forecast_adapter = None
        observed_adapter = None
        kelani_basin_mike_points_file = get_resource_path(
            'extraction/local/metro_col_sub_catch_centroids.csv')
        kelani_basin_points_file = get_resource_path(
            'extraction/local/kelani_basin_points_250m.txt')
        kelani_lower_basin_shp_file = get_resource_path(
            'extraction/shp/klb-wgs84/klb-wgs84.shp')
        reference_net_cdf = get_resource_path(
            'extraction/netcdf/wrf_wrfout_d03_2019-03-31_18_00_00_rf')
        #config_path = os.path.join(os.getcwd(), 'raincelldat', 'config.json')
        config_path = os.path.join(os.getcwd(), 'config.json')
        with open(config_path) as json_file:
            config = json.load(json_file)
            if 'forecast_db_config' in config:
                forecast_db_config = config['forecast_db_config']
            if 'observed_db_config' in config:
                observed_db_config = config['observed_db_config']
            if 'klb_obs_stations' in config:
                obs_stations = copy.deepcopy(config['klb_obs_stations'])

            res_mins = int(res_mins)
            print('[run_date, run_time] : ', [run_date, run_time])
            start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                            '%Y-%m-%d %H:%M:%S')
            start_ts_lk = start_ts_lk.strftime(
                '%Y-%m-%d_%H:00')  # '2018-05-24_08:00'
            duration_days = (int(backward), int(forward))
            obs_start = datetime.strptime(
                start_ts_lk,
                '%Y-%m-%d_%H:%M') - timedelta(days=duration_days[0])
            obs_end = datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M')
            forecast_end = datetime.strptime(
                start_ts_lk,
                '%Y-%m-%d_%H:%M') + timedelta(days=duration_days[1])
            print([obs_start, obs_end, forecast_end])

            fcst_duration_start = obs_end.strftime('%Y-%m-%d %H:%M:%S')
            fcst_duration_end = (
                datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') +
                timedelta(days=3)).strftime('%Y-%m-%d 00:00:00')
            obs_duration_start = (
                datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
                timedelta(days=2)).strftime('%Y-%m-%d 00:00:00')

            print('obs_duration_start : ', obs_duration_start)
            print('fcst_duration_start : ', fcst_duration_start)
            print('fcst_duration_end : ', fcst_duration_end)

            observed_duration = int(
                (datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
                 datetime.strptime(obs_duration_start,
                                   '%Y-%m-%d %H:%M:%S')).total_seconds() /
                (60 * res_mins))
            forecast_duration = int(
                (datetime.strptime(fcst_duration_end, '%Y-%m-%d %H:%M:%S') -
                 datetime.strptime(fcst_duration_start,
                                   '%Y-%m-%d %H:%M:%S')).total_seconds() /
                (60 * res_mins))
            total_duration = int(
                (datetime.strptime(fcst_duration_end, '%Y-%m-%d %H:%M:%S') -
                 datetime.strptime(obs_duration_start,
                                   '%Y-%m-%d %H:%M:%S')).total_seconds() /
                (60 * res_mins))

            print('observed_duration : ', observed_duration)
            print('forecast_duration : ', forecast_duration)
            print('total_duration : ', total_duration)

            mike_input_file_path = os.path.join(dir_path, 'mike_input.txt')
            print('mike_input_file_path : ', mike_input_file_path)
            if not os.path.isfile(mike_input_file_path):
                points = np.genfromtxt(kelani_basin_points_file, delimiter=',')

                kel_lon_min = np.min(points, 0)[1]
                kel_lat_min = np.min(points, 0)[2]
                kel_lon_max = np.max(points, 0)[1]
                kel_lat_max = np.max(points, 0)[2]

                mike_points = np.genfromtxt(kelani_basin_mike_points_file,
                                            delimiter=',',
                                            names=True,
                                            dtype=None)
                print('mike_points : ', mike_points)
                print('mike_points : ', mike_points[0][0].decode())
                print('mike_points : ', mike_points[1][0].decode())
                print('mike_points : ', mike_points[2][0].decode())

                def _get_points_names(mike_points):
                    mike_point_names = []
                    for p in mike_points:
                        mike_point_names.append(p[0].decode())
                    return mike_point_names

                #mike_point_names = get_centroid_names(kelani_basin_mike_points_file)
                mike_point_names = _get_points_names(mike_points)

                print('mike_point_names : ', mike_point_names)

                print('mike_point_names[0] : ', mike_point_names[0])
                print('mike_point_names[1] : ', mike_point_names[1])
                print('mike_point_names[2] : ', mike_point_names[2])
                print('mike_point_names[-1] : ', mike_point_names[-1])

                print(
                    '[kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max] : ',
                    [kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max])
                #"""
                # Order: min_lat, min_lon, max_lat, max_lon
                forecast_stations, station_points = get_forecast_stations_from_net_cdf(
                    model_prefix, reference_net_cdf, kel_lat_min, kel_lon_min,
                    kel_lat_max, kel_lon_max)
                print('forecast_stations length : ', len(forecast_stations))
                file_header = ','.join(mike_point_names)
                print('file_header : ', file_header)
                observed_adapter = MySQLAdapter(
                    host=observed_db_config['host'],
                    user=observed_db_config['user'],
                    password=observed_db_config['password'],
                    db=observed_db_config['db'])

                # print('obs_stations : ', obs_stations)
                observed_precipitations = get_observed_precip(
                    obs_stations,
                    obs_duration_start,
                    fcst_duration_start,
                    observed_duration,
                    observed_adapter,
                    forecast_source='wrf0')
                observed_adapter.close()
                observed_adapter = None
                validated_obs_station = {}
                # print('obs_stations.keys() : ', obs_stations.keys())
                # print('observed_precipitations.keys() : ', observed_precipitations.keys())

                for station_name in obs_stations.keys():
                    if station_name in observed_precipitations.keys():
                        validated_obs_station[station_name] = obs_stations[
                            station_name]
                    else:
                        print('invalid station_name : ', station_name)

                # if bool(observed_precipitations):
                if len(validated_obs_station) >= 1:
                    thess_poly = get_voronoi_polygons(
                        validated_obs_station,
                        kelani_lower_basin_shp_file,
                        add_total_area=False)
                    forecast_adapter = MySQLAdapter(
                        host=forecast_db_config['host'],
                        user=forecast_db_config['user'],
                        password=forecast_db_config['password'],
                        db=forecast_db_config['db'])

                    forecast_precipitations = get_forecast_precipitation(
                        forecast_source,
                        run_name,
                        forecast_stations,
                        forecast_adapter,
                        obs_end.strftime('%Y-%m-%d %H:%M:%S'),
                        forward_days=3)
                    forecast_adapter.close()
                    forecast_adapter = None
                    if bool(forecast_precipitations):
                        fcst_thess_poly = get_voronoi_polygons(
                            station_points,
                            kelani_lower_basin_shp_file,
                            add_total_area=False)

                        fcst_point_thess_idx = []
                        for point in mike_points:
                            fcst_point_thess_idx.append(
                                is_inside_geo_df(fcst_thess_poly,
                                                 lon=point[1],
                                                 lat=point[2]))
                        # print('fcst_point_thess_idx : ', fcst_point_thess_idx)

                        # create_dir_if_not_exists(dir_path)
                        point_thess_idx = []
                        for point in mike_points:
                            point_thess_idx.append(
                                is_inside_geo_df(thess_poly,
                                                 lon=point[1],
                                                 lat=point[2]))

                        print('len(mike_points)', len(mike_points))
                        print('len(point_thess_idx)', len(point_thess_idx))
                        print('len(fcst_point_thess_idx)',
                              len(fcst_point_thess_idx))

                        print('point_thess_idx : ', point_thess_idx)
                        print('fcst_point_thess_idx : ', fcst_point_thess_idx)
                        print('mike_point_names : ', mike_point_names)
                        with open(mike_input_file_path,
                                  mode='w') as output_file:
                            output_writer = csv.writer(output_file,
                                                       delimiter=',',
                                                       dialect='excel')
                            header = ['Times']
                            header.extend(mike_point_names)
                            output_writer.writerow(header)
                            print(
                                'range 1 : ',
                                int(24 * 60 * duration_days[0] / res_mins) + 1)
                            print(
                                'range 2 : ',
                                int(24 * 60 * duration_days[1] / res_mins) - 1)
                            obs_duration_end = None
                            for t in range(observed_duration):
                                date_time = datetime.strptime(
                                    obs_duration_start,
                                    '%Y-%m-%d %H:%M:%S') + timedelta(hours=t)
                                obs_duration_end = date_time.strftime(
                                    '%Y-%m-%d %H:%M:%S')
                                print(date_time.strftime('%Y-%m-%d %H:%M:%S'))
                                obs_rf_list = []
                                for i, point in enumerate(mike_points):
                                    rf = float(
                                        observed_precipitations[
                                            point_thess_idx[i]].values[t]
                                    ) if point_thess_idx[i] is not None else 0
                                    obs_rf_list.append('%.6f' % rf)
                                row = [date_time.strftime('%Y-%m-%d %H:%M:%S')]
                                row.extend(obs_rf_list)
                                output_writer.writerow(row)
                            print(
                                'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
                            )
                            next_time_step = datetime.strptime(
                                obs_duration_end,
                                '%Y-%m-%d %H:%M:%S') + timedelta(hours=1)
                            for t in range(forecast_duration):
                                date_time = next_time_step + timedelta(hours=t)
                                print(date_time.strftime('%Y-%m-%d %H:%M:%S'))
                                fcst_rf_list = []
                                for i, point in enumerate(mike_points):
                                    rf = float(forecast_precipitations[
                                        fcst_point_thess_idx[i]].values[t]
                                               ) if fcst_point_thess_idx[
                                                   i] is not None else 0
                                    fcst_rf_list.append('%.6f' % rf)
                                row = [date_time.strftime('%Y-%m-%d %H:%M:%S')]
                                row.extend(fcst_rf_list)
                                output_writer.writerow(row)
                    else:
                        print('----------------------------------------------')
                        print('No forecast data.')
                        print('----------------------------------------------')
                else:
                    print('----------------------------------------------')
                    print('No observed data.')
                    print('Available station count: ',
                          len(validated_obs_station))
                    print('Proceed with forecast data.')
                    print('----------------------------------------------')
            # """
    except Exception as e:
        print('Raincell generation error|Exception:', str(e))
        traceback.print_exc()
        try:
            if forecast_adapter is not None:
                forecast_adapter.close()
            if observed_adapter is not None:
                observed_adapter.close()
        except Exception as ex:
            print(str(ex))
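For orientation, the mike_input.txt written above is a plain CSV: a 'Times' column followed by one rainfall column per MIKE point, observed rows first and forecast rows after. A sketch with hypothetical point names and values:

Times,Point_A,Point_B
2019-05-01 00:00:00,0.000000,1.250000
2019-05-01 01:00:00,0.500000,0.000000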
Example 13
def create_outflow(dir_path, run_date, run_time, forward = 3, backward = 2):
    try:
        # run_date = datetime.now().strftime("%Y-%m-%d")
        # run_time = datetime.now().strftime("%H:00:00")
        # tag = ''
        # try:
        #     opts, args = getopt.getopt(sys.argv[1:], "hd:t:T:f:b:", [
        #         "help", "date=", "time=", "forward=", "backward=", "wrf-rf=", "wrf-kub=", "tag="
        #     ])
        # except getopt.GetoptError:
        #     sys.exit(2)
        # for opt, arg in opts:
        #     if opt in ("-h", "--help"):
        #         sys.exit()
        #     elif opt in ("-d", "--date"):
        #         run_date = arg # 2018-05-24
        #     elif opt in ("-t", "--time"):
        #         run_time = arg # 16:00:00
        #     elif opt in ("-f","--forward"):
        #         forward = arg
        #     elif opt in ("-b","--backward"):
        #         backward = arg
        #     elif opt in ("--wrf-rf"):
        #         RF_DIR_PATH = arg
        #     elif opt in ("--wrf-kub"):
        #         KUB_DIR_PATH = arg
        #     elif opt in ("-T", "--tag"):
        #         tag = arg
        #run_date = '2019-04-29'
        print("WrfTrigger run_date : ", run_date)
        print("WrfTrigger run_time : ", run_time)
        # backward = 2
        # forward = 3
        startDateTime = datetime.strptime('%s %s' % (run_date, run_time), '%Y-%m-%d %H:%M:%S')
        print("startDateTime : ", startDateTime)
        config_path = os.path.join(os.getcwd(), 'outflowdat', 'config.json')
        print('config_path : ', config_path)
        with open(config_path) as json_file:
            config_data = json.load(json_file)
            output_dir = dir_path
            inittidal_conf_path = os.path.join(os.getcwd(), 'outflowdat', 'INITTIDAL.CONF')

            CONTROL_INTERVAL = config_data["CONTROL_INTERVAL"]
            DAT_WIDTH = config_data["DAT_WIDTH"]
            TIDAL_FORECAST_ID = config_data["TIDAL_FORECAST_ID"]

            MYSQL_HOST = config_data['db_host']
            MYSQL_USER = config_data['db_user']
            MYSQL_DB = config_data['db_name']
            MYSQL_PASSWORD = config_data['db_password']

            adapter = MySQLAdapter(host=MYSQL_HOST, user=MYSQL_USER, password=MYSQL_PASSWORD, db=MYSQL_DB)
            try:
                hourly_outflow_file = os.path.join(output_dir, 'OUTFLOW.DAT')
                print("hourly_outflow_file : ", hourly_outflow_file)
                if not os.path.isfile(hourly_outflow_file):
                    opts = {
                        'from': startDateTime.strftime("%Y-%m-%d %H:%M:%S"),
                        'to': (startDateTime + timedelta(minutes=CONTROL_INTERVAL)).strftime("%Y-%m-%d %H:%M:%S"),
                    }
                    tidal_timeseries = get_forecast_timeseries(adapter, TIDAL_FORECAST_ID, opts)
                    if len(tidal_timeseries) > 0:
                        print('tidal_timeseries::', len(tidal_timeseries), tidal_timeseries[0], tidal_timeseries[-1])
                        f = open(hourly_outflow_file, 'w')
                        lines = []
                        print('Reading INIT TIDAL CONF...')
                        with open(inittidal_conf_path) as initTidalConfFile:
                            initTidalLevels = initTidalConfFile.readlines()
                            for initTidalLevel in initTidalLevels:
                                if len(initTidalLevel.split()):  # Check if not empty line
                                    lines.append(initTidalLevel)
                                    if initTidalLevel[0] == 'N':
                                        lines.append('{0} {1:{w}} {2:{w}}\n'.format('S', 0, 0, w=DAT_WIDTH))
                                        base_date_time = startDateTime.replace(minute=0, second=0, microsecond=0)
                                        for step in tidal_timeseries:
                                            hours_so_far = (step[0] - base_date_time)
                                            hours_so_far = 24 * hours_so_far.days + hours_so_far.seconds / (60 * 60)
                                            lines.append('{0} {1:{w}} {2:{w}{b}}\n'
                                                         .format('S', int(hours_so_far), float(step[1]), b='.2f',
                                                                 w=DAT_WIDTH))
                        f.writelines(lines)
                        f.close()
                        print('Finished writing OUTFLOW.DAT')
                    else:
                        print('No data found for tidal timeseries: ', tidal_timeseries)
                        adapter.close()  # release the DB connection before exiting
                        sys.exit(1)
                adapter.close()
            except Exception as ex:
                adapter.close()
                print("Download required files|Exception: ", str(ex))
    except Exception as e:
        print("Exception occurred: ", str(e))
Example 14
    @classmethod  # required by unittest for setUpClass
    def setUpClass(cls):
        try:
            root_dir = os.path.dirname(os.path.realpath(__file__))
            config = json.loads(open(root_dir + '/CONFIG.json').read())

            # Initialize Logger
            logging_config = json.loads(
                open(root_dir + '/LOGGING_CONFIG.json').read())
            logging.config.dictConfig(logging_config)
            cls.logger = logging.getLogger('MySQLAdapterTest')
            cls.logger.addHandler(logging.StreamHandler())
            cls.logger.info('setUpClass')

            MYSQL_HOST = "localhost"
            MYSQL_USER = "******"
            MYSQL_DB = "curw"
            MYSQL_PASSWORD = ""

            DAY_INTERVAL = 24

            if 'MYSQL_HOST' in config:
                MYSQL_HOST = config['MYSQL_HOST']
            if 'MYSQL_USER' in config:
                MYSQL_USER = config['MYSQL_USER']
            if 'MYSQL_DB' in config:
                MYSQL_DB = config['MYSQL_DB']
            if 'MYSQL_PASSWORD' in config:
                MYSQL_PASSWORD = config['MYSQL_PASSWORD']

            cls.adapter = MySQLAdapter(host=MYSQL_HOST,
                                       user=MYSQL_USER,
                                       password=MYSQL_PASSWORD,
                                       db=MYSQL_DB)
            cls.eventIds = []

            # Store Rainfall Data
            stations = ['Colombo', 'Hanwella', 'Norwood']
            types = [
                'Forecast-0-d', 'Forecast-1-d-after', 'Forecast-2-d-after'
            ]
            meta_data = {
                'station': 'Hanwella',
                'variable': 'Precipitation',
                'unit': 'mm',
                'type': 'Forecast-0-d',
                'source': 'WRF',
                'name': 'Forecast Test',
                'start_date': '2017-05-01 00:00:00',
                'end_date': '2017-05-03 23:00:00'
            }
            RAINFALL_DIR = os.path.join(root_dir, 'data', 'Rainfall')
            cls.logger.debug(RAINFALL_DIR)
            for station in stations:
                cls.logger.info('Inserting Rainfall at %s', station)
                for file in glob(os.path.join(RAINFALL_DIR,
                                              station + '*.csv')):
                    with open(file, 'r') as csv_file:
                        timeseries = list(csv.reader(csv_file,
                                                     delimiter=' ',
                                                     skipinitialspace=True))

                    filename = os.path.basename(file)
                    date_arr = filename.split('.')[0].split('-')
                    cls.logger.debug('-'.join(date_arr[1:]))
                    start_date = datetime.datetime.strptime(
                        '-'.join(date_arr[1:]), '%Y-%m-%d')
                    end_date = start_date + datetime.timedelta(
                        hours=len(timeseries))
                    cls.logger.debug('start_date: %s, end_date: %s',
                                     start_date, end_date)
                    station_meta = dict(meta_data)
                    station_meta['station'] = station
                    station_meta['start_date'] = start_date.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    station_meta['end_date'] = end_date.strftime(
                        "%Y-%m-%d %H:%M:%S")

                    for i in range(0, 3):
                        station_meta['type'] = types[i]
                        event_id = cls.adapter.get_event_id(station_meta)
                        if event_id is None:
                            event_id = cls.adapter.create_event_id(
                                station_meta)
                            cls.logger.debug('HASH SHA256 created: %s',
                                             event_id)
                        else:
                            cls.logger.debug('HASH SHA256 exists: %s',
                                             event_id)

                        # for l in timeseries[:3] + timeseries[-2:] :
                        #     print(l)
                        if event_id not in cls.eventIds:
                            cls.eventIds.append(event_id)
                        row_count = cls.adapter.insert_timeseries(
                            event_id, timeseries[i * DAY_INTERVAL:(i + 1) *
                                                 DAY_INTERVAL], True)
                        cls.logger.debug('%s rows inserted.', row_count)
            cls.logger.info('Inserted Rainfall data.')

            # Store Discharge Data
            stations = ['Hanwella']
            types = [
                'Forecast-0-d', 'Forecast-1-d-after', 'Forecast-2-d-after',
                'Forecast-3-d-after', 'Forecast-4-d-after',
                'Forecast-5-d-after'
            ]
            meta_data = {
                'station': 'Hanwella',
                'variable': 'Discharge',
                'unit': 'm3/s',
                'type': 'Forecast-0-d',
                'source': 'HEC-HMS',
                'name': 'Forecast Test',
                'start_date': '2017-05-01 00:00:00',
                'end_date': '2017-05-03 23:00:00'
            }
            DISCHARGE_DIR = os.path.join(root_dir, 'data', 'Discharge')
            for station in stations:
                cls.logger.info('Inserting Discharges at %s', station)
                for file in glob(os.path.join(DISCHARGE_DIR,
                                              station + '*.csv')):
                    timeseries = csv.reader(open(file, 'r'),
                                            delimiter=',',
                                            skipinitialspace=True)
                    timeseries = list(timeseries)

                    filename = os.path.basename(file)
                    date_arr = filename.split('.')[0].split('-')
                    cls.logger.debug('-'.join(date_arr[1:]))
                    start_date = datetime.datetime.strptime(
                        '-'.join(date_arr[1:]), '%Y-%m-%d')
                    end_date = start_date + datetime.timedelta(
                        hours=len(timeseries))
                    cls.logger.debug('start_date: %s, end_date: %s',
                                     start_date, end_date)
                    station_meta = dict(meta_data)
                    station_meta['station'] = station
                    station_meta['start_date'] = start_date.strftime(
                        "%Y-%m-%d %H:%M:%S")
                    station_meta['end_date'] = end_date.strftime(
                        "%Y-%m-%d %H:%M:%S")

                    for i in range(0, 6):
                        station_meta['type'] = types[i]
                        event_id = cls.adapter.get_event_id(station_meta)
                        if event_id is None:
                            event_id = cls.adapter.create_event_id(
                                station_meta)
                            cls.logger.debug('HASH SHA256 created: %s',
                                             event_id)
                        else:
                            cls.logger.debug('HASH SHA256 exists: %s',
                                             event_id)

                        # for l in timeseries[:3] + timeseries[-2:] :
                        #     print(l)
                        if event_id not in cls.eventIds:
                            cls.eventIds.append(event_id)
                        row_count = cls.adapter.insert_timeseries(
                            event_id, timeseries[i * DAY_INTERVAL:(i + 1) *
                                                 DAY_INTERVAL], True)
                        cls.logger.debug('%s rows inserted.', row_count)
            cls.logger.info("Inserted Discharge data.")

            cls.contain_stations = [
                'Attanagalla', 'Daraniyagala', 'Glencourse', 'Holombuwa',
                'Kitulgala'
            ]

        except Exception:
            # Log the full stack trace; the exception object itself was unused.
            traceback.print_exc()
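A note on the slicing used in both insert loops above: timeseries[i * DAY_INTERVAL:(i + 1) * DAY_INTERVAL] carves one hourly CSV into consecutive daily chunks, one per forecast type. A minimal, self-contained sketch of that behaviour (the 72-row list is a stand-in, not real data):

DAY_INTERVAL = 24
timeseries = list(range(72))  # stand-in for 72 hourly rows from one CSV
daily_chunks = [timeseries[i * DAY_INTERVAL:(i + 1) * DAY_INTERVAL]
                for i in range(3)]
assert [len(c) for c in daily_chunks] == [24, 24, 24]  # one chunk per day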
Example no. 15
def create_hybrid_raincell(dir_path,
                           run_date,
                           run_time,
                           forward,
                           backward,
                           res_mins='60',
                           model_prefix='wrf',
                           forecast_source='wrf0',
                           run_name='Cloud-1'):
    try:
        observed_adapter = None
        # res_mins may arrive as a string (e.g. '60' from a CLI); normalise it
        # once so the duration arithmetic and %d formatting below work.
        res_mins = int(res_mins)
        kelani_basin_points_file = get_resource_path(
            'extraction/local/kelani_basin_points_250m.txt')
        kelani_lower_basin_shp_file = get_resource_path(
            'extraction/shp/klb-wgs84/klb-wgs84.shp')
        reference_net_cdf = get_resource_path(
            'extraction/netcdf/wrf_wrfout_d03_2019-03-31_18_00_00_rf')
        # config_path = os.path.join(os.getcwd(), 'raincelldat', 'config.json')
        config_path = os.path.join(os.getcwd(), 'config.json')
        with open(config_path) as json_file:
            config = json.load(json_file)
            # These sections are required below; indexing directly fails fast
            # with a KeyError instead of a NameError later on.
            forecast_db_config = config['forecast_db_config']
            observed_db_config = config['observed_db_config']
            obs_stations = copy.deepcopy(config['klb_obs_stations'])
            print('[run_date, run_time] : ', [run_date, run_time])
            start_ts_lk = datetime.strptime('%s %s' % (run_date, run_time),
                                            '%Y-%m-%d %H:%M:%S')
            start_ts_lk = start_ts_lk.strftime(
                '%Y-%m-%d_%H:00')  # '2018-05-24_08:00'
            duration_days = (int(backward), int(forward))
            obs_start = datetime.strptime(
                start_ts_lk,
                '%Y-%m-%d_%H:%M') - timedelta(days=duration_days[0])
            obs_end = datetime.strptime(start_ts_lk, '%Y-%m-%d_%H:%M')
            forecast_end = datetime.strptime(
                start_ts_lk,
                '%Y-%m-%d_%H:%M') + timedelta(days=duration_days[1])
            print([obs_start, obs_end, forecast_end])

            fcst_duration_start = obs_end.strftime('%Y-%m-%d %H:%M:%S')
            obs_duration_start = (
                datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
                timedelta(days=backward)).strftime('%Y-%m-%d 00:00:00')

            print('obs_duration_start : ', obs_duration_start)
            print('fcst_duration_start : ', fcst_duration_start)

            observed_duration = int(
                (datetime.strptime(fcst_duration_start, '%Y-%m-%d %H:%M:%S') -
                 datetime.strptime(obs_duration_start,
                                   '%Y-%m-%d %H:%M:%S')).total_seconds() /
                (60 * res_mins))

            print('observed_duration : ', observed_duration)

            raincell_file_path = os.path.join(dir_path, 'RAINCELL.DAT')
            if not os.path.isfile(raincell_file_path):
                points = np.genfromtxt(kelani_basin_points_file, delimiter=',')

                kel_lon_min = np.min(points, 0)[1]
                kel_lat_min = np.min(points, 0)[2]
                kel_lon_max = np.max(points, 0)[1]
                kel_lat_max = np.max(points, 0)[2]

                print(
                    '[kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max] : ',
                    [kel_lon_min, kel_lat_min, kel_lon_max, kel_lat_max])
                observed_adapter = MySQLAdapter(
                    host=observed_db_config['host'],
                    user=observed_db_config['user'],
                    password=observed_db_config['password'],
                    db=observed_db_config['db'])

                # print('obs_stations : ', obs_stations)
                observed_precipitations = get_observed_precip(
                    obs_stations,
                    obs_duration_start,
                    fcst_duration_start,
                    observed_duration,
                    observed_adapter,
                    forecast_source='wrf0')
                #print('observed_precipitations : ', observed_precipitations)
                observed_adapter.close()
                observed_adapter = None
                validated_obs_station = {}
                print('observed_precipitations.keys() : ',
                      observed_precipitations.keys())
                #"""
                for station_name in obs_stations.keys():
                    if station_name in observed_precipitations.keys():
                        validated_obs_station[station_name] = obs_stations[
                            station_name]
                    else:
                        print('station_name : ', station_name)
                print('validated_obs_station : ', validated_obs_station)

                if len(validated_obs_station) >= 3 and bool(
                        observed_precipitations):
                    # if bool(observed_precipitations):
                    thess_poly = get_voronoi_polygons(
                        validated_obs_station,
                        kelani_lower_basin_shp_file,
                        add_total_area=False)
                    print('thess_poly : ', thess_poly)
                    point_thess_idx = []
                    for point in points:
                        point_thess_idx.append(
                            is_inside_geo_df(thess_poly,
                                             lon=point[1],
                                             lat=point[2]))

                    print('len(points)', len(points))
                    print('len(point_thess_idx)', len(point_thess_idx))
                    print('point_thess_idx', point_thess_idx)

                    with open(raincell_file_path, 'w') as output_file:
                        output_file.write(
                            "%d %d %s %s\n" %
                            (res_mins, observed_duration, obs_duration_start,
                             fcst_duration_start))

                        print('range 1 : ',
                              int(24 * 60 * duration_days[0] / res_mins) + 1)
                        print('range 2 : ',
                              int(24 * 60 * duration_days[1] / res_mins) - 1)

                        for t in range(observed_duration - 5):
                            for i, point in enumerate(points):
                                rf = float(
                                    observed_precipitations[
                                        point_thess_idx[i]].values[t]
                                ) if point_thess_idx[i] is not None else 0
                                output_file.write('%d %.1f\n' % (point[0], rf))
                else:
                    print('----------------------------------------------')
                    print('No observed data.')
                    print('----------------------------------------------')
                #"""
    except Exception:
        print('Raincell generation error.')
        traceback.print_exc()
        try:
            if observed_adapter is not None:
                observed_adapter.close()
        except Exception as ex:
            print(str(ex))
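For orientation, a hedged sketch of how create_hybrid_raincell might be invoked; every argument value below is an illustrative assumption, not taken from the original source:

# Hypothetical invocation of the function defined above.
create_hybrid_raincell(dir_path='/tmp/flo2d_model',
                       run_date='2019-03-31',
                       run_time='18:00:00',
                       forward=3,
                       backward=2,
                       res_mins=60)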
Example no. 16
    if args.config:
        CONFIG = json.loads(open(os.path.join(ROOT_DIR, args.config)).read())
    else:
        CONFIG = json.loads(
            open(os.path.join(ROOT_DIR, './CONFIG.json')).read())
    forceInsert = args.force

    weather_stations = CONFIG['weather_stations']
    water_level_stations = CONFIG['water_level_stations']
    stations = weather_stations + water_level_stations

    extract_from_db = CONFIG['extract_from']
    push_to_db = CONFIG['push_to']

    extract_adapter = MySQLAdapter(host=extract_from_db['MYSQL_HOST'],
                                   user=extract_from_db['MYSQL_USER'],
                                   password=extract_from_db['MYSQL_PASSWORD'],
                                   db=extract_from_db['MYSQL_DB'])
    push_adapter = MySQLAdapter(host=push_to_db['MYSQL_HOST'],
                                user=push_to_db['MYSQL_USER'],
                                password=push_to_db['MYSQL_PASSWORD'],
                                db=push_to_db['MYSQL_DB'])

    # Prepare start and end datetimes.
    now_date = utc_to_sl(datetime.now())
    # now_date = datetime.now()
    start_datetime_obj = now_date - timedelta(hours=2)
    end_datetime_obj = now_date
    start_datetime = start_datetime_obj.strftime(COMMON_DATE_FORMAT)
    end_datetime = end_datetime_obj.strftime(COMMON_DATE_FORMAT)

    # start_datetime = '2018-07-04 00:00:00'
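utc_to_sl is imported from elsewhere in the project and is not shown here; a minimal sketch of what it presumably does, assuming Sri Lanka's fixed UTC+05:30 offset (the name and signature come from the call above, the body is an assumption):

from datetime import timedelta

def utc_to_sl(utc_dt):
    # Assumed behaviour: shift a naive UTC datetime to Sri Lanka Standard
    # Time, a fixed UTC+05:30 with no daylight saving.
    return utc_dt + timedelta(hours=5, minutes=30)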
Example no. 17
        i += 1.0

    print('Inserting Initial Water levels...')
    with open(INIT_WL_CONFIG) as initWLConfFile:
        initWaterLevels = initWLConfFile.readlines()
        for initWaterLevel in initWaterLevels:
            if len(initWaterLevel.split()):
                lines.append(initWaterLevel)

    f.writelines(lines)

    # Save Forecast values into Database
    opts = {'forceInsert': forceInsert, 'runName': runName}
    if storeData:
        adapter = MySQLAdapter(host=MYSQL_HOST,
                               user=MYSQL_USER,
                               password=MYSQL_PASSWORD,
                               db=MYSQL_DB)
        save_forecast_timeseries(adapter, csvList[CSV_NUM_METADATA_LINES:],
                                 date, time, opts)
    else:
        print('------------------------- MySQL DB not updated. -------------------------')

except Exception as e:
    print(e)
    traceback.print_exc()
finally:
    f.close()
    print('Completed ', DISCHARGE_CSV_FILE_PATH, ' to ', INFLOW_DAT_FILE_PATH)
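A design note on the finally: f.close() pattern above: a context manager gives the same cleanup guarantee more idiomatically. A minimal sketch, assuming f was opened on INFLOW_DAT_FILE_PATH as the final print suggests:

# Equivalent cleanup with a context manager; the file is closed even if
# an exception is raised inside the block.
with open(INFLOW_DAT_FILE_PATH, 'w') as f:
    f.writelines(lines)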
Example no. 18
        print('WARNING: Force Insert enabled.')

    CON_DATA = json.loads(open(CONTINUOUS_CONFIG).read())

    stations = CON_DATA['stations']

    metaData = {
        'station': '',
        'variable': '',
        'unit': '',
        'type': '',
        'source': '',
        'name': '',
    }
    adapter = MySQLAdapter(host=MYSQL_HOST,
                           user=MYSQL_USER,
                           password=MYSQL_PASSWORD,
                           db=MYSQL_DB)

    for station in stations:
        print('station:', station)
        #  Check whether station exists
        is_station_exists = adapter.get_station({'name': station['name']})
        if is_station_exists is None:
            print('Station %s does not exist.' % station['name'])
            if 'station_meta' in station and 'station_type' in station:
                station_meta = station['station_meta']
                station_meta.insert(0,
                                    Station.getType(station['station_type']))
                row_count = adapter.create_station(station_meta)
                if row_count > 0:
                    print('Created new station %s' % station_meta)