Example #1
# NOTE: the h5py.File(...) open that defines hdf_io, and the read of land_mask
# used below, are not shown in this excerpt
etopo_regrid = hdf_io['etopo_regrid'][...]
lon_4km = hdf_io['lon_4km'][...]
lat_4km = hdf_io['lat_4km'][...]
hdf_io.close()

grid_shape = land_mask.shape

# defining prediction range
# dt list for all
N_all = 365 + 366 + 365 + 365 + 365
all_list = [datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_all)]
# dt list for pred: the first N_pred days, starting 2019-01-01
N_pred = 15
pred_list = [datetime(2019, 1, 1, 0) + timedelta(days=x) for x in range(N_pred)]
# indices
ind_pred = du.dt_match(all_list, pred_list)
ind_pred_sea = du.season_ind_sep(pred_list, key_format='{}')
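
# du.dt_match and du.season_ind_sep are project utilities. As a rough,
# hypothetical stand-in (an assumption, not the project code), dt_match can be
# read as "positions of the second list's entries within the first":
def dt_match_sketch(full_list, sub_list):
    lookup = {dt: i for i, dt in enumerate(full_list)}
    return [lookup[dt] for dt in sub_list if dt in lookup]
# e.g. dt_match_sketch(all_list, pred_list) would give the same positions as ind_pred.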

# macros
# ind_trans = 504 # now in the namelist
model_import_dir = temp_dir
# overlapped tile prediction settings
param = {}
param['gap'] = 8
param['edge'] = 32
param['size'] = 128
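
# How these tile settings are consumed is not shown in this excerpt. A
# hypothetical sketch (assumptions: `size` is the tile width, `edge` is the
# border trimmed from each tile when stitching, and the stride keeps a 2*edge
# overlap between neighbours; the role of `gap` is not modelled here):
def tile_slices_sketch(grid_shape, size=128, edge=32):
    stride = size - 2 * edge  # step so the trimmed tiles still cover the grid
    rows, cols = grid_shape
    for r0 in range(0, max(rows - size, 0) + 1, stride):
        for c0 in range(0, max(cols - size, 0) + 1, stride):
            yield slice(r0, r0 + size), slice(c0, c0 + size)
# e.g. list(tile_slices_sketch(grid_shape)) enumerates overlapping 128x128 windows.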


# loop over variables and seasons
VARS = ['TMAX', 'TMIN', 'TMEAN']
seasons = ['djf', 'mam', 'jja', 'son']
Example #2
sine_fit = False  # <-- NOTE: important configuration switch

VAR_list = ['TMAX', 'TMIN', 'TMEAN']  # PCT is excluded because it uses different feature engineering

# datetime info
N_days = 365 + 366 + 365 + 365 + 365  # 2015-2020 (end year exclusive)
date_list = [
    datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_days)
]

N_train = 365 + 366 + 365  # 2015-2018 (end year exclusive)
train_list = [
    datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_train)
]
ind_train = du.dt_match(date_list, train_list)

# import geographical variables
with h5py.File(PRISM_dir + 'PRISM_regrid_2015_2020.hdf', 'r') as hdf_io:
    # etopo
    etopo_4km = hdf_io['etopo_4km'][...]
    etopo_regrid = hdf_io['etopo_regrid'][...]
    # lon/lat
    lon_4km = hdf_io['lon_4km'][...]
    lat_4km = hdf_io['lat_4km'][...]
    lon_025 = hdf_io['lon_025'][...]
    lat_025 = hdf_io['lat_025'][...]
    # land_mask
    land_mask = hdf_io['land_mask'][...]
# non-negative elevation correction: clip below-sea-level ETOPO values to zero
etopo_4km[etopo_4km < 0] = 0
# dt list for all
N_all = 365 + 366 + 365 + 365 + 365
all_list = [datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_all)]

# dt list for train and valid (test)
N_train = 365 + 366 + 365  # 2015-2018 (end year exclusive), training period
N_valid = 365  # 2018-2019 (end year exclusive), validation (test) period
train_list = [
    datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_train)
]
valid_list = [
    datetime(2018, 1, 1, 0) + timedelta(days=x) for x in range(N_valid)
]

# pick ind
ind_train = du.dt_match(all_list, train_list)
ind_valid = du.dt_match(all_list, valid_list)
# season-separated indices
ind_train_sea = du.season_ind_sep(train_list, key_format='{}')
ind_valid_sea = du.season_ind_sep(valid_list, key_format='{}')
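
# du.season_ind_sep is a project utility. A hypothetical stand-in (assumption:
# it groups list positions by meteorological season and names the dict keys
# via key_format, e.g. '{}' -> 'djf', '{}_train' -> 'djf_train'):
def season_ind_sep_sketch(dt_list, key_format='{}'):
    month_to_season = {12: 'djf', 1: 'djf', 2: 'djf', 3: 'mam', 4: 'mam', 5: 'mam',
                       6: 'jja', 7: 'jja', 8: 'jja', 9: 'son', 10: 'son', 11: 'son'}
    out = {key_format.format(s): [] for s in ('djf', 'mam', 'jja', 'son')}
    for i, dt in enumerate(dt_list):
        out[key_format.format(month_to_season[dt.month])].append(i)
    return {key: np.array(ind) for key, ind in out.items()}
# e.g. season_ind_sep_sketch(valid_list)['djf'] would index the DJF days of 2018.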
# shape
grid_shape = land_mask.shape
N_no_clim = 3  # total number of features in the "no clim" case
N_with_clim = 4  # total number of features in the "with clim" case

# loop over variables
VARS = ['TMAX', 'TMIN']
for var in VARS:
    print('===== {} ====='.format(var))
    C_no_clim = np.zeros(grid_shape + (4 * N_no_clim, ))
    I_no_clim = np.zeros(grid_shape + (4 * 1, ))
    # (part of the loop body appears to be omitted in this excerpt; BATCH_dir
    #  and `source` are defined in the omitted portion)
    # remove leftover batch files for this variable/source
    cmd = 'rm {}*T{}-*npy'.format(BATCH_dir, source)
    print(cmd)
    subprocess.call(cmd, shell=True)
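    # A pure-Python alternative to shelling out 'rm' (a hypothetical sketch;
    # assumes BATCH_dir ends with a path separator, as the format string implies):
    import glob
    import os
    for path in glob.glob('{}*T{}-*npy'.format(BATCH_dir, source)):
        os.remove(path)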

# training date range
N_days = 365 + 366 + 365 + 365 + 365  # 2015-2020 (end year exclusive)
date_list = [
    datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_days)
]
#
N_train = 365 + 366 + 365
train_list = [
    datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_train)
]
pick_train = du.dt_match(date_list, train_list)
IND_train = du.season_ind_sep(train_list,
                              key_format='{}_train')  # split by seasons
#
N_valid = 365
valid_list = [
    datetime(2018, 1, 1, 0) + timedelta(days=x) for x in range(N_valid)
]
pick_valid = du.dt_match(date_list, valid_list)
IND_valid = du.season_ind_sep(valid_list,
                              key_format='{}_valid')  # split by seasons
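
# Given key_format='{}_train' / '{}_valid', the season dictionaries would then
# be indexed like this (a usage sketch with illustrative variable names,
# assuming season keys 'djf', 'mam', 'jja', 'son' as used elsewhere above):
ind_djf_train = IND_train['djf_train']  # positions of DJF days within train_list
ind_djf_valid = IND_valid['djf_valid']  # positions of DJF days within valid_list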

# etopo fields
input_2d = {}
keys_2d = ['etopo_4km', 'etopo_regrid']
# etopo from an example
Example #5
    # NOTE: the h5py.File(...) open that defines hdf_io is not shown in this excerpt
    land_mask_clean = hdf_io['land_mask_025'][...]

# available time range of the file (2018-2019) <----- support 2020 test in the future
N_all = 365 + 366 + 365 + 365 + 365
all_list = [datetime(2015, 1, 1, 0) + timedelta(days=x) for x in range(N_all)]
# time range for plotting (2018-2019)
N_pred_era = 365 + 243
pred_list_era = [
    datetime(2018, 1, 1, 0) + timedelta(days=x) for x in range(N_pred_era)
]
N_pred_jra = 365 + 365
pred_list_jra = [
    datetime(2018, 1, 1, 0) + timedelta(days=x) for x in range(N_pred_jra)
]
# indices
ind_era = du.dt_match(all_list, pred_list_era)
ind_jra = du.dt_match(all_list, pred_list_jra)

with h5py.File(ERA_dir + era_pred_name + '_2018.hdf', 'r') as hdf_io:
    ERA_2018 = hdf_io[TMEAN_key][...]
with h5py.File(ERA_dir + era_pred_name + '_2019.hdf', 'r') as hdf_io:
    ERA_2019 = hdf_io[TMEAN_key][...]

with h5py.File(JRA_dir + jra_pred_name + '_2018.hdf', 'r') as hdf_io:
    JRA_2018 = hdf_io[TMEAN_key][...]
with h5py.File(JRA_dir + jra_pred_name + '_2019.hdf', 'r') as hdf_io:
    JRA_2019 = hdf_io[TMEAN_key][...]

ERA_clean = np.concatenate((ERA_2018, ERA_2019), axis=0)
JRA_clean = np.concatenate((JRA_2018, JRA_2019), axis=0)
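
# A quick consistency check could follow the concatenation (a hypothetical
# sketch; assumes axis 0 of the prediction arrays is daily time, and
# ERA_plot/JRA_plot are illustrative names):
assert ERA_clean.shape[0] >= len(pred_list_era), 'ERA record shorter than its plotting range'
assert JRA_clean.shape[0] >= len(pred_list_jra), 'JRA record shorter than its plotting range'
ERA_plot = ERA_clean[:len(pred_list_era), ...]
JRA_plot = JRA_clean[:len(pred_list_jra), ...]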