Example #1
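The driver below assumes the following imports; the nz_snow_tools module paths are a best guess and may differ between versions of the package:

import os
import datetime as dt
import json
import pickle
import numpy as np
import netCDF4 as nc
import cartopy.crs as ccrs
# assumed module locations -- adjust to match your nz_snow_tools installation
from nz_snow_tools.util.utils import make_regular_timeseries, convert_datetime_julian_day, setup_nztm_dem, trim_lat_lon_bounds
from nz_snow_tools.snow.clark2009_snow_model import calc_dswe
from nz_snow_tools.util.write_fsca_to_netcdf import setup_nztm_grid_netcdf
from nz_snow_tools.met.interp_met_data_hourly_vcsn_data import interpolate_met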
def run_snow_otf_nzcsm_main(hydro_years_to_take, run_id, met_inp, which_model, catchment, output_dem, mask_dem, mask_folder, dem_folder, output_folder, data_folder,
                      orog_infile, precip_infile, air_temp_infile, solar_rad_infile, config):
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # open input met data files
    nc_file_orog = nc.Dataset(data_folder + '/' + orog_infile, 'r')

    nc_file_rain = nc.Dataset(data_folder + '/' + precip_infile, 'r')
    nc_file_temp = nc.Dataset(data_folder + '/' + air_temp_infile, 'r')

    nc_rain = nc_file_rain.variables['sum_total_precip']
    nc_temp = nc_file_temp.variables['sfc_temp']

    if which_model == 'dsc_snow':
        nc_file_srad = nc.Dataset(data_folder + '/' + solar_rad_infile, 'r')
        nc_srad = nc_file_srad.variables['sfc_dw_sw_flux']
        vcsn_dt4 = nc.num2date(nc_file_srad.variables['time1'][:], nc_file_srad.variables['time1'].units, only_use_cftime_datetimes=False, only_use_python_datetimes=True)

    # load met grid (assume same for all input data)
    vcsn_elev = nc_file_orog.variables['orog_model'][:]
    vcsn_elev_interp = vcsn_elev.copy()
    vcsn_lats = nc_file_orog.variables['rlat'][:]
    vcsn_lons = nc_file_orog.variables['rlon'][:]
    rot_pole = nc_file_orog.variables['rotated_pole']
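    # note: ccrs.RotatedPole's positional parameters are (pole_longitude, pole_latitude, central_rotated_longitude)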
    rot_pole_crs = ccrs.RotatedPole(rot_pole.grid_north_pole_longitude, rot_pole.grid_north_pole_latitude, rot_pole.north_pole_grid_longitude)

    vcsn_dt = nc.num2date(nc_file_rain.variables['time2'][:], nc_file_rain.variables['time2'].units, only_use_cftime_datetimes=False, only_use_python_datetimes=True)
    vcsn_dt2 = nc.num2date(nc_file_temp.variables['time0'][:], nc_file_temp.variables['time0'].units, only_use_cftime_datetimes=False, only_use_python_datetimes=True)


    # calculate model grid etc:
    # output DEM
    dem_file = dem_folder + '/' + output_dem + '.tif'
    if output_dem == 'si_dem_250m':
        nztm_dem, x_centres, y_centres, lat_array, lon_array = setup_nztm_dem(dem_file, extent_w=1.08e6, extent_e=1.72e6, extent_n=5.52e6, extent_s=4.82e6,
                                                                              resolution=250)
    elif output_dem == 'nz_dem_250m':
        nztm_dem, x_centres, y_centres, lat_array, lon_array = setup_nztm_dem(dem_file, extent_w=1.05e6, extent_e=2.10e6, extent_n=6.275e6, extent_s=4.70e6,
                                                                              resolution=250, origin='bottomleft')
    else:
        raise ValueError('incorrect dem chosen: {}'.format(output_dem))

    if mask_dem:
        # Get the masks for the individual regions of interest
        mask = np.load(mask_folder + '/{}_{}.npy'.format(catchment, output_dem))
        # Trim down the number of latitudes requested so it all stays in memory
        wgs84_lats, wgs84_lons, elev, northings, eastings = trim_lat_lon_bounds(mask, lat_array, lon_array, nztm_dem, y_centres, x_centres)
        _, _, trimmed_mask, _, _ = trim_lat_lon_bounds(mask, lat_array, lon_array, mask.copy(), y_centres, x_centres)
    else:
        wgs84_lats = lat_array
        wgs84_lons = lon_array
        elev = nztm_dem
        northings = y_centres
        eastings = x_centres
        # set mask to all land points
        mask = elev > 0
        trimmed_mask = mask
    # calculate lat/lon on rotated grid of input
    yy, xx = np.meshgrid(northings, eastings, indexing='ij')
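    # transform_points returns an (ny, nx, 3) array of (x, y, z) coordinates in the rotated CRS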
    rotated_coords = rot_pole_crs.transform_points(ccrs.epsg(2193), xx, yy)
    rlats = rotated_coords[:, :, 1]
    rlons = rotated_coords[:, :, 0]
    rlons[rlons < 0] = rlons[rlons < 0] + 360

    # set up time to run, paths to input files etc
    for year_to_take in hydro_years_to_take:
        print(year_to_take)
        # specify the days to run (output is at the end of each day)
        # out_dt = np.asarray(make_regular_timeseries(dt.datetime(year_to_take, 7, 1), dt.datetime(year_to_take, 7, 2), 86400))
        out_dt = np.asarray(make_regular_timeseries(dt.datetime(year_to_take - 1, 4, 1), dt.datetime(year_to_take, 4, 1), 86400))

        # set up output netCDF:
        out_nc_file = setup_nztm_grid_netcdf(
            output_folder + '/snow_out_{}_{}_{}_{}_{}_{}.nc'.format(met_inp, which_model, catchment, output_dem, run_id, year_to_take),
            None, ['swe', 'acc', 'melt', 'rain', 'ros', 'ros_melt'],
            out_dt, northings, eastings, wgs84_lats, wgs84_lons, elev)

        # set up initial states of prognostic variables
        init_swe = np.zeros(elev.shape)  # default to no snow
        init_d_snow = np.ones(elev.shape) * 30  # default to a month since snowfall
        swe = init_swe
        d_snow = init_d_snow
        # set up daily buckets for melt and accumulation
        bucket_melt = swe * 0
        bucket_acc = swe * 0
        swe_day_before = swe * 0
        bucket_rain = swe * 0
        bucket_ros = swe * 0
        bucket_ros_melt = swe * 0

        # store initial swe value
        out_nc_file.variables['swe'][0, :, :] = init_swe
        out_nc_file.variables['acc'][0, :, :] = 0
        out_nc_file.variables['melt'][0, :, :] = 0
        out_nc_file.variables['rain'][0, :, :] = 0
        out_nc_file.variables['ros'][0, :, :] = 0
        out_nc_file.variables['ros_melt'][0, :, :] = 0

        # for each day:
        for ii, dt_t in enumerate(out_dt[:-1]):
            print('processing', dt_t)
            # load one day of hourly precipitation and air temperature data
            idx_rain = int(np.where(vcsn_dt == dt_t)[0][0])
            precip_hourly = nc_rain[idx_rain:idx_rain + 24]
            idx_temp = int(np.where(vcsn_dt2 == dt_t)[0][0])
            temp_hourly = nc_temp[idx_temp:idx_temp + 24]

            # interpolate data to fine grid
            hi_res_precip = interpolate_met(precip_hourly.filled(np.nan), 'rain', vcsn_lons, vcsn_lats, vcsn_elev_interp, rlons, rlats, elev)
            hi_res_temp = interpolate_met(temp_hourly.filled(np.nan), 'tmax', vcsn_lons, vcsn_lats, vcsn_elev_interp, rlons, rlats, elev)

            # mask out areas we don't want/need
            if mask is not None:
                hi_res_precip[:, trimmed_mask == 0] = np.nan
                hi_res_temp[:, trimmed_mask == 0] = np.nan

            hourly_dt = np.asarray(make_regular_timeseries(dt_t, dt_t + dt.timedelta(hours=23), 3600))
            hourly_doy = convert_datetime_julian_day(hourly_dt)
            hourly_temp = hi_res_temp
            hourly_precip = hi_res_precip

            if which_model == 'dsc_snow':
                idx_srad = int(np.where(vcsn_dt4 == dt_t)[0][0])
                sw_rad_hourly = nc_srad[idx_srad:idx_srad + 24]
                hi_res_sw_rad = interpolate_met(sw_rad_hourly.filled(np.nan), 'srad', vcsn_lons, vcsn_lats, vcsn_elev_interp, rlons, rlats, elev)
                if mask is not None:
                    hi_res_sw_rad[:, trimmed_mask == 0] = np.nan
                hourly_swin = hi_res_sw_rad

            # calculate snow and output to netcdf
            for i in range(len(hourly_dt)):
                # d_snow += dtstep / 86400.0
                if which_model == 'dsc_snow':
                    swe, d_snow, melt, acc = calc_dswe(swe, d_snow, hourly_temp[i], hourly_precip[i], hourly_doy[i], 3600, which_melt=which_model,
                                                       sw=hourly_swin[i], **config)
                else:
                    swe, d_snow, melt, acc = calc_dswe(swe, d_snow, hourly_temp[i], hourly_precip[i], hourly_doy[i], 3600, which_melt=which_model,
                                                       **config)
                # print swe[0]
                bucket_melt = bucket_melt + melt
                bucket_acc = bucket_acc + acc
                rain = hourly_precip[i] - acc
                bucket_rain = bucket_rain + rain
                bucket_ros = bucket_ros + rain * (swe > 0).astype(int)  # binary snow-cover mask (0 or 1) multiplied by rain

                # first calculate the energy available for melting due to rainfall (W m^-2) over snow-covered cells only
                # (4220 J kg^-1 K^-1 is the specific heat of water; 334000 J kg^-1 is the latent heat of fusion)
                qprc = (swe > 0).astype(int) * 4220. * rain / 3600. * (hourly_temp[i] - 273.16)
                # then calculate potential melt per timestep. don't limit to available swe, as the rain could have contributed to initial snowmelt (accounted for by the degree-day model)
                ros_melt = qprc / 334000. * 3600.
                ros_melt[(ros_melt < 0)] = 0  # only take the positive portion (there could be some rain at air temperatures < 0)
                bucket_ros_melt = bucket_ros_melt + ros_melt
            # output at the end of each day
            for var, data in zip(['swe', 'acc', 'melt', 'rain', 'ros', 'ros_melt'], [swe, bucket_acc, bucket_melt, bucket_rain, bucket_ros, bucket_ros_melt]):
                # data[(np.isnan(data))] = -9999.
                out_nc_file.variables[var][ii + 1, :, :] = data

            # decide if albedo is reset
            d_snow += 1
            swe_alb = swe - swe_day_before
            d_snow[(swe_alb > config['alb_swe_thres'])] = 0
            swe_day_before = swe * 1.0
            # reset buckets
            bucket_melt = bucket_melt * 0
            bucket_acc = bucket_acc * 0
            bucket_rain = bucket_rain * 0
            bucket_ros = bucket_ros * 0
            bucket_ros_melt = bucket_ros_melt * 0
        out_nc_file.close()

        json.dump(config, open(output_folder + '/config_{}_{}_{}_{}_{}_{}.json'.format(met_inp, which_model, catchment, output_dem, run_id, year_to_take), 'w'))
        pickle.dump(config,
                    open(output_folder + '/config_{}_{}_{}_{}_{}_{}.pkl'.format(met_inp, which_model, catchment, output_dem, run_id, year_to_take), 'wb'),
                    protocol=3)
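A hypothetical invocation of the driver above (all paths, filenames, and config values are illustrative placeholders; config must at least contain the 'alb_swe_thres' key used above, plus whatever parameters calc_dswe expects):

config = {'alb_swe_thres': 10.0}  # placeholder -- extend with the parameters calc_dswe requires
run_snow_otf_nzcsm_main(hydro_years_to_take=[2019], run_id='test_run', met_inp='nzcsm',
                        which_model='clark2009', catchment='clutha', output_dem='si_dem_250m',
                        mask_dem=False, mask_folder=None, dem_folder='/path/to/dem',
                        output_folder='/path/to/output', data_folder='/path/to/met',
                        orog_infile='orog.nc', precip_infile='precip.nc',
                        air_temp_infile='temp.nc', solar_rad_infile='srad.nc', config=config)

Example #2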
import numpy as np
import pickle
import matplotlib.pyplot as plt
import datetime as dt
from nz_snow_tools.util.utils import convert_datetime_julian_day

catchment = 'Wilkin'  # string identifying catchment modelled
output_dem = 'nztm250m'  # identifier for output dem
years_to_take = range(2000, 2001 + 1)  # range(2016, 2016 + 1)  # [2013 + 1]  # range(2001, 2013 + 1)
modis_sc_threshold = 50  # value of fsca (in percent) that is counted as being snow covered
output_folder = r'C:\Users\conwayjp\OneDrive - NIWA\projects\DSC Snow\MODIS'

[ann_ts_av_sca_m, ann_ts_av_sca_thres_m, ann_dt_m, ann_scd_m] = pickle.load(
    open(
        output_folder + '/summary_MODIS_{}_{}_{}_{}_thres{}.pkl'.format(
            years_to_take[0], years_to_take[-1], catchment, output_dem,
            modis_sc_threshold), 'rb'))
for ts_av_sca_m, dt_m in zip(ann_ts_av_sca_m, ann_dt_m):
    plt.plot(convert_datetime_julian_day(dt_m),
             ts_av_sca_m,
             label=dt_m[0].year)

ax = plt.gca()
ax.set_ylim([0, 1])
ax.set_ylabel('SCA')
ax.set_xlabel('day of year')
plt.show()
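Example #3

The fragment below is truncated; it additionally assumes import netCDF4 as nc, import numpy as np, and convert_datetime_julian_day and convert_dt_to_hourdec from nz_snow_tools.util.utils. Y_file is a path template (a format string taking the year) defined earlier in the original script and not shown here.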
# nc_file_VN = nc.Dataset(r"C:/Users/Bonnamourar/Desktop/SIN/VCSN/VN_2007-2017/tseries_2007010122_2017123121_utc_topnet_Mueller_strahler3-VN.nc",'r')
# PHILISTINE
# nc_file_VC = nc.Dataset(r"C:/Users/Bonnamourar/Desktop/SIN/VCSN/VC_2007-2019/tseries_2007010122_2019013121_utc_topnet_Philisti_strahler3-VC.nc",'r')
# nc_file_VN = nc.Dataset(r"C:/Users/Bonnamourar/Desktop/SIN/VCSN/VN_2007-2017/tseries_2007010122_2017123121_utc_topnet_Philisti_strahler3-VN.nc",'r')
# MURCHISON
nc_file_VC = nc.Dataset(r"C:/Users/Bonnamourar/Desktop/SIN/VCSN/VC_2007-2019/tseries_2007010122_2019013121_utc_topnet_Murchiso_strahler3-VC.nc",'r')
nc_file_VN = nc.Dataset(r"C:/Users/Bonnamourar/Desktop/SIN/VCSN/VN_2007-2017/tseries_2007010122_2017123121_utc_topnet_Murchiso_strahler3-VN.nc", 'r')

Stname = ['Murchison']
for i in range(0, 10):
    year = 2009 + i


    # load npy data
    inp_dat = np.load(Y_file.format(year), allow_pickle=True)
    inp_doy = np.asarray(convert_datetime_julian_day(inp_dat[:, 0]))
    inp_hourdec = convert_dt_to_hourdec(inp_dat[:, 0])
    plot_dt = inp_dat[:, 0]  # model stores initial state
    inp_precip_obs = np.asarray(inp_dat[:, 4], dtype=float)

    # load VCSN files
    # snow storage VC&VN files
    swe_VC = nc_file_VC.variables['snwstor'][:, 0, 0, 0]
    swe_VN = nc_file_VN.variables['snwstor'][:, 0, 0, 0]

    # time VC&VN files
    nc_datetimes_VC = nc.num2date(nc_file_VC.variables['time'][:], nc_file_VC.variables['time'].units)
    nc_datetimes_VN = nc.num2date(nc_file_VN.variables['time'][:], nc_file_VN.variables['time'].units)

    # accumulate precipitation VCSN files
    precip_VC = nc_file_VC.variables['aprecip'][:, 0, 0, 0]
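Example #4

The function below additionally assumes import numpy as np, import netCDF4 as nc, convert_dt_to_hourdec and convert_datetime_julian_day from nz_snow_tools.util.utils, and read_met_input and calc_dswe from the snow-model module (assumed to be nz_snow_tools.snow.clark2009_snow_model).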
def snow_main(inp_file, init_swe=None, init_d_snow=None, which_melt='clark2009', alb_swe_thres=5.0, **config):
    """
    main snow model loop. handles timestepping and storage of output.
    assumes all the input data is on the same spatial and temporal grid.
    Runs the model for the length of the netCDF file
    Output is written at the end of each day.
    :param inp_file: full path to netCDF input file
    :param init_swe: initial grid of snow water equivalent (SWE; mm w.e.), dimensions (spatial:)
    :param init_d_snow: initial grid of times since last snowfall, dimensions (spatial:)
    :param which_melt: string specifying which melt model to be run. options include 'clark2009', 'dsc_snow'
    :param alb_swe_thres: threshold for daily snowfall resetting albedo (mm w.e). calculated from daily swe change so that accumulation must be > melt
    :return: st_swe, st_melt, st_acc - grids of SWE (mm w.e.), melt, and accumulation at the end of each day (n = number of days + 1)
    """

    # load netCDF file and get the spatial dimensions out of it.
    inp_nc_file = nc.Dataset(inp_file)
    inp_dt = nc.num2date(inp_nc_file.variables['time'][:], inp_nc_file.variables['time'].units)
    num_timesteps = len(inp_dt)
    inp_hourdec = convert_dt_to_hourdec(inp_dt)
    inp_doy = convert_datetime_julian_day(inp_dt)
    # assume timestep is constant through input data
    dtstep = int((inp_dt[1] - inp_dt[0]).total_seconds())
    # calculate how many days in input file
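    # i.e. one output step per day, plus one for the initial state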
    num_out_steps = int(1 + num_timesteps * dtstep / 86400.0)

    inp_shape = inp_nc_file.get_variables_by_attributes(standard_name='air_temperature')[0].shape
    shape_xy = inp_shape[1:]

    # set up storage arrays
    if len(shape_xy) == 2:
        st_swe = np.full((num_out_steps, shape_xy[0], shape_xy[1]), np.nan)
        st_melt = np.full((num_out_steps, shape_xy[0], shape_xy[1]), np.nan)
        st_acc = np.full((num_out_steps, shape_xy[0], shape_xy[1]), np.nan)
    elif len(shape_xy) == 1:
        st_swe = np.full((num_out_steps, shape_xy[0]), np.nan)
        st_melt = np.full((num_out_steps, shape_xy[0]), np.nan)
        st_acc = np.full((num_out_steps, shape_xy[0]), np.nan)

    # set up initial states of prognostic variables if not passed in
    if init_swe is None:
        init_swe = np.zeros(shape_xy)  # default to no snow
    swe = init_swe
    if init_d_snow is None:
        init_d_snow = np.ones(shape_xy) * 30  # default to a month since snowfall
    d_snow = init_d_snow
    # set up daily buckets for melt and accumulation
    bucket_melt = swe * 0
    bucket_acc = swe * 0

    # store initial swe value
    st_swe[0, :] = init_swe
    st_melt[0, :] = 0
    st_acc[0, :] = 0
    ii = 1

    # run through and update SWE for each timestep in input data
    for i in range(num_timesteps):
        # d_snow += dtstep / 86400.0

        inp_ta, inp_precip, inp_sw = read_met_input(inp_nc_file, i)
        swe, d_snow, melt, acc = calc_dswe(swe, d_snow, inp_ta, inp_precip, inp_doy[i], dtstep, sw=inp_sw, which_melt=which_melt, **config)

        # print swe[0]
        bucket_melt = bucket_melt + melt
        bucket_acc = bucket_acc + acc
        if i != 0 and (inp_hourdec[i] == 0 or inp_hourdec[i] == 24):  # output daily; skip the initial timestep
            st_swe[ii, :] = swe
            st_melt[ii, :] = bucket_melt
            st_acc[ii, :] = bucket_acc
            swe_alb = st_swe[ii, :] - st_swe[ii - 1, :]
            d_snow[(swe_alb > alb_swe_thres)] = 0
            ii = ii + 1  # move storage counter for next output timestep
            bucket_melt = bucket_melt * 0  # reset buckets
            bucket_acc = bucket_acc * 0

    return st_swe, st_melt, st_acc
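A hypothetical call (the input filename is a placeholder; the file must contain a time variable plus the met variables read_met_input expects):

st_swe, st_melt, st_acc = snow_main('met_input.nc', which_melt='clark2009', alb_swe_thres=5.0)

Example #5

The final fragment comes from inside the daily loop of another driver script; names such as hi_res_sw_rad, hi_res_max_temp, hi_res_min_temp, hourly_precip, day_weightings, day_weightings_1, lats, lons, swe, d_snow, which_model, and config are defined earlier in that script and are not shown here.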
        hourly_dt = np.asarray(
            make_regular_timeseries(dt_t, dt_t + dt.timedelta(hours=23), 3600))
        hourly_swin = daily_to_hourly_swin_grids(hi_res_sw_rad,
                                                 lats,
                                                 lons,
                                                 hourly_dt,
                                                 single_dt=True)
        # air temperature is a three-part sinusoid between the minimum at 8am and the maximum at 2pm. NOTE: the original VCSN data has the correct timestamp - i.e. minimum to 9am, maximum from 9am.
        # use three days of data but only keep the middle day
        hourly_temp = daily_to_hourly_temp_grids(hi_res_max_temp,
                                                 hi_res_min_temp)
        hourly_temp = hourly_temp[24:48]

        # store precip day weights.
        day_weightings.extend(day_weightings_1)
        hourly_doy = convert_datetime_julian_day(hourly_dt)

        # calculate snow and output to netcdf
        for i in range(len(hourly_dt)):
            # d_snow += dtstep / 86400.0

            swe, d_snow, melt, acc = calc_dswe(swe,
                                               d_snow,
                                               hourly_temp[i],
                                               hourly_precip[i],
                                               hourly_doy[i],
                                               3600,
                                               sw=hourly_swin[i],
                                               which_melt=which_model,
                                               **config)